nvan15 committed on
Commit 7079e03 · verified · 1 Parent(s): 7829988

Batch upload part 27

Files changed (50)
  1. nlu/glue_exp/mrpc/1dr0.15,mlr2e-04,clr2e-03,ep=30.0t=18d22h41m02/ft/adapter_config.json +26 -0
  2. nlu/glue_exp/mrpc/1dr0.15,mlr2e-04,clr2e-03,ep=30.0t=18d22h41m02/ft2/adapter_config.json +26 -0
  3. nlu/glue_exp/mrpc/1dr0.15,mlr2e-04,clr2e-03,ep=30.0t=18d22h41m02/ft2/adapter_model.bin +3 -0
  4. nlu/glue_exp/mrpc/3dr0.1,mlr2e-04,clr2e-03,ep=30.0t=18d22h26m32/MRPC.tsv +1726 -0
  5. nlu/glue_exp/mrpc/3dr0.1,mlr2e-04,clr2e-03,ep=30.0t=18d22h26m32/all_results.json +11 -0
  6. nlu/glue_exp/mrpc/3dr0.1,mlr2e-04,clr2e-03,ep=30.0t=18d22h26m32/eval_results.json +11 -0
  7. nlu/glue_exp/mrpc/3dr0.1,mlr2e-04,clr2e-03,ep=30.0t=18d22h26m32/ft/adapter_config.json +26 -0
  8. nlu/glue_exp/mrpc/3dr0.1,mlr2e-04,clr2e-03,ep=30.0t=18d22h26m32/ft/added_tokens.json +3 -0
  9. nlu/glue_exp/mrpc/3dr0.1,mlr2e-04,clr2e-03,ep=30.0t=18d22h26m32/ft/special_tokens_map.json +15 -0
  10. nlu/glue_exp/mrpc/3dr0.1,mlr2e-04,clr2e-03,ep=30.0t=18d22h26m32/ft/spm.model +3 -0
  11. nlu/glue_exp/mrpc/3dr0.1,mlr2e-04,clr2e-03,ep=30.0t=18d22h26m32/ft/tokenizer.json +0 -0
  12. nlu/glue_exp/mrpc/3dr0.1,mlr2e-04,clr2e-03,ep=30.0t=18d22h26m32/ft/tokenizer_config.json +60 -0
  13. nlu/glue_exp/mrpc/3dr0.1,mlr2e-04,clr2e-03,ep=30.0t=18d22h26m32/ft2/adapter_config.json +26 -0
  14. nlu/glue_exp/mrpc/3dr0.1,mlr2e-04,clr2e-03,ep=30.0t=18d22h26m32/ft2/adapter_model.bin +3 -0
  15. nlu/glue_exp/mrpc/3dr0.1,mlr2e-04,clr2e-03,ep=30.0t=18d22h26m32/trainer_state.json +655 -0
  16. nlu/glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h32m05,r=4,s43/MRPC.tsv +1726 -0
  17. nlu/glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h32m05,r=4,s43/all_results.json +11 -0
  18. nlu/glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h32m05,r=4,s43/eval_results.json +11 -0
  19. nlu/glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h32m05,r=4,s43/ft/adapter_config.json +26 -0
  20. nlu/glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h32m05,r=4,s43/ft/added_tokens.json +3 -0
  21. nlu/glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h32m05,r=4,s43/ft/special_tokens_map.json +15 -0
  22. nlu/glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h32m05,r=4,s43/ft/spm.model +3 -0
  23. nlu/glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h32m05,r=4,s43/ft/tokenizer.json +0 -0
  24. nlu/glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h32m05,r=4,s43/ft/tokenizer_config.json +60 -0
  25. nlu/glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h32m05,r=4,s43/ft2/adapter_config.json +26 -0
  26. nlu/glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h32m05,r=4,s43/ft2/adapter_model.bin +3 -0
  27. nlu/glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h32m05,r=4,s43/trainer_state.json +655 -0
  28. nlu/glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h43m47,r=4,s44/MRPC.tsv +1726 -0
  29. nlu/glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h43m47,r=4,s44/all_results.json +11 -0
  30. nlu/glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h43m47,r=4,s44/eval_results.json +11 -0
  31. nlu/glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h43m47,r=4,s44/ft/adapter_config.json +26 -0
  32. nlu/glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h43m47,r=4,s44/ft/added_tokens.json +3 -0
  33. nlu/glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h43m47,r=4,s44/ft/special_tokens_map.json +15 -0
  34. nlu/glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h43m47,r=4,s44/ft/spm.model +3 -0
  35. nlu/glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h43m47,r=4,s44/ft/tokenizer.json +0 -0
  36. nlu/glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h43m47,r=4,s44/ft/tokenizer_config.json +60 -0
  37. nlu/glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h43m47,r=4,s44/ft2/adapter_config.json +26 -0
  38. nlu/glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h43m47,r=4,s44/ft2/adapter_model.bin +3 -0
  39. nlu/glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h43m47,r=4,s44/trainer_state.json +655 -0
  40. nlu/glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h55m38,r=4,s45/MRPC.tsv +1726 -0
  41. nlu/glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h55m38,r=4,s45/all_results.json +11 -0
  42. nlu/glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h55m38,r=4,s45/eval_results.json +11 -0
  43. nlu/glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h55m38,r=4,s45/ft/adapter_config.json +26 -0
  44. nlu/glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h55m38,r=4,s45/ft/added_tokens.json +3 -0
  45. nlu/glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h55m38,r=4,s45/ft/special_tokens_map.json +15 -0
  46. nlu/glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h55m38,r=4,s45/ft/spm.model +3 -0
  47. nlu/glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h55m38,r=4,s45/ft/tokenizer.json +0 -0
  48. nlu/glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h55m38,r=4,s45/ft/tokenizer_config.json +60 -0
  49. nlu/glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h55m38,r=4,s45/ft2/adapter_model.bin +3 -0
  50. nlu/glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h55m38,r=4,s45/trainer_state.json +655 -0
nlu/glue_exp/mrpc/1dr0.15,mlr2e-04,clr2e-03,ep=30.0t=18d22h41m02/ft/adapter_config.json ADDED
@@ -0,0 +1,26 @@
+ {
+ "T": 1.0,
+ "base_model_name_or_path": "microsoft/deberta-v3-base",
+ "bias": "none",
+ "drop_out": 0.15,
+ "inference_mode": false,
+ "layers_to_transform": null,
+ "modules_to_save": [
+ "classifier",
+ "pooler"
+ ],
+ "num_rotations": 1,
+ "peft_type": "ROTATION",
+ "r": 4,
+ "revision": null,
+ "target_modules": [
+ "output.dense",
+ "attention.output.dense",
+ "intermediate.dense",
+ "query_proj",
+ "key_proj",
+ "value_proj"
+ ],
+ "target_modules_to_skip": null,
+ "task_type": "SEQ_CLS"
+ }
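The adapter configs in this upload are plain JSON, so they can be inspected without any PEFT tooling. A minimal sketch, assuming a local checkout where the path below exists (the path is copied from the file list above and is otherwise hypothetical):

import json

# Hypothetical local path to one of the uploaded ROTATION adapter configs.
config_path = "nlu/glue_exp/mrpc/1dr0.15,mlr2e-04,clr2e-03,ep=30.0t=18d22h41m02/ft/adapter_config.json"

with open(config_path) as f:
    cfg = json.load(f)

# Key fields of the rotation adapter: rank, number of rotations, dropout, and targeted modules.
print(cfg["peft_type"], "r =", cfg["r"], "num_rotations =", cfg["num_rotations"])
print("drop_out:", cfg["drop_out"])
print("target_modules:", ", ".join(cfg["target_modules"]))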
nlu/glue_exp/mrpc/1dr0.15,mlr2e-04,clr2e-03,ep=30.0t=18d22h41m02/ft2/adapter_config.json ADDED
@@ -0,0 +1,26 @@
+ {
+ "T": 1.0,
+ "base_model_name_or_path": "microsoft/deberta-v3-base",
+ "bias": "none",
+ "drop_out": 0.15,
+ "inference_mode": true,
+ "layers_to_transform": null,
+ "modules_to_save": [
+ "classifier",
+ "pooler"
+ ],
+ "num_rotations": 1,
+ "peft_type": "ROTATION",
+ "r": 4,
+ "revision": null,
+ "target_modules": [
+ "output.dense",
+ "attention.output.dense",
+ "intermediate.dense",
+ "query_proj",
+ "key_proj",
+ "value_proj"
+ ],
+ "target_modules_to_skip": null,
+ "task_type": "SEQ_CLS"
+ }
nlu/glue_exp/mrpc/1dr0.15,mlr2e-04,clr2e-03,ep=30.0t=18d22h41m02/ft2/adapter_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2dcdd19e8394363f6c95f5e8945212598c8421cd08f2012465c1a7cef07f05b7
+ size 7449859
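adapter_model.bin is stored as a Git LFS pointer, so the diff only shows its sha256 oid and size. A minimal sketch for checking a downloaded copy against that oid; the local file path is a hypothetical example, and it assumes the real weights have already been fetched (for instance via git lfs pull):

import hashlib

# Hypothetical local copy of the adapter weights after the LFS download.
weights_path = "ft2/adapter_model.bin"
expected_oid = "2dcdd19e8394363f6c95f5e8945212598c8421cd08f2012465c1a7cef07f05b7"

h = hashlib.sha256()
with open(weights_path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)

# The digest should match the `oid sha256:` line of the LFS pointer above.
print(h.hexdigest() == expected_oid)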
nlu/glue_exp/mrpc/3dr0.1,mlr2e-04,clr2e-03,ep=30.0t=18d22h26m32/MRPC.tsv ADDED
@@ -0,0 +1,1726 @@
1
+ index prediction
2
+ 0 1
3
+ 1 1
4
+ 2 1
5
+ 3 1
6
+ 4 0
7
+ 5 1
8
+ 6 0
9
+ 7 0
10
+ 8 1
11
+ 9 0
12
+ 10 1
13
+ 11 1
14
+ 12 1
15
+ 13 0
16
+ 14 1
17
+ 15 1
18
+ 16 1
19
+ 17 1
20
+ 18 1
21
+ 19 1
22
+ 20 1
23
+ 21 1
24
+ 22 1
25
+ 23 1
26
+ 24 0
27
+ 25 1
28
+ 26 1
29
+ 27 1
30
+ 28 0
31
+ 29 1
32
+ 30 1
33
+ 31 0
34
+ 32 0
35
+ 33 0
36
+ 34 0
37
+ 35 0
38
+ 36 1
39
+ 37 1
40
+ 38 0
41
+ 39 1
42
+ 40 1
43
+ 41 1
44
+ 42 0
45
+ 43 1
46
+ 44 1
47
+ 45 0
48
+ 46 0
49
+ 47 0
50
+ 48 1
51
+ 49 1
52
+ 50 1
53
+ 51 1
54
+ 52 1
55
+ 53 1
56
+ 54 0
57
+ 55 1
58
+ 56 1
59
+ 57 1
60
+ 58 0
61
+ 59 1
62
+ 60 1
63
+ 61 1
64
+ 62 1
65
+ 63 1
66
+ 64 1
67
+ 65 1
68
+ 66 1
69
+ 67 1
70
+ 68 1
71
+ 69 1
72
+ 70 0
73
+ 71 1
74
+ 72 1
75
+ 73 0
76
+ 74 1
77
+ 75 0
78
+ 76 1
79
+ 77 0
80
+ 78 0
81
+ 79 0
82
+ 80 0
83
+ 81 0
84
+ 82 1
85
+ 83 0
86
+ 84 0
87
+ 85 0
88
+ 86 0
89
+ 87 1
90
+ 88 0
91
+ 89 1
92
+ 90 0
93
+ 91 0
94
+ 92 1
95
+ 93 1
96
+ 94 1
97
+ 95 0
98
+ 96 1
99
+ 97 1
100
+ 98 0
101
+ 99 1
102
+ 100 1
103
+ 101 1
104
+ 102 1
105
+ 103 1
106
+ 104 1
107
+ 105 0
108
+ 106 0
109
+ 107 1
110
+ 108 1
111
+ 109 1
112
+ 110 1
113
+ 111 0
114
+ 112 0
115
+ 113 1
116
+ 114 1
117
+ 115 0
118
+ 116 0
119
+ 117 0
120
+ 118 1
121
+ 119 1
122
+ 120 1
123
+ 121 0
124
+ 122 1
125
+ 123 0
126
+ 124 1
127
+ 125 1
128
+ 126 1
129
+ 127 1
130
+ 128 0
131
+ 129 1
132
+ 130 0
133
+ 131 1
134
+ 132 1
135
+ 133 0
136
+ 134 1
137
+ 135 0
138
+ 136 0
139
+ 137 1
140
+ 138 1
141
+ 139 1
142
+ 140 1
143
+ 141 1
144
+ 142 1
145
+ 143 1
146
+ 144 1
147
+ 145 1
148
+ 146 0
149
+ 147 1
150
+ 148 1
151
+ 149 1
152
+ 150 1
153
+ 151 1
154
+ 152 0
155
+ 153 0
156
+ 154 1
157
+ 155 1
158
+ 156 1
159
+ 157 1
160
+ 158 0
161
+ 159 1
162
+ 160 1
163
+ 161 1
164
+ 162 1
165
+ 163 1
166
+ 164 0
167
+ 165 1
168
+ 166 1
169
+ 167 0
170
+ 168 1
171
+ 169 1
172
+ 170 1
173
+ 171 1
174
+ 172 1
175
+ 173 0
176
+ 174 1
177
+ 175 1
178
+ 176 1
179
+ 177 0
180
+ 178 1
181
+ 179 1
182
+ 180 0
183
+ 181 0
184
+ 182 0
185
+ 183 1
186
+ 184 1
187
+ 185 1
188
+ 186 0
189
+ 187 0
190
+ 188 1
191
+ 189 1
192
+ 190 0
193
+ 191 0
194
+ 192 1
195
+ 193 1
196
+ 194 0
197
+ 195 0
198
+ 196 0
199
+ 197 1
200
+ 198 1
201
+ 199 0
202
+ 200 0
203
+ 201 1
204
+ 202 1
205
+ 203 1
206
+ 204 0
207
+ 205 0
208
+ 206 1
209
+ 207 1
210
+ 208 0
211
+ 209 1
212
+ 210 0
213
+ 211 1
214
+ 212 1
215
+ 213 0
216
+ 214 1
217
+ 215 0
218
+ 216 1
219
+ 217 1
220
+ 218 1
221
+ 219 1
222
+ 220 1
223
+ 221 0
224
+ 222 1
225
+ 223 0
226
+ 224 1
227
+ 225 1
228
+ 226 1
229
+ 227 1
230
+ 228 1
231
+ 229 0
232
+ 230 1
233
+ 231 0
234
+ 232 1
235
+ 233 1
236
+ 234 0
237
+ 235 0
238
+ 236 1
239
+ 237 1
240
+ 238 0
241
+ 239 0
242
+ 240 1
243
+ 241 1
244
+ 242 1
245
+ 243 1
246
+ 244 0
247
+ 245 1
248
+ 246 1
249
+ 247 0
250
+ 248 1
251
+ 249 1
252
+ 250 1
253
+ 251 1
254
+ 252 0
255
+ 253 1
256
+ 254 1
257
+ 255 1
258
+ 256 1
259
+ 257 1
260
+ 258 1
261
+ 259 1
262
+ 260 0
263
+ 261 1
264
+ 262 1
265
+ 263 1
266
+ 264 0
267
+ 265 1
268
+ 266 1
269
+ 267 1
270
+ 268 1
271
+ 269 1
272
+ 270 1
273
+ 271 0
274
+ 272 1
275
+ 273 0
276
+ 274 0
277
+ 275 1
278
+ 276 0
279
+ 277 1
280
+ 278 1
281
+ 279 1
282
+ 280 1
283
+ 281 0
284
+ 282 1
285
+ 283 1
286
+ 284 1
287
+ 285 0
288
+ 286 1
289
+ 287 0
290
+ 288 0
291
+ 289 0
292
+ 290 1
293
+ 291 1
294
+ 292 1
295
+ 293 1
296
+ 294 1
297
+ 295 0
298
+ 296 0
299
+ 297 0
300
+ 298 1
301
+ 299 0
302
+ 300 1
303
+ 301 1
304
+ 302 0
305
+ 303 1
306
+ 304 1
307
+ 305 0
308
+ 306 1
309
+ 307 1
310
+ 308 0
311
+ 309 1
312
+ 310 1
313
+ 311 1
314
+ 312 0
315
+ 313 1
316
+ 314 1
317
+ 315 1
318
+ 316 1
319
+ 317 1
320
+ 318 1
321
+ 319 0
322
+ 320 1
323
+ 321 1
324
+ 322 1
325
+ 323 1
326
+ 324 1
327
+ 325 1
328
+ 326 1
329
+ 327 0
330
+ 328 0
331
+ 329 0
332
+ 330 0
333
+ 331 1
334
+ 332 1
335
+ 333 1
336
+ 334 1
337
+ 335 0
338
+ 336 1
339
+ 337 1
340
+ 338 1
341
+ 339 1
342
+ 340 1
343
+ 341 1
344
+ 342 1
345
+ 343 1
346
+ 344 0
347
+ 345 1
348
+ 346 0
349
+ 347 0
350
+ 348 0
351
+ 349 1
352
+ 350 1
353
+ 351 1
354
+ 352 0
355
+ 353 1
356
+ 354 0
357
+ 355 0
358
+ 356 0
359
+ 357 0
360
+ 358 1
361
+ 359 0
362
+ 360 0
363
+ 361 0
364
+ 362 1
365
+ 363 1
366
+ 364 0
367
+ 365 1
368
+ 366 0
369
+ 367 0
370
+ 368 0
371
+ 369 1
372
+ 370 0
373
+ 371 1
374
+ 372 1
375
+ 373 1
376
+ 374 1
377
+ 375 0
378
+ 376 0
379
+ 377 1
380
+ 378 0
381
+ 379 0
382
+ 380 1
383
+ 381 1
384
+ 382 0
385
+ 383 1
386
+ 384 0
387
+ 385 0
388
+ 386 1
389
+ 387 0
390
+ 388 1
391
+ 389 1
392
+ 390 1
393
+ 391 0
394
+ 392 0
395
+ 393 1
396
+ 394 0
397
+ 395 1
398
+ 396 1
399
+ 397 1
400
+ 398 0
401
+ 399 0
402
+ 400 1
403
+ 401 1
404
+ 402 1
405
+ 403 1
406
+ 404 1
407
+ 405 0
408
+ 406 0
409
+ 407 1
410
+ 408 1
411
+ 409 1
412
+ 410 0
413
+ 411 1
414
+ 412 1
415
+ 413 1
416
+ 414 1
417
+ 415 0
418
+ 416 0
419
+ 417 1
420
+ 418 1
421
+ 419 1
422
+ 420 1
423
+ 421 1
424
+ 422 1
425
+ 423 1
426
+ 424 0
427
+ 425 0
428
+ 426 0
429
+ 427 1
430
+ 428 0
431
+ 429 1
432
+ 430 0
433
+ 431 1
434
+ 432 1
435
+ 433 1
436
+ 434 1
437
+ 435 1
438
+ 436 1
439
+ 437 0
440
+ 438 1
441
+ 439 0
442
+ 440 0
443
+ 441 0
444
+ 442 1
445
+ 443 1
446
+ 444 1
447
+ 445 1
448
+ 446 1
449
+ 447 1
450
+ 448 1
451
+ 449 1
452
+ 450 0
453
+ 451 1
454
+ 452 1
455
+ 453 1
456
+ 454 1
457
+ 455 1
458
+ 456 1
459
+ 457 1
460
+ 458 0
461
+ 459 1
462
+ 460 1
463
+ 461 1
464
+ 462 1
465
+ 463 0
466
+ 464 0
467
+ 465 0
468
+ 466 1
469
+ 467 1
470
+ 468 1
471
+ 469 1
472
+ 470 0
473
+ 471 1
474
+ 472 1
475
+ 473 1
476
+ 474 1
477
+ 475 1
478
+ 476 1
479
+ 477 1
480
+ 478 1
481
+ 479 1
482
+ 480 0
483
+ 481 1
484
+ 482 1
485
+ 483 1
486
+ 484 0
487
+ 485 1
488
+ 486 1
489
+ 487 1
490
+ 488 1
491
+ 489 1
492
+ 490 1
493
+ 491 1
494
+ 492 1
495
+ 493 1
496
+ 494 0
497
+ 495 1
498
+ 496 1
499
+ 497 1
500
+ 498 0
501
+ 499 1
502
+ 500 1
503
+ 501 1
504
+ 502 1
505
+ 503 0
506
+ 504 1
507
+ 505 1
508
+ 506 0
509
+ 507 1
510
+ 508 0
511
+ 509 0
512
+ 510 1
513
+ 511 1
514
+ 512 0
515
+ 513 1
516
+ 514 1
517
+ 515 1
518
+ 516 1
519
+ 517 0
520
+ 518 1
521
+ 519 1
522
+ 520 1
523
+ 521 1
524
+ 522 1
525
+ 523 1
526
+ 524 1
527
+ 525 1
528
+ 526 1
529
+ 527 1
530
+ 528 1
531
+ 529 1
532
+ 530 1
533
+ 531 1
534
+ 532 1
535
+ 533 0
536
+ 534 1
537
+ 535 1
538
+ 536 1
539
+ 537 1
540
+ 538 0
541
+ 539 1
542
+ 540 1
543
+ 541 1
544
+ 542 1
545
+ 543 1
546
+ 544 1
547
+ 545 1
548
+ 546 0
549
+ 547 0
550
+ 548 0
551
+ 549 0
552
+ 550 1
553
+ 551 1
554
+ 552 0
555
+ 553 1
556
+ 554 0
557
+ 555 1
558
+ 556 0
559
+ 557 1
560
+ 558 1
561
+ 559 1
562
+ 560 1
563
+ 561 0
564
+ 562 0
565
+ 563 0
566
+ 564 1
567
+ 565 1
568
+ 566 1
569
+ 567 1
570
+ 568 0
571
+ 569 0
572
+ 570 0
573
+ 571 1
574
+ 572 1
575
+ 573 1
576
+ 574 1
577
+ 575 1
578
+ 576 1
579
+ 577 0
580
+ 578 1
581
+ 579 1
582
+ 580 1
583
+ 581 1
584
+ 582 1
585
+ 583 1
586
+ 584 1
587
+ 585 1
588
+ 586 0
589
+ 587 1
590
+ 588 1
591
+ 589 1
592
+ 590 1
593
+ 591 1
594
+ 592 1
595
+ 593 0
596
+ 594 0
597
+ 595 1
598
+ 596 1
599
+ 597 0
600
+ 598 1
601
+ 599 1
602
+ 600 1
603
+ 601 1
604
+ 602 1
605
+ 603 1
606
+ 604 1
607
+ 605 1
608
+ 606 0
609
+ 607 1
610
+ 608 0
611
+ 609 1
612
+ 610 1
613
+ 611 1
614
+ 612 0
615
+ 613 1
616
+ 614 1
617
+ 615 1
618
+ 616 1
619
+ 617 0
620
+ 618 1
621
+ 619 1
622
+ 620 1
623
+ 621 0
624
+ 622 1
625
+ 623 0
626
+ 624 1
627
+ 625 0
628
+ 626 1
629
+ 627 1
630
+ 628 1
631
+ 629 1
632
+ 630 0
633
+ 631 1
634
+ 632 1
635
+ 633 0
636
+ 634 0
637
+ 635 1
638
+ 636 0
639
+ 637 1
640
+ 638 1
641
+ 639 1
642
+ 640 1
643
+ 641 1
644
+ 642 1
645
+ 643 1
646
+ 644 1
647
+ 645 1
648
+ 646 1
649
+ 647 1
650
+ 648 1
651
+ 649 0
652
+ 650 1
653
+ 651 0
654
+ 652 0
655
+ 653 1
656
+ 654 1
657
+ 655 1
658
+ 656 1
659
+ 657 1
660
+ 658 1
661
+ 659 1
662
+ 660 1
663
+ 661 1
664
+ 662 1
665
+ 663 1
666
+ 664 1
667
+ 665 0
668
+ 666 1
669
+ 667 0
670
+ 668 0
671
+ 669 0
672
+ 670 1
673
+ 671 1
674
+ 672 0
675
+ 673 1
676
+ 674 1
677
+ 675 0
678
+ 676 1
679
+ 677 1
680
+ 678 1
681
+ 679 1
682
+ 680 0
683
+ 681 1
684
+ 682 1
685
+ 683 1
686
+ 684 1
687
+ 685 0
688
+ 686 1
689
+ 687 0
690
+ 688 1
691
+ 689 1
692
+ 690 1
693
+ 691 1
694
+ 692 0
695
+ 693 1
696
+ 694 0
697
+ 695 0
698
+ 696 1
699
+ 697 1
700
+ 698 0
701
+ 699 1
702
+ 700 0
703
+ 701 1
704
+ 702 1
705
+ 703 0
706
+ 704 1
707
+ 705 0
708
+ 706 0
709
+ 707 0
710
+ 708 1
711
+ 709 1
712
+ 710 0
713
+ 711 0
714
+ 712 1
715
+ 713 1
716
+ 714 1
717
+ 715 1
718
+ 716 1
719
+ 717 0
720
+ 718 1
721
+ 719 1
722
+ 720 1
723
+ 721 1
724
+ 722 0
725
+ 723 0
726
+ 724 1
727
+ 725 1
728
+ 726 1
729
+ 727 1
730
+ 728 1
731
+ 729 1
732
+ 730 1
733
+ 731 1
734
+ 732 1
735
+ 733 0
736
+ 734 1
737
+ 735 1
738
+ 736 1
739
+ 737 1
740
+ 738 1
741
+ 739 1
742
+ 740 1
743
+ 741 1
744
+ 742 1
745
+ 743 1
746
+ 744 1
747
+ 745 0
748
+ 746 1
749
+ 747 0
750
+ 748 1
751
+ 749 0
752
+ 750 1
753
+ 751 1
754
+ 752 1
755
+ 753 0
756
+ 754 1
757
+ 755 1
758
+ 756 1
759
+ 757 1
760
+ 758 1
761
+ 759 1
762
+ 760 1
763
+ 761 1
764
+ 762 1
765
+ 763 1
766
+ 764 1
767
+ 765 1
768
+ 766 1
769
+ 767 1
770
+ 768 1
771
+ 769 1
772
+ 770 1
773
+ 771 1
774
+ 772 1
775
+ 773 1
776
+ 774 1
777
+ 775 1
778
+ 776 1
779
+ 777 1
780
+ 778 0
781
+ 779 1
782
+ 780 0
783
+ 781 0
784
+ 782 1
785
+ 783 0
786
+ 784 0
787
+ 785 1
788
+ 786 1
789
+ 787 0
790
+ 788 1
791
+ 789 1
792
+ 790 1
793
+ 791 1
794
+ 792 1
795
+ 793 1
796
+ 794 1
797
+ 795 1
798
+ 796 0
799
+ 797 0
800
+ 798 1
801
+ 799 0
802
+ 800 0
803
+ 801 0
804
+ 802 1
805
+ 803 1
806
+ 804 0
807
+ 805 0
808
+ 806 1
809
+ 807 0
810
+ 808 1
811
+ 809 1
812
+ 810 1
813
+ 811 1
814
+ 812 0
815
+ 813 1
816
+ 814 0
817
+ 815 1
818
+ 816 0
819
+ 817 0
820
+ 818 0
821
+ 819 1
822
+ 820 0
823
+ 821 1
824
+ 822 0
825
+ 823 1
826
+ 824 1
827
+ 825 1
828
+ 826 1
829
+ 827 0
830
+ 828 0
831
+ 829 1
832
+ 830 1
833
+ 831 1
834
+ 832 1
835
+ 833 0
836
+ 834 1
837
+ 835 0
838
+ 836 1
839
+ 837 1
840
+ 838 1
841
+ 839 0
842
+ 840 0
843
+ 841 0
844
+ 842 1
845
+ 843 1
846
+ 844 0
847
+ 845 1
848
+ 846 1
849
+ 847 1
850
+ 848 0
851
+ 849 1
852
+ 850 1
853
+ 851 0
854
+ 852 1
855
+ 853 0
856
+ 854 1
857
+ 855 0
858
+ 856 1
859
+ 857 1
860
+ 858 0
861
+ 859 1
862
+ 860 1
863
+ 861 1
864
+ 862 0
865
+ 863 1
866
+ 864 1
867
+ 865 1
868
+ 866 0
869
+ 867 1
870
+ 868 1
871
+ 869 1
872
+ 870 0
873
+ 871 1
874
+ 872 1
875
+ 873 1
876
+ 874 0
877
+ 875 0
878
+ 876 1
879
+ 877 0
880
+ 878 0
881
+ 879 0
882
+ 880 1
883
+ 881 1
884
+ 882 1
885
+ 883 1
886
+ 884 0
887
+ 885 1
888
+ 886 1
889
+ 887 1
890
+ 888 0
891
+ 889 1
892
+ 890 1
893
+ 891 1
894
+ 892 0
895
+ 893 0
896
+ 894 0
897
+ 895 0
898
+ 896 0
899
+ 897 1
900
+ 898 1
901
+ 899 1
902
+ 900 1
903
+ 901 1
904
+ 902 0
905
+ 903 1
906
+ 904 1
907
+ 905 1
908
+ 906 1
909
+ 907 1
910
+ 908 1
911
+ 909 1
912
+ 910 0
913
+ 911 1
914
+ 912 0
915
+ 913 0
916
+ 914 1
917
+ 915 1
918
+ 916 1
919
+ 917 1
920
+ 918 0
921
+ 919 1
922
+ 920 1
923
+ 921 1
924
+ 922 1
925
+ 923 0
926
+ 924 1
927
+ 925 1
928
+ 926 1
929
+ 927 0
930
+ 928 1
931
+ 929 1
932
+ 930 1
933
+ 931 0
934
+ 932 0
935
+ 933 0
936
+ 934 0
937
+ 935 1
938
+ 936 0
939
+ 937 0
940
+ 938 1
941
+ 939 1
942
+ 940 1
943
+ 941 1
944
+ 942 0
945
+ 943 0
946
+ 944 1
947
+ 945 0
948
+ 946 1
949
+ 947 1
950
+ 948 1
951
+ 949 1
952
+ 950 0
953
+ 951 1
954
+ 952 1
955
+ 953 1
956
+ 954 1
957
+ 955 1
958
+ 956 1
959
+ 957 1
960
+ 958 1
961
+ 959 1
962
+ 960 1
963
+ 961 1
964
+ 962 1
965
+ 963 0
966
+ 964 0
967
+ 965 1
968
+ 966 1
969
+ 967 1
970
+ 968 1
971
+ 969 1
972
+ 970 1
973
+ 971 0
974
+ 972 1
975
+ 973 1
976
+ 974 1
977
+ 975 1
978
+ 976 1
979
+ 977 1
980
+ 978 1
981
+ 979 1
982
+ 980 1
983
+ 981 1
984
+ 982 0
985
+ 983 1
986
+ 984 0
987
+ 985 1
988
+ 986 1
989
+ 987 0
990
+ 988 1
991
+ 989 0
992
+ 990 0
993
+ 991 1
994
+ 992 1
995
+ 993 1
996
+ 994 0
997
+ 995 1
998
+ 996 1
999
+ 997 1
1000
+ 998 1
1001
+ 999 1
1002
+ 1000 0
1003
+ 1001 1
1004
+ 1002 0
1005
+ 1003 0
1006
+ 1004 1
1007
+ 1005 1
1008
+ 1006 1
1009
+ 1007 0
1010
+ 1008 1
1011
+ 1009 0
1012
+ 1010 1
1013
+ 1011 0
1014
+ 1012 1
1015
+ 1013 1
1016
+ 1014 1
1017
+ 1015 1
1018
+ 1016 1
1019
+ 1017 1
1020
+ 1018 1
1021
+ 1019 1
1022
+ 1020 1
1023
+ 1021 1
1024
+ 1022 1
1025
+ 1023 0
1026
+ 1024 1
1027
+ 1025 0
1028
+ 1026 0
1029
+ 1027 1
1030
+ 1028 0
1031
+ 1029 1
1032
+ 1030 0
1033
+ 1031 1
1034
+ 1032 1
1035
+ 1033 1
1036
+ 1034 1
1037
+ 1035 0
1038
+ 1036 1
1039
+ 1037 1
1040
+ 1038 1
1041
+ 1039 0
1042
+ 1040 0
1043
+ 1041 1
1044
+ 1042 0
1045
+ 1043 0
1046
+ 1044 1
1047
+ 1045 1
1048
+ 1046 0
1049
+ 1047 1
1050
+ 1048 1
1051
+ 1049 1
1052
+ 1050 1
1053
+ 1051 1
1054
+ 1052 1
1055
+ 1053 1
1056
+ 1054 0
1057
+ 1055 1
1058
+ 1056 1
1059
+ 1057 1
1060
+ 1058 1
1061
+ 1059 1
1062
+ 1060 1
1063
+ 1061 1
1064
+ 1062 1
1065
+ 1063 1
1066
+ 1064 1
1067
+ 1065 1
1068
+ 1066 1
1069
+ 1067 1
1070
+ 1068 0
1071
+ 1069 1
1072
+ 1070 1
1073
+ 1071 1
1074
+ 1072 1
1075
+ 1073 1
1076
+ 1074 1
1077
+ 1075 1
1078
+ 1076 1
1079
+ 1077 1
1080
+ 1078 1
1081
+ 1079 1
1082
+ 1080 0
1083
+ 1081 0
1084
+ 1082 1
1085
+ 1083 1
1086
+ 1084 1
1087
+ 1085 1
1088
+ 1086 1
1089
+ 1087 0
1090
+ 1088 1
1091
+ 1089 1
1092
+ 1090 1
1093
+ 1091 0
1094
+ 1092 1
1095
+ 1093 1
1096
+ 1094 1
1097
+ 1095 1
1098
+ 1096 1
1099
+ 1097 1
1100
+ 1098 1
1101
+ 1099 1
1102
+ 1100 1
1103
+ 1101 1
1104
+ 1102 0
1105
+ 1103 1
1106
+ 1104 1
1107
+ 1105 0
1108
+ 1106 1
1109
+ 1107 0
1110
+ 1108 1
1111
+ 1109 1
1112
+ 1110 0
1113
+ 1111 1
1114
+ 1112 0
1115
+ 1113 0
1116
+ 1114 1
1117
+ 1115 1
1118
+ 1116 0
1119
+ 1117 1
1120
+ 1118 1
1121
+ 1119 1
1122
+ 1120 0
1123
+ 1121 0
1124
+ 1122 1
1125
+ 1123 1
1126
+ 1124 0
1127
+ 1125 1
1128
+ 1126 0
1129
+ 1127 0
1130
+ 1128 1
1131
+ 1129 1
1132
+ 1130 1
1133
+ 1131 1
1134
+ 1132 0
1135
+ 1133 1
1136
+ 1134 0
1137
+ 1135 1
1138
+ 1136 0
1139
+ 1137 1
1140
+ 1138 0
1141
+ 1139 0
1142
+ 1140 0
1143
+ 1141 1
1144
+ 1142 1
1145
+ 1143 1
1146
+ 1144 0
1147
+ 1145 1
1148
+ 1146 1
1149
+ 1147 0
1150
+ 1148 1
1151
+ 1149 1
1152
+ 1150 0
1153
+ 1151 1
1154
+ 1152 1
1155
+ 1153 0
1156
+ 1154 0
1157
+ 1155 1
1158
+ 1156 1
1159
+ 1157 0
1160
+ 1158 0
1161
+ 1159 1
1162
+ 1160 0
1163
+ 1161 0
1164
+ 1162 1
1165
+ 1163 1
1166
+ 1164 1
1167
+ 1165 0
1168
+ 1166 0
1169
+ 1167 1
1170
+ 1168 1
1171
+ 1169 1
1172
+ 1170 0
1173
+ 1171 1
1174
+ 1172 1
1175
+ 1173 1
1176
+ 1174 1
1177
+ 1175 0
1178
+ 1176 1
1179
+ 1177 0
1180
+ 1178 1
1181
+ 1179 1
1182
+ 1180 1
1183
+ 1181 1
1184
+ 1182 1
1185
+ 1183 1
1186
+ 1184 1
1187
+ 1185 1
1188
+ 1186 0
1189
+ 1187 1
1190
+ 1188 1
1191
+ 1189 0
1192
+ 1190 0
1193
+ 1191 0
1194
+ 1192 1
1195
+ 1193 1
1196
+ 1194 1
1197
+ 1195 1
1198
+ 1196 0
1199
+ 1197 1
1200
+ 1198 1
1201
+ 1199 1
1202
+ 1200 0
1203
+ 1201 1
1204
+ 1202 0
1205
+ 1203 1
1206
+ 1204 0
1207
+ 1205 1
1208
+ 1206 0
1209
+ 1207 0
1210
+ 1208 1
1211
+ 1209 1
1212
+ 1210 1
1213
+ 1211 1
1214
+ 1212 1
1215
+ 1213 1
1216
+ 1214 1
1217
+ 1215 1
1218
+ 1216 1
1219
+ 1217 1
1220
+ 1218 1
1221
+ 1219 1
1222
+ 1220 1
1223
+ 1221 0
1224
+ 1222 1
1225
+ 1223 1
1226
+ 1224 0
1227
+ 1225 1
1228
+ 1226 0
1229
+ 1227 1
1230
+ 1228 1
1231
+ 1229 1
1232
+ 1230 0
1233
+ 1231 0
1234
+ 1232 1
1235
+ 1233 1
1236
+ 1234 1
1237
+ 1235 1
1238
+ 1236 1
1239
+ 1237 1
1240
+ 1238 1
1241
+ 1239 1
1242
+ 1240 1
1243
+ 1241 0
1244
+ 1242 1
1245
+ 1243 1
1246
+ 1244 1
1247
+ 1245 1
1248
+ 1246 0
1249
+ 1247 0
1250
+ 1248 1
1251
+ 1249 1
1252
+ 1250 1
1253
+ 1251 1
1254
+ 1252 1
1255
+ 1253 1
1256
+ 1254 1
1257
+ 1255 1
1258
+ 1256 1
1259
+ 1257 1
1260
+ 1258 0
1261
+ 1259 1
1262
+ 1260 0
1263
+ 1261 1
1264
+ 1262 1
1265
+ 1263 1
1266
+ 1264 1
1267
+ 1265 1
1268
+ 1266 1
1269
+ 1267 1
1270
+ 1268 1
1271
+ 1269 1
1272
+ 1270 1
1273
+ 1271 1
1274
+ 1272 0
1275
+ 1273 0
1276
+ 1274 0
1277
+ 1275 1
1278
+ 1276 0
1279
+ 1277 1
1280
+ 1278 1
1281
+ 1279 0
1282
+ 1280 1
1283
+ 1281 0
1284
+ 1282 1
1285
+ 1283 0
1286
+ 1284 1
1287
+ 1285 1
1288
+ 1286 1
1289
+ 1287 0
1290
+ 1288 1
1291
+ 1289 1
1292
+ 1290 1
1293
+ 1291 1
1294
+ 1292 0
1295
+ 1293 1
1296
+ 1294 0
1297
+ 1295 1
1298
+ 1296 1
1299
+ 1297 1
1300
+ 1298 1
1301
+ 1299 1
1302
+ 1300 0
1303
+ 1301 0
1304
+ 1302 1
1305
+ 1303 1
1306
+ 1304 1
1307
+ 1305 0
1308
+ 1306 0
1309
+ 1307 1
1310
+ 1308 1
1311
+ 1309 0
1312
+ 1310 1
1313
+ 1311 1
1314
+ 1312 0
1315
+ 1313 1
1316
+ 1314 1
1317
+ 1315 1
1318
+ 1316 1
1319
+ 1317 1
1320
+ 1318 0
1321
+ 1319 1
1322
+ 1320 1
1323
+ 1321 1
1324
+ 1322 1
1325
+ 1323 1
1326
+ 1324 1
1327
+ 1325 1
1328
+ 1326 0
1329
+ 1327 1
1330
+ 1328 1
1331
+ 1329 0
1332
+ 1330 1
1333
+ 1331 1
1334
+ 1332 1
1335
+ 1333 0
1336
+ 1334 1
1337
+ 1335 0
1338
+ 1336 0
1339
+ 1337 0
1340
+ 1338 0
1341
+ 1339 1
1342
+ 1340 0
1343
+ 1341 0
1344
+ 1342 0
1345
+ 1343 0
1346
+ 1344 1
1347
+ 1345 1
1348
+ 1346 1
1349
+ 1347 1
1350
+ 1348 1
1351
+ 1349 1
1352
+ 1350 1
1353
+ 1351 1
1354
+ 1352 1
1355
+ 1353 0
1356
+ 1354 0
1357
+ 1355 0
1358
+ 1356 1
1359
+ 1357 1
1360
+ 1358 0
1361
+ 1359 1
1362
+ 1360 1
1363
+ 1361 1
1364
+ 1362 1
1365
+ 1363 0
1366
+ 1364 1
1367
+ 1365 1
1368
+ 1366 0
1369
+ 1367 0
1370
+ 1368 1
1371
+ 1369 1
1372
+ 1370 0
1373
+ 1371 0
1374
+ 1372 1
1375
+ 1373 1
1376
+ 1374 0
1377
+ 1375 1
1378
+ 1376 0
1379
+ 1377 1
1380
+ 1378 1
1381
+ 1379 1
1382
+ 1380 1
1383
+ 1381 1
1384
+ 1382 1
1385
+ 1383 0
1386
+ 1384 1
1387
+ 1385 1
1388
+ 1386 0
1389
+ 1387 1
1390
+ 1388 1
1391
+ 1389 1
1392
+ 1390 1
1393
+ 1391 0
1394
+ 1392 0
1395
+ 1393 1
1396
+ 1394 1
1397
+ 1395 1
1398
+ 1396 0
1399
+ 1397 1
1400
+ 1398 1
1401
+ 1399 0
1402
+ 1400 0
1403
+ 1401 0
1404
+ 1402 1
1405
+ 1403 0
1406
+ 1404 1
1407
+ 1405 0
1408
+ 1406 1
1409
+ 1407 1
1410
+ 1408 1
1411
+ 1409 1
1412
+ 1410 0
1413
+ 1411 1
1414
+ 1412 1
1415
+ 1413 1
1416
+ 1414 0
1417
+ 1415 1
1418
+ 1416 1
1419
+ 1417 0
1420
+ 1418 1
1421
+ 1419 1
1422
+ 1420 1
1423
+ 1421 1
1424
+ 1422 0
1425
+ 1423 0
1426
+ 1424 0
1427
+ 1425 1
1428
+ 1426 0
1429
+ 1427 0
1430
+ 1428 0
1431
+ 1429 1
1432
+ 1430 0
1433
+ 1431 1
1434
+ 1432 1
1435
+ 1433 0
1436
+ 1434 1
1437
+ 1435 0
1438
+ 1436 1
1439
+ 1437 1
1440
+ 1438 1
1441
+ 1439 1
1442
+ 1440 0
1443
+ 1441 1
1444
+ 1442 0
1445
+ 1443 0
1446
+ 1444 0
1447
+ 1445 1
1448
+ 1446 1
1449
+ 1447 1
1450
+ 1448 0
1451
+ 1449 1
1452
+ 1450 1
1453
+ 1451 0
1454
+ 1452 1
1455
+ 1453 1
1456
+ 1454 1
1457
+ 1455 1
1458
+ 1456 1
1459
+ 1457 1
1460
+ 1458 1
1461
+ 1459 1
1462
+ 1460 1
1463
+ 1461 1
1464
+ 1462 1
1465
+ 1463 0
1466
+ 1464 1
1467
+ 1465 1
1468
+ 1466 1
1469
+ 1467 0
1470
+ 1468 1
1471
+ 1469 0
1472
+ 1470 0
1473
+ 1471 1
1474
+ 1472 0
1475
+ 1473 0
1476
+ 1474 1
1477
+ 1475 1
1478
+ 1476 0
1479
+ 1477 1
1480
+ 1478 0
1481
+ 1479 0
1482
+ 1480 1
1483
+ 1481 0
1484
+ 1482 1
1485
+ 1483 0
1486
+ 1484 0
1487
+ 1485 1
1488
+ 1486 0
1489
+ 1487 0
1490
+ 1488 1
1491
+ 1489 1
1492
+ 1490 0
1493
+ 1491 1
1494
+ 1492 1
1495
+ 1493 0
1496
+ 1494 0
1497
+ 1495 1
1498
+ 1496 1
1499
+ 1497 0
1500
+ 1498 0
1501
+ 1499 0
1502
+ 1500 1
1503
+ 1501 1
1504
+ 1502 1
1505
+ 1503 0
1506
+ 1504 0
1507
+ 1505 1
1508
+ 1506 1
1509
+ 1507 1
1510
+ 1508 1
1511
+ 1509 0
1512
+ 1510 1
1513
+ 1511 1
1514
+ 1512 0
1515
+ 1513 1
1516
+ 1514 0
1517
+ 1515 1
1518
+ 1516 0
1519
+ 1517 1
1520
+ 1518 0
1521
+ 1519 0
1522
+ 1520 0
1523
+ 1521 0
1524
+ 1522 0
1525
+ 1523 1
1526
+ 1524 1
1527
+ 1525 1
1528
+ 1526 1
1529
+ 1527 1
1530
+ 1528 0
1531
+ 1529 1
1532
+ 1530 1
1533
+ 1531 1
1534
+ 1532 1
1535
+ 1533 1
1536
+ 1534 1
1537
+ 1535 0
1538
+ 1536 1
1539
+ 1537 0
1540
+ 1538 1
1541
+ 1539 0
1542
+ 1540 0
1543
+ 1541 1
1544
+ 1542 0
1545
+ 1543 1
1546
+ 1544 0
1547
+ 1545 0
1548
+ 1546 1
1549
+ 1547 0
1550
+ 1548 1
1551
+ 1549 1
1552
+ 1550 1
1553
+ 1551 0
1554
+ 1552 0
1555
+ 1553 0
1556
+ 1554 1
1557
+ 1555 1
1558
+ 1556 0
1559
+ 1557 1
1560
+ 1558 0
1561
+ 1559 1
1562
+ 1560 1
1563
+ 1561 0
1564
+ 1562 1
1565
+ 1563 1
1566
+ 1564 0
1567
+ 1565 0
1568
+ 1566 1
1569
+ 1567 1
1570
+ 1568 1
1571
+ 1569 1
1572
+ 1570 0
1573
+ 1571 1
1574
+ 1572 0
1575
+ 1573 0
1576
+ 1574 1
1577
+ 1575 1
1578
+ 1576 1
1579
+ 1577 0
1580
+ 1578 0
1581
+ 1579 1
1582
+ 1580 1
1583
+ 1581 1
1584
+ 1582 1
1585
+ 1583 1
1586
+ 1584 0
1587
+ 1585 1
1588
+ 1586 1
1589
+ 1587 1
1590
+ 1588 1
1591
+ 1589 1
1592
+ 1590 0
1593
+ 1591 1
1594
+ 1592 1
1595
+ 1593 0
1596
+ 1594 1
1597
+ 1595 1
1598
+ 1596 1
1599
+ 1597 0
1600
+ 1598 0
1601
+ 1599 1
1602
+ 1600 0
1603
+ 1601 1
1604
+ 1602 1
1605
+ 1603 0
1606
+ 1604 0
1607
+ 1605 0
1608
+ 1606 1
1609
+ 1607 1
1610
+ 1608 0
1611
+ 1609 1
1612
+ 1610 0
1613
+ 1611 1
1614
+ 1612 1
1615
+ 1613 1
1616
+ 1614 1
1617
+ 1615 1
1618
+ 1616 1
1619
+ 1617 0
1620
+ 1618 1
1621
+ 1619 1
1622
+ 1620 0
1623
+ 1621 0
1624
+ 1622 1
1625
+ 1623 1
1626
+ 1624 0
1627
+ 1625 0
1628
+ 1626 1
1629
+ 1627 1
1630
+ 1628 1
1631
+ 1629 0
1632
+ 1630 0
1633
+ 1631 0
1634
+ 1632 1
1635
+ 1633 1
1636
+ 1634 1
1637
+ 1635 1
1638
+ 1636 1
1639
+ 1637 1
1640
+ 1638 0
1641
+ 1639 0
1642
+ 1640 0
1643
+ 1641 1
1644
+ 1642 1
1645
+ 1643 1
1646
+ 1644 1
1647
+ 1645 1
1648
+ 1646 1
1649
+ 1647 0
1650
+ 1648 1
1651
+ 1649 1
1652
+ 1650 0
1653
+ 1651 0
1654
+ 1652 0
1655
+ 1653 1
1656
+ 1654 1
1657
+ 1655 1
1658
+ 1656 0
1659
+ 1657 0
1660
+ 1658 1
1661
+ 1659 0
1662
+ 1660 0
1663
+ 1661 1
1664
+ 1662 1
1665
+ 1663 1
1666
+ 1664 0
1667
+ 1665 0
1668
+ 1666 0
1669
+ 1667 1
1670
+ 1668 0
1671
+ 1669 1
1672
+ 1670 1
1673
+ 1671 0
1674
+ 1672 1
1675
+ 1673 1
1676
+ 1674 1
1677
+ 1675 1
1678
+ 1676 1
1679
+ 1677 1
1680
+ 1678 1
1681
+ 1679 1
1682
+ 1680 1
1683
+ 1681 1
1684
+ 1682 1
1685
+ 1683 0
1686
+ 1684 1
1687
+ 1685 1
1688
+ 1686 0
1689
+ 1687 0
1690
+ 1688 1
1691
+ 1689 1
1692
+ 1690 1
1693
+ 1691 0
1694
+ 1692 1
1695
+ 1693 1
1696
+ 1694 0
1697
+ 1695 1
1698
+ 1696 1
1699
+ 1697 0
1700
+ 1698 0
1701
+ 1699 1
1702
+ 1700 0
1703
+ 1701 1
1704
+ 1702 0
1705
+ 1703 1
1706
+ 1704 1
1707
+ 1705 1
1708
+ 1706 0
1709
+ 1707 0
1710
+ 1708 1
1711
+ 1709 0
1712
+ 1710 1
1713
+ 1711 1
1714
+ 1712 1
1715
+ 1713 0
1716
+ 1714 1
1717
+ 1715 1
1718
+ 1716 1
1719
+ 1717 1
1720
+ 1718 0
1721
+ 1719 1
1722
+ 1720 0
1723
+ 1721 0
1724
+ 1722 0
1725
+ 1723 1
1726
+ 1724 1
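MRPC.tsv follows the GLUE test-submission layout: a header row ("index prediction") followed by one binary prediction per test example (indices 0 through 1724 above). A small sketch for reading it back, assuming the file is tab-separated as the .tsv extension suggests; the path is copied from the file list and is otherwise a hypothetical local location:

import csv
from collections import Counter

# Hypothetical local path to the prediction file shown above.
tsv_path = "nlu/glue_exp/mrpc/3dr0.1,mlr2e-04,clr2e-03,ep=30.0t=18d22h26m32/MRPC.tsv"

with open(tsv_path, newline="") as f:
    rows = list(csv.DictReader(f, delimiter="\t"))

# Expect 1,725 rows, each labelled 0 (not a paraphrase) or 1 (paraphrase).
counts = Counter(row["prediction"] for row in rows)
print(len(rows), dict(counts))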
nlu/glue_exp/mrpc/3dr0.1,mlr2e-04,clr2e-03,ep=30.0t=18d22h26m32/all_results.json ADDED
@@ -0,0 +1,11 @@
+ {
+ "epoch": 30.0,
+ "eval_accuracy": 0.9093137254901961,
+ "eval_combined_score": 0.9221436817257659,
+ "eval_f1": 0.9349736379613357,
+ "eval_loss": 0.6781179308891296,
+ "eval_runtime": 0.5908,
+ "eval_samples": 408,
+ "eval_samples_per_second": 690.621,
+ "eval_steps_per_second": 1.693
+ }
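The eval_combined_score in this file appears to be the usual GLUE MRPC combination, i.e. the mean of accuracy and F1. A quick arithmetic check against the values reported above (pure calculation, no assumptions beyond that averaging convention):

# Values copied from all_results.json above.
eval_accuracy = 0.9093137254901961
eval_f1 = 0.9349736379613357

# Averaging accuracy and F1 reproduces the reported eval_combined_score.
combined = (eval_accuracy + eval_f1) / 2
print(combined)  # 0.9221436817257659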
nlu/glue_exp/mrpc/3dr0.1,mlr2e-04,clr2e-03,ep=30.0t=18d22h26m32/eval_results.json ADDED
@@ -0,0 +1,11 @@
+ {
+ "epoch": 30.0,
+ "eval_accuracy": 0.9093137254901961,
+ "eval_combined_score": 0.9221436817257659,
+ "eval_f1": 0.9349736379613357,
+ "eval_loss": 0.6781179308891296,
+ "eval_runtime": 0.5908,
+ "eval_samples": 408,
+ "eval_samples_per_second": 690.621,
+ "eval_steps_per_second": 1.693
+ }
nlu/glue_exp/mrpc/3dr0.1,mlr2e-04,clr2e-03,ep=30.0t=18d22h26m32/ft/adapter_config.json ADDED
@@ -0,0 +1,26 @@
+ {
+ "T": 1.0,
+ "base_model_name_or_path": "microsoft/deberta-v3-base",
+ "bias": "none",
+ "drop_out": 0.1,
+ "inference_mode": false,
+ "layers_to_transform": null,
+ "modules_to_save": [
+ "classifier",
+ "pooler"
+ ],
+ "num_rotations": 1,
+ "peft_type": "ROTATION",
+ "r": 4,
+ "revision": null,
+ "target_modules": [
+ "attention.output.dense",
+ "query_proj",
+ "intermediate.dense",
+ "key_proj",
+ "output.dense",
+ "value_proj"
+ ],
+ "target_modules_to_skip": null,
+ "task_type": "SEQ_CLS"
+ }
nlu/glue_exp/mrpc/3dr0.1,mlr2e-04,clr2e-03,ep=30.0t=18d22h26m32/ft/added_tokens.json ADDED
@@ -0,0 +1,3 @@
+ {
+ "[MASK]": 128000
+ }
nlu/glue_exp/mrpc/3dr0.1,mlr2e-04,clr2e-03,ep=30.0t=18d22h26m32/ft/special_tokens_map.json ADDED
@@ -0,0 +1,15 @@
+ {
+ "bos_token": "[CLS]",
+ "cls_token": "[CLS]",
+ "eos_token": "[SEP]",
+ "mask_token": "[MASK]",
+ "pad_token": "[PAD]",
+ "sep_token": "[SEP]",
+ "unk_token": {
+ "content": "[UNK]",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false
+ }
+ }
nlu/glue_exp/mrpc/3dr0.1,mlr2e-04,clr2e-03,ep=30.0t=18d22h26m32/ft/spm.model ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c679fbf93643d19aab7ee10c0b99e460bdbc02fedf34b92b05af343b4af586fd
+ size 2464616
nlu/glue_exp/mrpc/3dr0.1,mlr2e-04,clr2e-03,ep=30.0t=18d22h26m32/ft/tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
nlu/glue_exp/mrpc/3dr0.1,mlr2e-04,clr2e-03,ep=30.0t=18d22h26m32/ft/tokenizer_config.json ADDED
@@ -0,0 +1,60 @@
+ {
+ "added_tokens_decoder": {
+ "0": {
+ "content": "[PAD]",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "1": {
+ "content": "[CLS]",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "2": {
+ "content": "[SEP]",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "3": {
+ "content": "[UNK]",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128000": {
+ "content": "[MASK]",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ }
+ },
+ "bos_token": "[CLS]",
+ "clean_up_tokenization_spaces": false,
+ "cls_token": "[CLS]",
+ "do_lower_case": false,
+ "eos_token": "[SEP]",
+ "extra_special_tokens": {},
+ "mask_token": "[MASK]",
+ "model_max_length": 512,
+ "pad_token": "[PAD]",
+ "padding_side": "right",
+ "sep_token": "[SEP]",
+ "sp_model_kwargs": {},
+ "split_by_punct": false,
+ "tokenizer_class": "DebertaV2Tokenizer",
+ "unk_token": "[UNK]",
+ "vocab_type": "spm"
+ }
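The ft/ directory carries a complete DeBERTa-v3 SentencePiece tokenizer (spm.model, tokenizer.json, tokenizer_config.json, special_tokens_map.json, added_tokens.json), so it should be loadable directly with transformers. A minimal sketch under that assumption; the directory path is copied from the file list and is otherwise a hypothetical local location, and this is not the repo's own loading code:

from transformers import AutoTokenizer

# Hypothetical local checkout of the ft/ directory shown above.
tok_dir = "nlu/glue_exp/mrpc/3dr0.1,mlr2e-04,clr2e-03,ep=30.0t=18d22h26m32/ft"

# tokenizer_config.json declares DebertaV2Tokenizer with a 512-token model_max_length.
tokenizer = AutoTokenizer.from_pretrained(tok_dir)
enc = tokenizer("He said the food was good.", "The food was delicious, he said.")
print(tokenizer.__class__.__name__, len(enc["input_ids"]))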
nlu/glue_exp/mrpc/3dr0.1,mlr2e-04,clr2e-03,ep=30.0t=18d22h26m32/ft2/adapter_config.json ADDED
@@ -0,0 +1,26 @@
+ {
+ "T": 1.0,
+ "base_model_name_or_path": "microsoft/deberta-v3-base",
+ "bias": "none",
+ "drop_out": 0.1,
+ "inference_mode": true,
+ "layers_to_transform": null,
+ "modules_to_save": [
+ "classifier",
+ "pooler"
+ ],
+ "num_rotations": 1,
+ "peft_type": "ROTATION",
+ "r": 4,
+ "revision": null,
+ "target_modules": [
+ "attention.output.dense",
+ "query_proj",
+ "intermediate.dense",
+ "key_proj",
+ "output.dense",
+ "value_proj"
+ ],
+ "target_modules_to_skip": null,
+ "task_type": "SEQ_CLS"
+ }
nlu/glue_exp/mrpc/3dr0.1,mlr2e-04,clr2e-03,ep=30.0t=18d22h26m32/ft2/adapter_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2dcdd19e8394363f6c95f5e8945212598c8421cd08f2012465c1a7cef07f05b7
+ size 7449859
nlu/glue_exp/mrpc/3dr0.1,mlr2e-04,clr2e-03,ep=30.0t=18d22h26m32/trainer_state.json ADDED
@@ -0,0 +1,655 @@
1
+ {
2
+ "best_global_step": 2400,
3
+ "best_metric": 0.9093137254901961,
4
+ "best_model_checkpoint": "./glue_exp/mrpc/dr0.1,mlr2e-04,clr2e-03,ep=30.0t=18d22h26m32/checkpoint-2400",
5
+ "epoch": 30.0,
6
+ "eval_steps": 100,
7
+ "global_step": 3450,
8
+ "is_hyper_param_search": false,
9
+ "is_local_process_zero": true,
10
+ "is_world_process_zero": true,
11
+ "log_history": [
12
+ {
13
+ "epoch": 0.8695652173913043,
14
+ "grad_norm": 9.207517623901367,
15
+ "learning_rate": 0.00198,
16
+ "loss": 0.6035,
17
+ "step": 100
18
+ },
19
+ {
20
+ "epoch": 0.8695652173913043,
21
+ "eval_accuracy": 0.8014705882352942,
22
+ "eval_combined_score": 0.8220945174186179,
23
+ "eval_f1": 0.8427184466019417,
24
+ "eval_loss": 0.48600342869758606,
25
+ "eval_runtime": 0.7374,
26
+ "eval_samples_per_second": 553.332,
27
+ "eval_steps_per_second": 1.356,
28
+ "step": 100
29
+ },
30
+ {
31
+ "epoch": 1.7391304347826086,
32
+ "grad_norm": 2.2224478721618652,
33
+ "learning_rate": 0.0019956152348614225,
34
+ "loss": 0.4063,
35
+ "step": 200
36
+ },
37
+ {
38
+ "epoch": 1.7391304347826086,
39
+ "eval_accuracy": 0.8602941176470589,
40
+ "eval_combined_score": 0.8820863505604604,
41
+ "eval_f1": 0.9038785834738617,
42
+ "eval_loss": 0.35205960273742676,
43
+ "eval_runtime": 0.6419,
44
+ "eval_samples_per_second": 635.594,
45
+ "eval_steps_per_second": 1.558,
46
+ "step": 200
47
+ },
48
+ {
49
+ "epoch": 2.608695652173913,
50
+ "grad_norm": 3.051316261291504,
51
+ "learning_rate": 0.0019823226955326743,
52
+ "loss": 0.3156,
53
+ "step": 300
54
+ },
55
+ {
56
+ "epoch": 2.608695652173913,
57
+ "eval_accuracy": 0.8774509803921569,
58
+ "eval_combined_score": 0.8963526088401462,
59
+ "eval_f1": 0.9152542372881356,
60
+ "eval_loss": 0.2953622341156006,
61
+ "eval_runtime": 0.6484,
62
+ "eval_samples_per_second": 629.277,
63
+ "eval_steps_per_second": 1.542,
64
+ "step": 300
65
+ },
66
+ {
67
+ "epoch": 3.4782608695652173,
68
+ "grad_norm": 2.0906338691711426,
69
+ "learning_rate": 0.0019602408686963785,
70
+ "loss": 0.251,
71
+ "step": 400
72
+ },
73
+ {
74
+ "epoch": 3.4782608695652173,
75
+ "eval_accuracy": 0.8823529411764706,
76
+ "eval_combined_score": 0.9006359300476947,
77
+ "eval_f1": 0.918918918918919,
78
+ "eval_loss": 0.2901740074157715,
79
+ "eval_runtime": 1.1062,
80
+ "eval_samples_per_second": 368.828,
81
+ "eval_steps_per_second": 0.904,
82
+ "step": 400
83
+ },
84
+ {
85
+ "epoch": 4.3478260869565215,
86
+ "grad_norm": 2.9156606197357178,
87
+ "learning_rate": 0.0019295673304908422,
88
+ "loss": 0.2437,
89
+ "step": 500
90
+ },
91
+ {
92
+ "epoch": 4.3478260869565215,
93
+ "eval_accuracy": 0.8970588235294118,
94
+ "eval_combined_score": 0.9120710784313726,
95
+ "eval_f1": 0.9270833333333334,
96
+ "eval_loss": 0.27514174580574036,
97
+ "eval_runtime": 0.5936,
98
+ "eval_samples_per_second": 687.364,
99
+ "eval_steps_per_second": 1.685,
100
+ "step": 500
101
+ },
102
+ {
103
+ "epoch": 5.217391304347826,
104
+ "grad_norm": 10.352530479431152,
105
+ "learning_rate": 0.001890576530999922,
106
+ "loss": 0.1758,
107
+ "step": 600
108
+ },
109
+ {
110
+ "epoch": 5.217391304347826,
111
+ "eval_accuracy": 0.9019607843137255,
112
+ "eval_combined_score": 0.916733816814397,
113
+ "eval_f1": 0.9315068493150684,
114
+ "eval_loss": 0.3504975140094757,
115
+ "eval_runtime": 0.6425,
116
+ "eval_samples_per_second": 635.031,
117
+ "eval_steps_per_second": 1.556,
118
+ "step": 600
119
+ },
120
+ {
121
+ "epoch": 6.086956521739131,
122
+ "grad_norm": 3.138051748275757,
123
+ "learning_rate": 0.0018436173386234143,
124
+ "loss": 0.1481,
125
+ "step": 700
126
+ },
127
+ {
128
+ "epoch": 6.086956521739131,
129
+ "eval_accuracy": 0.8651960784313726,
130
+ "eval_combined_score": 0.8866881894661036,
131
+ "eval_f1": 0.9081803005008348,
132
+ "eval_loss": 0.5320879220962524,
133
+ "eval_runtime": 0.6489,
134
+ "eval_samples_per_second": 628.73,
135
+ "eval_steps_per_second": 1.541,
136
+ "step": 700
137
+ },
138
+ {
139
+ "epoch": 6.956521739130435,
140
+ "grad_norm": 8.193928718566895,
141
+ "learning_rate": 0.001789109918592965,
142
+ "loss": 0.1325,
143
+ "step": 800
144
+ },
145
+ {
146
+ "epoch": 6.956521739130435,
147
+ "eval_accuracy": 0.9019607843137255,
148
+ "eval_combined_score": 0.916733816814397,
149
+ "eval_f1": 0.9315068493150684,
150
+ "eval_loss": 0.4476175606250763,
151
+ "eval_runtime": 0.6464,
152
+ "eval_samples_per_second": 631.174,
153
+ "eval_steps_per_second": 1.547,
154
+ "step": 800
155
+ },
156
+ {
157
+ "epoch": 7.826086956521739,
158
+ "grad_norm": 3.793989419937134,
159
+ "learning_rate": 0.001727541973562826,
160
+ "loss": 0.0984,
161
+ "step": 900
162
+ },
163
+ {
164
+ "epoch": 7.826086956521739,
165
+ "eval_accuracy": 0.8897058823529411,
166
+ "eval_combined_score": 0.9062594591867621,
167
+ "eval_f1": 0.9228130360205832,
168
+ "eval_loss": 0.4270480275154114,
169
+ "eval_runtime": 0.6437,
170
+ "eval_samples_per_second": 633.797,
171
+ "eval_steps_per_second": 1.553,
172
+ "step": 900
173
+ },
174
+ {
175
+ "epoch": 8.695652173913043,
176
+ "grad_norm": 4.246761322021484,
177
+ "learning_rate": 0.001659464379912601,
178
+ "loss": 0.0718,
179
+ "step": 1000
180
+ },
181
+ {
182
+ "epoch": 8.695652173913043,
183
+ "eval_accuracy": 0.8897058823529411,
184
+ "eval_combined_score": 0.9062594591867621,
185
+ "eval_f1": 0.9228130360205832,
186
+ "eval_loss": 0.4241102635860443,
187
+ "eval_runtime": 0.6465,
188
+ "eval_samples_per_second": 631.11,
189
+ "eval_steps_per_second": 1.547,
190
+ "step": 1000
191
+ },
192
+ {
193
+ "epoch": 9.565217391304348,
194
+ "grad_norm": 2.9366676807403564,
195
+ "learning_rate": 0.0015854862588059726,
196
+ "loss": 0.0655,
197
+ "step": 1100
198
+ },
199
+ {
200
+ "epoch": 9.565217391304348,
201
+ "eval_accuracy": 0.8995098039215687,
202
+ "eval_combined_score": 0.9141027280477408,
203
+ "eval_f1": 0.928695652173913,
204
+ "eval_loss": 0.5752078294754028,
205
+ "eval_runtime": 0.6378,
206
+ "eval_samples_per_second": 639.665,
207
+ "eval_steps_per_second": 1.568,
208
+ "step": 1100
209
+ },
210
+ {
211
+ "epoch": 10.434782608695652,
212
+ "grad_norm": 8.673500061035156,
213
+ "learning_rate": 0.0015062695261068735,
214
+ "loss": 0.0697,
215
+ "step": 1200
216
+ },
217
+ {
218
+ "epoch": 10.434782608695652,
219
+ "eval_accuracy": 0.8897058823529411,
220
+ "eval_combined_score": 0.9044579681064526,
221
+ "eval_f1": 0.9192100538599641,
222
+ "eval_loss": 0.5210642218589783,
223
+ "eval_runtime": 0.6405,
224
+ "eval_samples_per_second": 636.968,
225
+ "eval_steps_per_second": 1.561,
226
+ "step": 1200
227
+ },
228
+ {
229
+ "epoch": 11.304347826086957,
230
+ "grad_norm": 18.172353744506836,
231
+ "learning_rate": 0.0014225229699174897,
232
+ "loss": 0.0368,
233
+ "step": 1300
234
+ },
235
+ {
236
+ "epoch": 11.304347826086957,
237
+ "eval_accuracy": 0.8848039215686274,
238
+ "eval_combined_score": 0.9000596184419714,
239
+ "eval_f1": 0.9153153153153153,
240
+ "eval_loss": 0.8053569197654724,
241
+ "eval_runtime": 0.6492,
242
+ "eval_samples_per_second": 628.429,
243
+ "eval_steps_per_second": 1.54,
244
+ "step": 1300
245
+ },
246
+ {
247
+ "epoch": 12.173913043478262,
248
+ "grad_norm": 6.8164215087890625,
249
+ "learning_rate": 0.0013349959087290495,
250
+ "loss": 0.0495,
251
+ "step": 1400
252
+ },
253
+ {
254
+ "epoch": 12.173913043478262,
255
+ "eval_accuracy": 0.8921568627450981,
256
+ "eval_combined_score": 0.9085357692565081,
257
+ "eval_f1": 0.9249146757679181,
258
+ "eval_loss": 0.613058865070343,
259
+ "eval_runtime": 0.636,
260
+ "eval_samples_per_second": 641.507,
261
+ "eval_steps_per_second": 1.572,
262
+ "step": 1400
263
+ },
264
+ {
265
+ "epoch": 13.043478260869565,
266
+ "grad_norm": 2.474440097808838,
267
+ "learning_rate": 0.001244471486928804,
268
+ "loss": 0.041,
269
+ "step": 1500
270
+ },
271
+ {
272
+ "epoch": 13.043478260869565,
273
+ "eval_accuracy": 0.9019607843137255,
274
+ "eval_combined_score": 0.9156447031109264,
275
+ "eval_f1": 0.9293286219081273,
276
+ "eval_loss": 0.5219125151634216,
277
+ "eval_runtime": 0.6377,
278
+ "eval_samples_per_second": 639.775,
279
+ "eval_steps_per_second": 1.568,
280
+ "step": 1500
281
+ },
282
+ {
283
+ "epoch": 13.91304347826087,
284
+ "grad_norm": 1.85358464717865,
285
+ "learning_rate": 0.0011517596676513472,
286
+ "loss": 0.0291,
287
+ "step": 1600
288
+ },
289
+ {
290
+ "epoch": 13.91304347826087,
291
+ "eval_accuracy": 0.8995098039215687,
292
+ "eval_combined_score": 0.9141027280477408,
293
+ "eval_f1": 0.928695652173913,
294
+ "eval_loss": 0.6180567741394043,
295
+ "eval_runtime": 0.6405,
296
+ "eval_samples_per_second": 637.046,
297
+ "eval_steps_per_second": 1.561,
298
+ "step": 1600
299
+ },
300
+ {
301
+ "epoch": 14.782608695652174,
302
+ "grad_norm": 0.012138199992477894,
303
+ "learning_rate": 0.001057689985670419,
304
+ "loss": 0.0245,
305
+ "step": 1700
306
+ },
307
+ {
308
+ "epoch": 14.782608695652174,
309
+ "eval_accuracy": 0.8970588235294118,
310
+ "eval_combined_score": 0.9111628637220013,
311
+ "eval_f1": 0.9252669039145908,
312
+ "eval_loss": 0.6767077445983887,
313
+ "eval_runtime": 0.6449,
314
+ "eval_samples_per_second": 632.646,
315
+ "eval_steps_per_second": 1.551,
316
+ "step": 1700
317
+ },
318
+ {
319
+ "epoch": 15.652173913043478,
320
+ "grad_norm": 0.6696063280105591,
321
+ "learning_rate": 0.0009631041251743559,
322
+ "loss": 0.0248,
323
+ "step": 1800
324
+ },
325
+ {
326
+ "epoch": 15.652173913043478,
327
+ "eval_accuracy": 0.9068627450980392,
328
+ "eval_combined_score": 0.9197434292866082,
329
+ "eval_f1": 0.9326241134751773,
330
+ "eval_loss": 0.6179582476615906,
331
+ "eval_runtime": 0.6502,
332
+ "eval_samples_per_second": 627.542,
333
+ "eval_steps_per_second": 1.538,
334
+ "step": 1800
335
+ },
336
+ {
337
+ "epoch": 16.52173913043478,
338
+ "grad_norm": 0.008201290853321552,
339
+ "learning_rate": 0.0008688483888352111,
340
+ "loss": 0.025,
341
+ "step": 1900
342
+ },
343
+ {
344
+ "epoch": 16.52173913043478,
345
+ "eval_accuracy": 0.8921568627450981,
346
+ "eval_combined_score": 0.906651908075058,
347
+ "eval_f1": 0.921146953405018,
348
+ "eval_loss": 0.601223349571228,
349
+ "eval_runtime": 0.6449,
350
+ "eval_samples_per_second": 632.69,
351
+ "eval_steps_per_second": 1.551,
352
+ "step": 1900
353
+ },
354
+ {
355
+ "epoch": 17.391304347826086,
356
+ "grad_norm": 0.0032772510312497616,
357
+ "learning_rate": 0.000775766125554205,
358
+ "loss": 0.0166,
359
+ "step": 2000
360
+ },
361
+ {
362
+ "epoch": 17.391304347826086,
363
+ "eval_accuracy": 0.9044117647058824,
364
+ "eval_combined_score": 0.9175700031344687,
365
+ "eval_f1": 0.9307282415630551,
366
+ "eval_loss": 0.5948611497879028,
367
+ "eval_runtime": 0.6444,
368
+ "eval_samples_per_second": 633.184,
369
+ "eval_steps_per_second": 1.552,
370
+ "step": 2000
371
+ },
372
+ {
373
+ "epoch": 18.26086956521739,
374
+ "grad_norm": 0.004750708118081093,
375
+ "learning_rate": 0.0006846901846358999,
376
+ "loss": 0.0216,
377
+ "step": 2100
378
+ },
379
+ {
380
+ "epoch": 18.26086956521739,
381
+ "eval_accuracy": 0.9019607843137255,
382
+ "eval_combined_score": 0.9158926728586172,
383
+ "eval_f1": 0.9298245614035088,
384
+ "eval_loss": 0.6154604554176331,
385
+ "eval_runtime": 0.6408,
386
+ "eval_samples_per_second": 636.704,
387
+ "eval_steps_per_second": 1.561,
388
+ "step": 2100
389
+ },
390
+ {
391
+ "epoch": 19.130434782608695,
392
+ "grad_norm": 4.844301700592041,
393
+ "learning_rate": 0.0005964354639070397,
394
+ "loss": 0.0138,
395
+ "step": 2200
396
+ },
397
+ {
398
+ "epoch": 19.130434782608695,
399
+ "eval_accuracy": 0.9068627450980392,
400
+ "eval_combined_score": 0.91986246795538,
401
+ "eval_f1": 0.9328621908127208,
402
+ "eval_loss": 0.6498541831970215,
403
+ "eval_runtime": 0.6425,
404
+ "eval_samples_per_second": 635.066,
405
+ "eval_steps_per_second": 1.557,
406
+ "step": 2200
407
+ },
408
+ {
409
+ "epoch": 20.0,
410
+ "grad_norm": 0.0014483303530141711,
411
+ "learning_rate": 0.0005117916184554203,
412
+ "loss": 0.013,
413
+ "step": 2300
414
+ },
415
+ {
416
+ "epoch": 20.0,
417
+ "eval_accuracy": 0.9019607843137255,
418
+ "eval_combined_score": 0.9160153571918278,
419
+ "eval_f1": 0.9300699300699301,
420
+ "eval_loss": 0.735741138458252,
421
+ "eval_runtime": 0.643,
422
+ "eval_samples_per_second": 634.57,
423
+ "eval_steps_per_second": 1.555,
424
+ "step": 2300
425
+ },
426
+ {
427
+ "epoch": 20.869565217391305,
428
+ "grad_norm": 1.6116704940795898,
429
+ "learning_rate": 0.0004315159952270119,
430
+ "loss": 0.0112,
431
+ "step": 2400
432
+ },
433
+ {
434
+ "epoch": 20.869565217391305,
435
+ "eval_accuracy": 0.9093137254901961,
436
+ "eval_combined_score": 0.9221436817257659,
437
+ "eval_f1": 0.9349736379613357,
438
+ "eval_loss": 0.6781179308891296,
439
+ "eval_runtime": 0.6388,
440
+ "eval_samples_per_second": 638.67,
441
+ "eval_steps_per_second": 1.565,
442
+ "step": 2400
443
+ },
444
+ {
445
+ "epoch": 21.73913043478261,
446
+ "grad_norm": 2.125593662261963,
447
+ "learning_rate": 0.0003563268566987077,
448
+ "loss": 0.0159,
449
+ "step": 2500
450
+ },
451
+ {
452
+ "epoch": 21.73913043478261,
453
+ "eval_accuracy": 0.9044117647058824,
454
+ "eval_combined_score": 0.9182928388746803,
455
+ "eval_f1": 0.9321739130434783,
456
+ "eval_loss": 0.6766356825828552,
457
+ "eval_runtime": 0.6423,
458
+ "eval_samples_per_second": 635.222,
459
+ "eval_steps_per_second": 1.557,
460
+ "step": 2500
461
+ },
462
+ {
463
+ "epoch": 22.608695652173914,
464
+ "grad_norm": 0.004114523064345121,
465
+ "learning_rate": 0.0002868969542575783,
466
+ "loss": 0.0136,
467
+ "step": 2600
468
+ },
469
+ {
470
+ "epoch": 22.608695652173914,
471
+ "eval_accuracy": 0.9019607843137255,
472
+ "eval_combined_score": 0.9157691245512289,
473
+ "eval_f1": 0.9295774647887324,
474
+ "eval_loss": 0.6465615630149841,
475
+ "eval_runtime": 0.6393,
476
+ "eval_samples_per_second": 638.166,
477
+ "eval_steps_per_second": 1.564,
478
+ "step": 2600
479
+ },
480
+ {
481
+ "epoch": 23.47826086956522,
482
+ "grad_norm": 0.18351468443870544,
483
+ "learning_rate": 0.00022384750878852333,
484
+ "loss": 0.0069,
485
+ "step": 2700
486
+ },
487
+ {
488
+ "epoch": 23.47826086956522,
489
+ "eval_accuracy": 0.8970588235294118,
490
+ "eval_combined_score": 0.9111628637220013,
491
+ "eval_f1": 0.9252669039145908,
492
+ "eval_loss": 0.680911123752594,
493
+ "eval_runtime": 0.6413,
494
+ "eval_samples_per_second": 636.196,
495
+ "eval_steps_per_second": 1.559,
496
+ "step": 2700
497
+ },
498
+ {
499
+ "epoch": 24.347826086956523,
500
+ "grad_norm": 0.019911231473088264,
501
+ "learning_rate": 0.00016774265232874353,
502
+ "loss": 0.0088,
503
+ "step": 2800
504
+ },
505
+ {
506
+ "epoch": 24.347826086956523,
507
+ "eval_accuracy": 0.8995098039215687,
508
+ "eval_combined_score": 0.9135996991389148,
509
+ "eval_f1": 0.927689594356261,
510
+ "eval_loss": 0.715298056602478,
511
+ "eval_runtime": 0.6467,
512
+ "eval_samples_per_second": 630.906,
513
+ "eval_steps_per_second": 1.546,
514
+ "step": 2800
515
+ },
516
+ {
517
+ "epoch": 25.217391304347824,
518
+ "grad_norm": 0.0013759591383859515,
519
+ "learning_rate": 0.00011908438052207082,
520
+ "loss": 0.0075,
521
+ "step": 2900
522
+ },
523
+ {
524
+ "epoch": 25.217391304347824,
525
+ "eval_accuracy": 0.8995098039215687,
526
+ "eval_combined_score": 0.9133428238080312,
527
+ "eval_f1": 0.9271758436944938,
528
+ "eval_loss": 0.7201849222183228,
529
+ "eval_runtime": 0.5912,
530
+ "eval_samples_per_second": 690.116,
531
+ "eval_steps_per_second": 1.691,
532
+ "step": 2900
533
+ },
534
+ {
535
+ "epoch": 26.08695652173913,
536
+ "grad_norm": 10.307768821716309,
537
+ "learning_rate": 7.830806103584498e-05,
538
+ "loss": 0.0098,
539
+ "step": 3000
540
+ },
541
+ {
542
+ "epoch": 26.08695652173913,
543
+ "eval_accuracy": 0.8995098039215687,
544
+ "eval_combined_score": 0.9133428238080312,
545
+ "eval_f1": 0.9271758436944938,
546
+ "eval_loss": 0.7218621969223022,
547
+ "eval_runtime": 0.6462,
548
+ "eval_samples_per_second": 631.358,
549
+ "eval_steps_per_second": 1.547,
550
+ "step": 3000
551
+ },
552
+ {
553
+ "epoch": 26.956521739130434,
554
+ "grad_norm": 0.003199663246050477,
555
+ "learning_rate": 4.577853812857102e-05,
556
+ "loss": 0.003,
557
+ "step": 3100
558
+ },
559
+ {
560
+ "epoch": 26.956521739130434,
561
+ "eval_accuracy": 0.8970588235294118,
562
+ "eval_combined_score": 0.9112953692115144,
563
+ "eval_f1": 0.925531914893617,
564
+ "eval_loss": 0.7268513441085815,
565
+ "eval_runtime": 0.6394,
566
+ "eval_samples_per_second": 638.106,
567
+ "eval_steps_per_second": 1.564,
568
+ "step": 3100
569
+ },
570
+ {
571
+ "epoch": 27.82608695652174,
572
+ "grad_norm": 0.007014700211584568,
573
+ "learning_rate": 2.178686822255904e-05,
574
+ "loss": 0.006,
575
+ "step": 3200
576
+ },
577
+ {
578
+ "epoch": 27.82608695652174,
579
+ "eval_accuracy": 0.9019607843137255,
580
+ "eval_combined_score": 0.9156447031109264,
581
+ "eval_f1": 0.9293286219081273,
582
+ "eval_loss": 0.7328526377677917,
583
+ "eval_runtime": 0.6378,
584
+ "eval_samples_per_second": 639.702,
585
+ "eval_steps_per_second": 1.568,
586
+ "step": 3200
587
+ },
588
+ {
589
+ "epoch": 28.695652173913043,
590
+ "grad_norm": 0.006719864904880524,
591
+ "learning_rate": 6.547715689861789e-06,
592
+ "loss": 0.002,
593
+ "step": 3300
594
+ },
595
+ {
596
+ "epoch": 28.695652173913043,
597
+ "eval_accuracy": 0.9019607843137255,
598
+ "eval_combined_score": 0.9156447031109264,
599
+ "eval_f1": 0.9293286219081273,
600
+ "eval_loss": 0.735953688621521,
601
+ "eval_runtime": 0.648,
602
+ "eval_samples_per_second": 629.653,
603
+ "eval_steps_per_second": 1.543,
604
+ "step": 3300
605
+ },
606
+ {
607
+ "epoch": 29.565217391304348,
608
+ "grad_norm": 0.008782410994172096,
609
+ "learning_rate": 1.97432152599486e-07,
610
+ "loss": 0.0049,
611
+ "step": 3400
612
+ },
613
+ {
614
+ "epoch": 29.565217391304348,
615
+ "eval_accuracy": 0.9019607843137255,
616
+ "eval_combined_score": 0.9156447031109264,
617
+ "eval_f1": 0.9293286219081273,
618
+ "eval_loss": 0.7358721494674683,
619
+ "eval_runtime": 0.6472,
620
+ "eval_samples_per_second": 630.419,
621
+ "eval_steps_per_second": 1.545,
622
+ "step": 3400
623
+ },
624
+ {
625
+ "epoch": 30.0,
626
+ "step": 3450,
627
+ "total_flos": 1.83610838283264e+16,
628
+ "train_loss": 0.08607926879240119,
629
+ "train_runtime": 845.9298,
630
+ "train_samples_per_second": 130.082,
631
+ "train_steps_per_second": 4.078
632
+ }
633
+ ],
634
+ "logging_steps": 100,
635
+ "max_steps": 3450,
636
+ "num_input_tokens_seen": 0,
637
+ "num_train_epochs": 30,
638
+ "save_steps": 100,
639
+ "stateful_callbacks": {
640
+ "TrainerControl": {
641
+ "args": {
642
+ "should_epoch_stop": false,
643
+ "should_evaluate": false,
644
+ "should_log": false,
645
+ "should_save": true,
646
+ "should_training_stop": true
647
+ },
648
+ "attributes": {}
649
+ }
650
+ },
651
+ "total_flos": 1.83610838283264e+16,
652
+ "train_batch_size": 32,
653
+ "trial_name": null,
654
+ "trial_params": null
655
+ }
nlu/glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h32m05,r=4,s43/MRPC.tsv ADDED
@@ -0,0 +1,1726 @@
1
+ index prediction
2
+ 0 1
3
+ 1 1
4
+ 2 1
5
+ 3 0
6
+ 4 0
7
+ 5 1
8
+ 6 0
9
+ 7 1
10
+ 8 1
11
+ 9 0
12
+ 10 1
13
+ 11 1
14
+ 12 1
15
+ 13 0
16
+ 14 1
17
+ 15 1
18
+ 16 1
19
+ 17 1
20
+ 18 1
21
+ 19 1
22
+ 20 1
23
+ 21 1
24
+ 22 1
25
+ 23 1
26
+ 24 0
27
+ 25 1
28
+ 26 1
29
+ 27 1
30
+ 28 0
31
+ 29 0
32
+ 30 1
33
+ 31 0
34
+ 32 0
35
+ 33 0
36
+ 34 1
37
+ 35 0
38
+ 36 1
39
+ 37 1
40
+ 38 0
41
+ 39 1
42
+ 40 1
43
+ 41 1
44
+ 42 0
45
+ 43 1
46
+ 44 1
47
+ 45 0
48
+ 46 0
49
+ 47 0
50
+ 48 1
51
+ 49 1
52
+ 50 1
53
+ 51 1
54
+ 52 1
55
+ 53 1
56
+ 54 0
57
+ 55 1
58
+ 56 1
59
+ 57 1
60
+ 58 0
61
+ 59 1
62
+ 60 1
63
+ 61 1
64
+ 62 1
65
+ 63 1
66
+ 64 1
67
+ 65 1
68
+ 66 1
69
+ 67 1
70
+ 68 1
71
+ 69 1
72
+ 70 0
73
+ 71 1
74
+ 72 1
75
+ 73 0
76
+ 74 1
77
+ 75 1
78
+ 76 1
79
+ 77 0
80
+ 78 0
81
+ 79 0
82
+ 80 0
83
+ 81 0
84
+ 82 1
85
+ 83 0
86
+ 84 0
87
+ 85 0
88
+ 86 1
89
+ 87 1
90
+ 88 0
91
+ 89 1
92
+ 90 0
93
+ 91 1
94
+ 92 1
95
+ 93 1
96
+ 94 1
97
+ 95 0
98
+ 96 1
99
+ 97 1
100
+ 98 0
101
+ 99 1
102
+ 100 1
103
+ 101 0
104
+ 102 1
105
+ 103 1
106
+ 104 1
107
+ 105 0
108
+ 106 0
109
+ 107 1
110
+ 108 1
111
+ 109 1
112
+ 110 1
113
+ 111 0
114
+ 112 0
115
+ 113 1
116
+ 114 1
117
+ 115 0
118
+ 116 0
119
+ 117 0
120
+ 118 1
121
+ 119 1
122
+ 120 1
123
+ 121 0
124
+ 122 1
125
+ 123 0
126
+ 124 1
127
+ 125 1
128
+ 126 1
129
+ 127 1
130
+ 128 0
131
+ 129 1
132
+ 130 0
133
+ 131 1
134
+ 132 1
135
+ 133 0
136
+ 134 1
137
+ 135 0
138
+ 136 0
139
+ 137 1
140
+ 138 1
141
+ 139 1
142
+ 140 1
143
+ 141 1
144
+ 142 1
145
+ 143 1
146
+ 144 1
147
+ 145 1
148
+ 146 0
149
+ 147 1
150
+ 148 1
151
+ 149 1
152
+ 150 1
153
+ 151 1
154
+ 152 0
155
+ 153 0
156
+ 154 1
157
+ 155 1
158
+ 156 1
159
+ 157 1
160
+ 158 1
161
+ 159 1
162
+ 160 1
163
+ 161 1
164
+ 162 1
165
+ 163 1
166
+ 164 0
167
+ 165 1
168
+ 166 1
169
+ 167 0
170
+ 168 1
171
+ 169 1
172
+ 170 1
173
+ 171 0
174
+ 172 1
175
+ 173 0
176
+ 174 0
177
+ 175 0
178
+ 176 1
179
+ 177 0
180
+ 178 1
181
+ 179 1
182
+ 180 0
183
+ 181 0
184
+ 182 0
185
+ 183 1
186
+ 184 1
187
+ 185 1
188
+ 186 0
189
+ 187 0
190
+ 188 1
191
+ 189 1
192
+ 190 0
193
+ 191 0
194
+ 192 1
195
+ 193 1
196
+ 194 0
197
+ 195 1
198
+ 196 0
199
+ 197 1
200
+ 198 1
201
+ 199 0
202
+ 200 0
203
+ 201 1
204
+ 202 1
205
+ 203 1
206
+ 204 1
207
+ 205 0
208
+ 206 1
209
+ 207 1
210
+ 208 0
211
+ 209 1
212
+ 210 0
213
+ 211 1
214
+ 212 1
215
+ 213 0
216
+ 214 1
217
+ 215 0
218
+ 216 1
219
+ 217 1
220
+ 218 1
221
+ 219 1
222
+ 220 1
223
+ 221 0
224
+ 222 1
225
+ 223 0
226
+ 224 1
227
+ 225 1
228
+ 226 1
229
+ 227 1
230
+ 228 0
231
+ 229 1
232
+ 230 1
233
+ 231 1
234
+ 232 0
235
+ 233 1
236
+ 234 0
237
+ 235 1
238
+ 236 1
239
+ 237 1
240
+ 238 0
241
+ 239 0
242
+ 240 1
243
+ 241 1
244
+ 242 1
245
+ 243 1
246
+ 244 0
247
+ 245 1
248
+ 246 1
249
+ 247 0
250
+ 248 1
251
+ 249 1
252
+ 250 1
253
+ 251 1
254
+ 252 0
255
+ 253 0
256
+ 254 1
257
+ 255 1
258
+ 256 1
259
+ 257 1
260
+ 258 1
261
+ 259 1
262
+ 260 1
263
+ 261 0
264
+ 262 1
265
+ 263 1
266
+ 264 0
267
+ 265 1
268
+ 266 0
269
+ 267 0
270
+ 268 1
271
+ 269 1
272
+ 270 1
273
+ 271 0
274
+ 272 1
275
+ 273 0
276
+ 274 0
277
+ 275 0
278
+ 276 1
279
+ 277 0
280
+ 278 1
281
+ 279 1
282
+ 280 1
283
+ 281 0
284
+ 282 1
285
+ 283 1
286
+ 284 1
287
+ 285 0
288
+ 286 1
289
+ 287 0
290
+ 288 0
291
+ 289 0
292
+ 290 0
293
+ 291 1
294
+ 292 1
295
+ 293 1
296
+ 294 1
297
+ 295 1
298
+ 296 0
299
+ 297 0
300
+ 298 1
301
+ 299 1
302
+ 300 1
303
+ 301 1
304
+ 302 0
305
+ 303 1
306
+ 304 1
307
+ 305 0
308
+ 306 1
309
+ 307 1
310
+ 308 0
311
+ 309 1
312
+ 310 1
313
+ 311 1
314
+ 312 0
315
+ 313 1
316
+ 314 1
317
+ 315 1
318
+ 316 1
319
+ 317 1
320
+ 318 1
321
+ 319 0
322
+ 320 1
323
+ 321 1
324
+ 322 1
325
+ 323 1
326
+ 324 1
327
+ 325 1
328
+ 326 1
329
+ 327 0
330
+ 328 0
331
+ 329 1
332
+ 330 0
333
+ 331 0
334
+ 332 1
335
+ 333 1
336
+ 334 1
337
+ 335 0
338
+ 336 1
339
+ 337 1
340
+ 338 0
341
+ 339 1
342
+ 340 1
343
+ 341 1
344
+ 342 1
345
+ 343 1
346
+ 344 0
347
+ 345 1
348
+ 346 0
349
+ 347 0
350
+ 348 0
351
+ 349 0
352
+ 350 1
353
+ 351 1
354
+ 352 0
355
+ 353 1
356
+ 354 0
357
+ 355 0
358
+ 356 0
359
+ 357 0
360
+ 358 1
361
+ 359 0
362
+ 360 0
363
+ 361 0
364
+ 362 0
365
+ 363 1
366
+ 364 0
367
+ 365 1
368
+ 366 0
369
+ 367 0
370
+ 368 0
371
+ 369 1
372
+ 370 1
373
+ 371 1
374
+ 372 1
375
+ 373 1
376
+ 374 1
377
+ 375 1
378
+ 376 0
379
+ 377 1
380
+ 378 0
381
+ 379 0
382
+ 380 1
383
+ 381 1
384
+ 382 1
385
+ 383 1
386
+ 384 1
387
+ 385 0
388
+ 386 1
389
+ 387 0
390
+ 388 1
391
+ 389 1
392
+ 390 1
393
+ 391 0
394
+ 392 0
395
+ 393 1
396
+ 394 1
397
+ 395 1
398
+ 396 1
399
+ 397 1
400
+ 398 0
401
+ 399 0
402
+ 400 1
403
+ 401 0
404
+ 402 1
405
+ 403 1
406
+ 404 1
407
+ 405 0
408
+ 406 0
409
+ 407 1
410
+ 408 1
411
+ 409 1
412
+ 410 1
413
+ 411 0
414
+ 412 1
415
+ 413 1
416
+ 414 1
417
+ 415 0
418
+ 416 0
419
+ 417 1
420
+ 418 1
421
+ 419 1
422
+ 420 1
423
+ 421 1
424
+ 422 1
425
+ 423 1
426
+ 424 0
427
+ 425 0
428
+ 426 0
429
+ 427 1
430
+ 428 0
431
+ 429 1
432
+ 430 0
433
+ 431 1
434
+ 432 1
435
+ 433 1
436
+ 434 1
437
+ 435 1
438
+ 436 1
439
+ 437 0
440
+ 438 1
441
+ 439 0
442
+ 440 0
443
+ 441 0
444
+ 442 1
445
+ 443 1
446
+ 444 1
447
+ 445 1
448
+ 446 1
449
+ 447 1
450
+ 448 1
451
+ 449 1
452
+ 450 0
453
+ 451 1
454
+ 452 1
455
+ 453 1
456
+ 454 1
457
+ 455 0
458
+ 456 1
459
+ 457 1
460
+ 458 0
461
+ 459 1
462
+ 460 1
463
+ 461 1
464
+ 462 1
465
+ 463 0
466
+ 464 0
467
+ 465 0
468
+ 466 1
469
+ 467 1
470
+ 468 1
471
+ 469 1
472
+ 470 0
473
+ 471 1
474
+ 472 1
475
+ 473 1
476
+ 474 1
477
+ 475 1
478
+ 476 1
479
+ 477 1
480
+ 478 1
481
+ 479 1
482
+ 480 0
483
+ 481 1
484
+ 482 1
485
+ 483 1
486
+ 484 0
487
+ 485 1
488
+ 486 1
489
+ 487 1
490
+ 488 1
491
+ 489 1
492
+ 490 1
493
+ 491 1
494
+ 492 1
495
+ 493 1
496
+ 494 0
497
+ 495 1
498
+ 496 1
499
+ 497 1
500
+ 498 0
501
+ 499 1
502
+ 500 1
503
+ 501 1
504
+ 502 1
505
+ 503 0
506
+ 504 1
507
+ 505 1
508
+ 506 0
509
+ 507 1
510
+ 508 0
511
+ 509 0
512
+ 510 1
513
+ 511 1
514
+ 512 0
515
+ 513 1
516
+ 514 1
517
+ 515 1
518
+ 516 1
519
+ 517 0
520
+ 518 1
521
+ 519 1
522
+ 520 1
523
+ 521 0
524
+ 522 1
525
+ 523 1
526
+ 524 1
527
+ 525 1
528
+ 526 1
529
+ 527 1
530
+ 528 1
531
+ 529 1
532
+ 530 1
533
+ 531 1
534
+ 532 1
535
+ 533 0
536
+ 534 1
537
+ 535 1
538
+ 536 1
539
+ 537 0
540
+ 538 0
541
+ 539 1
542
+ 540 0
543
+ 541 1
544
+ 542 1
545
+ 543 1
546
+ 544 1
547
+ 545 1
548
+ 546 0
549
+ 547 0
550
+ 548 0
551
+ 549 0
552
+ 550 0
553
+ 551 1
554
+ 552 0
555
+ 553 1
556
+ 554 0
557
+ 555 1
558
+ 556 0
559
+ 557 1
560
+ 558 1
561
+ 559 1
562
+ 560 1
563
+ 561 0
564
+ 562 0
565
+ 563 0
566
+ 564 1
567
+ 565 1
568
+ 566 1
569
+ 567 1
570
+ 568 0
571
+ 569 0
572
+ 570 1
573
+ 571 1
574
+ 572 1
575
+ 573 1
576
+ 574 1
577
+ 575 1
578
+ 576 1
579
+ 577 0
580
+ 578 1
581
+ 579 1
582
+ 580 1
583
+ 581 0
584
+ 582 1
585
+ 583 1
586
+ 584 1
587
+ 585 1
588
+ 586 0
589
+ 587 1
590
+ 588 1
591
+ 589 1
592
+ 590 1
593
+ 591 1
594
+ 592 1
595
+ 593 0
596
+ 594 0
597
+ 595 1
598
+ 596 1
599
+ 597 0
600
+ 598 1
601
+ 599 1
602
+ 600 0
603
+ 601 1
604
+ 602 1
605
+ 603 1
606
+ 604 1
607
+ 605 1
608
+ 606 1
609
+ 607 0
610
+ 608 0
611
+ 609 1
612
+ 610 1
613
+ 611 1
614
+ 612 0
615
+ 613 1
616
+ 614 1
617
+ 615 1
618
+ 616 1
619
+ 617 0
620
+ 618 1
621
+ 619 1
622
+ 620 1
623
+ 621 0
624
+ 622 0
625
+ 623 0
626
+ 624 1
627
+ 625 0
628
+ 626 1
629
+ 627 1
630
+ 628 0
631
+ 629 1
632
+ 630 0
633
+ 631 1
634
+ 632 1
635
+ 633 0
636
+ 634 0
637
+ 635 1
638
+ 636 0
639
+ 637 1
640
+ 638 1
641
+ 639 1
642
+ 640 1
643
+ 641 1
644
+ 642 1
645
+ 643 1
646
+ 644 1
647
+ 645 1
648
+ 646 1
649
+ 647 1
650
+ 648 1
651
+ 649 1
652
+ 650 1
653
+ 651 0
654
+ 652 0
655
+ 653 1
656
+ 654 0
657
+ 655 1
658
+ 656 1
659
+ 657 1
660
+ 658 1
661
+ 659 1
662
+ 660 1
663
+ 661 1
664
+ 662 1
665
+ 663 1
666
+ 664 1
667
+ 665 0
668
+ 666 1
669
+ 667 0
670
+ 668 0
671
+ 669 0
672
+ 670 1
673
+ 671 1
674
+ 672 0
675
+ 673 1
676
+ 674 1
677
+ 675 0
678
+ 676 1
679
+ 677 1
680
+ 678 1
681
+ 679 1
682
+ 680 0
683
+ 681 1
684
+ 682 1
685
+ 683 1
686
+ 684 0
687
+ 685 1
688
+ 686 1
689
+ 687 0
690
+ 688 0
691
+ 689 1
692
+ 690 1
693
+ 691 0
694
+ 692 0
695
+ 693 1
696
+ 694 0
697
+ 695 0
698
+ 696 1
699
+ 697 1
700
+ 698 0
701
+ 699 1
702
+ 700 0
703
+ 701 1
704
+ 702 1
705
+ 703 0
706
+ 704 1
707
+ 705 0
708
+ 706 0
709
+ 707 0
710
+ 708 1
711
+ 709 1
712
+ 710 0
713
+ 711 0
714
+ 712 1
715
+ 713 1
716
+ 714 1
717
+ 715 1
718
+ 716 1
719
+ 717 0
720
+ 718 1
721
+ 719 1
722
+ 720 1
723
+ 721 1
724
+ 722 0
725
+ 723 0
726
+ 724 1
727
+ 725 1
728
+ 726 1
729
+ 727 1
730
+ 728 1
731
+ 729 1
732
+ 730 1
733
+ 731 1
734
+ 732 1
735
+ 733 0
736
+ 734 1
737
+ 735 1
738
+ 736 1
739
+ 737 1
740
+ 738 1
741
+ 739 1
742
+ 740 1
743
+ 741 0
744
+ 742 1
745
+ 743 1
746
+ 744 1
747
+ 745 0
748
+ 746 1
749
+ 747 0
750
+ 748 1
751
+ 749 1
752
+ 750 1
753
+ 751 1
754
+ 752 1
755
+ 753 0
756
+ 754 1
757
+ 755 1
758
+ 756 1
759
+ 757 1
760
+ 758 1
761
+ 759 1
762
+ 760 1
763
+ 761 1
764
+ 762 1
765
+ 763 1
766
+ 764 1
767
+ 765 1
768
+ 766 1
769
+ 767 1
770
+ 768 1
771
+ 769 1
772
+ 770 1
773
+ 771 1
774
+ 772 1
775
+ 773 1
776
+ 774 1
777
+ 775 1
778
+ 776 1
779
+ 777 1
780
+ 778 0
781
+ 779 1
782
+ 780 0
783
+ 781 0
784
+ 782 1
785
+ 783 0
786
+ 784 0
787
+ 785 1
788
+ 786 1
789
+ 787 0
790
+ 788 1
791
+ 789 1
792
+ 790 1
793
+ 791 1
794
+ 792 0
795
+ 793 1
796
+ 794 1
797
+ 795 1
798
+ 796 0
799
+ 797 0
800
+ 798 1
801
+ 799 0
802
+ 800 0
803
+ 801 0
804
+ 802 1
805
+ 803 1
806
+ 804 0
807
+ 805 0
808
+ 806 1
809
+ 807 0
810
+ 808 1
811
+ 809 0
812
+ 810 1
813
+ 811 1
814
+ 812 0
815
+ 813 1
816
+ 814 0
817
+ 815 1
818
+ 816 0
819
+ 817 0
820
+ 818 0
821
+ 819 1
822
+ 820 0
823
+ 821 0
824
+ 822 1
825
+ 823 1
826
+ 824 1
827
+ 825 1
828
+ 826 1
829
+ 827 0
830
+ 828 0
831
+ 829 1
832
+ 830 1
833
+ 831 1
834
+ 832 1
835
+ 833 0
836
+ 834 1
837
+ 835 0
838
+ 836 1
839
+ 837 1
840
+ 838 0
841
+ 839 0
842
+ 840 0
843
+ 841 0
844
+ 842 1
845
+ 843 1
846
+ 844 1
847
+ 845 1
848
+ 846 1
849
+ 847 1
850
+ 848 0
851
+ 849 1
852
+ 850 1
853
+ 851 1
854
+ 852 1
855
+ 853 0
856
+ 854 1
857
+ 855 0
858
+ 856 1
859
+ 857 1
860
+ 858 0
861
+ 859 1
862
+ 860 0
863
+ 861 1
864
+ 862 1
865
+ 863 1
866
+ 864 0
867
+ 865 1
868
+ 866 1
869
+ 867 1
870
+ 868 1
871
+ 869 1
872
+ 870 0
873
+ 871 1
874
+ 872 1
875
+ 873 1
876
+ 874 1
877
+ 875 0
878
+ 876 1
879
+ 877 0
880
+ 878 0
881
+ 879 0
882
+ 880 0
883
+ 881 1
884
+ 882 1
885
+ 883 1
886
+ 884 1
887
+ 885 1
888
+ 886 1
889
+ 887 1
890
+ 888 0
891
+ 889 1
892
+ 890 1
893
+ 891 1
894
+ 892 0
895
+ 893 0
896
+ 894 0
897
+ 895 0
898
+ 896 0
899
+ 897 1
900
+ 898 1
901
+ 899 1
902
+ 900 1
903
+ 901 1
904
+ 902 0
905
+ 903 1
906
+ 904 1
907
+ 905 1
908
+ 906 1
909
+ 907 1
910
+ 908 1
911
+ 909 1
912
+ 910 0
913
+ 911 1
914
+ 912 0
915
+ 913 0
916
+ 914 1
917
+ 915 1
918
+ 916 1
919
+ 917 1
920
+ 918 0
921
+ 919 1
922
+ 920 0
923
+ 921 1
924
+ 922 1
925
+ 923 1
926
+ 924 1
927
+ 925 1
928
+ 926 1
929
+ 927 0
930
+ 928 0
931
+ 929 1
932
+ 930 1
933
+ 931 1
934
+ 932 0
935
+ 933 0
936
+ 934 0
937
+ 935 1
938
+ 936 1
939
+ 937 0
940
+ 938 1
941
+ 939 1
942
+ 940 1
943
+ 941 1
944
+ 942 0
945
+ 943 1
946
+ 944 1
947
+ 945 0
948
+ 946 1
949
+ 947 1
950
+ 948 0
951
+ 949 1
952
+ 950 0
953
+ 951 1
954
+ 952 0
955
+ 953 1
956
+ 954 1
957
+ 955 1
958
+ 956 1
959
+ 957 1
960
+ 958 1
961
+ 959 1
962
+ 960 1
963
+ 961 1
964
+ 962 1
965
+ 963 0
966
+ 964 0
967
+ 965 1
968
+ 966 1
969
+ 967 1
970
+ 968 1
971
+ 969 1
972
+ 970 1
973
+ 971 0
974
+ 972 1
975
+ 973 1
976
+ 974 1
977
+ 975 1
978
+ 976 1
979
+ 977 1
980
+ 978 1
981
+ 979 1
982
+ 980 1
983
+ 981 1
984
+ 982 0
985
+ 983 1
986
+ 984 0
987
+ 985 0
988
+ 986 1
989
+ 987 0
990
+ 988 1
991
+ 989 0
992
+ 990 0
993
+ 991 1
994
+ 992 1
995
+ 993 1
996
+ 994 0
997
+ 995 1
998
+ 996 1
999
+ 997 1
1000
+ 998 1
1001
+ 999 1
1002
+ 1000 0
1003
+ 1001 1
1004
+ 1002 0
1005
+ 1003 0
1006
+ 1004 1
1007
+ 1005 0
1008
+ 1006 1
1009
+ 1007 1
1010
+ 1008 1
1011
+ 1009 0
1012
+ 1010 1
1013
+ 1011 0
1014
+ 1012 1
1015
+ 1013 1
1016
+ 1014 1
1017
+ 1015 1
1018
+ 1016 1
1019
+ 1017 1
1020
+ 1018 1
1021
+ 1019 1
1022
+ 1020 1
1023
+ 1021 1
1024
+ 1022 1
1025
+ 1023 0
1026
+ 1024 1
1027
+ 1025 0
1028
+ 1026 0
1029
+ 1027 1
1030
+ 1028 0
1031
+ 1029 1
1032
+ 1030 0
1033
+ 1031 1
1034
+ 1032 1
1035
+ 1033 0
1036
+ 1034 1
1037
+ 1035 0
1038
+ 1036 1
1039
+ 1037 1
1040
+ 1038 1
1041
+ 1039 0
1042
+ 1040 0
1043
+ 1041 1
1044
+ 1042 0
1045
+ 1043 0
1046
+ 1044 1
1047
+ 1045 1
1048
+ 1046 0
1049
+ 1047 1
1050
+ 1048 1
1051
+ 1049 1
1052
+ 1050 1
1053
+ 1051 1
1054
+ 1052 1
1055
+ 1053 1
1056
+ 1054 0
1057
+ 1055 1
1058
+ 1056 1
1059
+ 1057 1
1060
+ 1058 1
1061
+ 1059 1
1062
+ 1060 1
1063
+ 1061 1
1064
+ 1062 1
1065
+ 1063 1
1066
+ 1064 1
1067
+ 1065 1
1068
+ 1066 1
1069
+ 1067 1
1070
+ 1068 0
1071
+ 1069 1
1072
+ 1070 1
1073
+ 1071 1
1074
+ 1072 1
1075
+ 1073 1
1076
+ 1074 1
1077
+ 1075 1
1078
+ 1076 1
1079
+ 1077 1
1080
+ 1078 0
1081
+ 1079 1
1082
+ 1080 0
1083
+ 1081 0
1084
+ 1082 1
1085
+ 1083 1
1086
+ 1084 1
1087
+ 1085 1
1088
+ 1086 1
1089
+ 1087 0
1090
+ 1088 1
1091
+ 1089 1
1092
+ 1090 1
1093
+ 1091 0
1094
+ 1092 1
1095
+ 1093 1
1096
+ 1094 1
1097
+ 1095 1
1098
+ 1096 1
1099
+ 1097 1
1100
+ 1098 1
1101
+ 1099 0
1102
+ 1100 1
1103
+ 1101 1
1104
+ 1102 0
1105
+ 1103 1
1106
+ 1104 1
1107
+ 1105 0
1108
+ 1106 1
1109
+ 1107 0
1110
+ 1108 0
1111
+ 1109 1
1112
+ 1110 0
1113
+ 1111 1
1114
+ 1112 0
1115
+ 1113 0
1116
+ 1114 1
1117
+ 1115 1
1118
+ 1116 0
1119
+ 1117 1
1120
+ 1118 1
1121
+ 1119 1
1122
+ 1120 0
1123
+ 1121 0
1124
+ 1122 1
1125
+ 1123 1
1126
+ 1124 0
1127
+ 1125 1
1128
+ 1126 0
1129
+ 1127 0
1130
+ 1128 1
1131
+ 1129 1
1132
+ 1130 0
1133
+ 1131 1
1134
+ 1132 0
1135
+ 1133 1
1136
+ 1134 0
1137
+ 1135 1
1138
+ 1136 0
1139
+ 1137 1
1140
+ 1138 0
1141
+ 1139 0
1142
+ 1140 1
1143
+ 1141 1
1144
+ 1142 1
1145
+ 1143 1
1146
+ 1144 0
1147
+ 1145 1
1148
+ 1146 1
1149
+ 1147 0
1150
+ 1148 1
1151
+ 1149 1
1152
+ 1150 1
1153
+ 1151 1
1154
+ 1152 0
1155
+ 1153 1
1156
+ 1154 0
1157
+ 1155 1
1158
+ 1156 1
1159
+ 1157 0
1160
+ 1158 0
1161
+ 1159 1
1162
+ 1160 1
1163
+ 1161 1
1164
+ 1162 1
1165
+ 1163 1
1166
+ 1164 1
1167
+ 1165 0
1168
+ 1166 1
1169
+ 1167 1
1170
+ 1168 1
1171
+ 1169 1
1172
+ 1170 0
1173
+ 1171 1
1174
+ 1172 1
1175
+ 1173 1
1176
+ 1174 1
1177
+ 1175 0
1178
+ 1176 1
1179
+ 1177 0
1180
+ 1178 1
1181
+ 1179 1
1182
+ 1180 0
1183
+ 1181 0
1184
+ 1182 1
1185
+ 1183 1
1186
+ 1184 1
1187
+ 1185 1
1188
+ 1186 0
1189
+ 1187 1
1190
+ 1188 1
1191
+ 1189 0
1192
+ 1190 0
1193
+ 1191 0
1194
+ 1192 1
1195
+ 1193 1
1196
+ 1194 1
1197
+ 1195 1
1198
+ 1196 0
1199
+ 1197 1
1200
+ 1198 1
1201
+ 1199 1
1202
+ 1200 1
1203
+ 1201 1
1204
+ 1202 0
1205
+ 1203 1
1206
+ 1204 0
1207
+ 1205 1
1208
+ 1206 0
1209
+ 1207 0
1210
+ 1208 1
1211
+ 1209 1
1212
+ 1210 1
1213
+ 1211 1
1214
+ 1212 1
1215
+ 1213 1
1216
+ 1214 1
1217
+ 1215 1
1218
+ 1216 1
1219
+ 1217 1
1220
+ 1218 1
1221
+ 1219 1
1222
+ 1220 1
1223
+ 1221 1
1224
+ 1222 1
1225
+ 1223 1
1226
+ 1224 0
1227
+ 1225 1
1228
+ 1226 0
1229
+ 1227 1
1230
+ 1228 1
1231
+ 1229 1
1232
+ 1230 1
1233
+ 1231 0
1234
+ 1232 1
1235
+ 1233 1
1236
+ 1234 1
1237
+ 1235 1
1238
+ 1236 1
1239
+ 1237 1
1240
+ 1238 1
1241
+ 1239 1
1242
+ 1240 1
1243
+ 1241 0
1244
+ 1242 1
1245
+ 1243 1
1246
+ 1244 1
1247
+ 1245 1
1248
+ 1246 0
1249
+ 1247 0
1250
+ 1248 1
1251
+ 1249 1
1252
+ 1250 1
1253
+ 1251 1
1254
+ 1252 1
1255
+ 1253 1
1256
+ 1254 1
1257
+ 1255 1
1258
+ 1256 1
1259
+ 1257 1
1260
+ 1258 1
1261
+ 1259 0
1262
+ 1260 1
1263
+ 1261 1
1264
+ 1262 1
1265
+ 1263 1
1266
+ 1264 1
1267
+ 1265 1
1268
+ 1266 0
1269
+ 1267 1
1270
+ 1268 1
1271
+ 1269 1
1272
+ 1270 1
1273
+ 1271 1
1274
+ 1272 0
1275
+ 1273 0
1276
+ 1274 0
1277
+ 1275 1
1278
+ 1276 0
1279
+ 1277 1
1280
+ 1278 0
1281
+ 1279 0
1282
+ 1280 1
1283
+ 1281 1
1284
+ 1282 0
1285
+ 1283 0
1286
+ 1284 1
1287
+ 1285 1
1288
+ 1286 1
1289
+ 1287 1
1290
+ 1288 1
1291
+ 1289 1
1292
+ 1290 1
1293
+ 1291 1
1294
+ 1292 0
1295
+ 1293 0
1296
+ 1294 0
1297
+ 1295 1
1298
+ 1296 1
1299
+ 1297 1
1300
+ 1298 1
1301
+ 1299 1
1302
+ 1300 0
1303
+ 1301 0
1304
+ 1302 1
1305
+ 1303 1
1306
+ 1304 1
1307
+ 1305 0
1308
+ 1306 0
1309
+ 1307 1
1310
+ 1308 1
1311
+ 1309 0
1312
+ 1310 1
1313
+ 1311 1
1314
+ 1312 0
1315
+ 1313 1
1316
+ 1314 1
1317
+ 1315 1
1318
+ 1316 1
1319
+ 1317 1
1320
+ 1318 0
1321
+ 1319 1
1322
+ 1320 1
1323
+ 1321 0
1324
+ 1322 1
1325
+ 1323 1
1326
+ 1324 1
1327
+ 1325 1
1328
+ 1326 0
1329
+ 1327 1
1330
+ 1328 1
1331
+ 1329 0
1332
+ 1330 0
1333
+ 1331 1
1334
+ 1332 1
1335
+ 1333 0
1336
+ 1334 1
1337
+ 1335 1
1338
+ 1336 0
1339
+ 1337 0
1340
+ 1338 0
1341
+ 1339 1
1342
+ 1340 0
1343
+ 1341 0
1344
+ 1342 0
1345
+ 1343 0
1346
+ 1344 1
1347
+ 1345 1
1348
+ 1346 1
1349
+ 1347 1
1350
+ 1348 1
1351
+ 1349 1
1352
+ 1350 1
1353
+ 1351 1
1354
+ 1352 1
1355
+ 1353 0
1356
+ 1354 0
1357
+ 1355 0
1358
+ 1356 1
1359
+ 1357 0
1360
+ 1358 0
1361
+ 1359 1
1362
+ 1360 1
1363
+ 1361 1
1364
+ 1362 1
1365
+ 1363 0
1366
+ 1364 1
1367
+ 1365 0
1368
+ 1366 0
1369
+ 1367 0
1370
+ 1368 1
1371
+ 1369 0
1372
+ 1370 0
1373
+ 1371 0
1374
+ 1372 1
1375
+ 1373 1
1376
+ 1374 0
1377
+ 1375 1
1378
+ 1376 0
1379
+ 1377 1
1380
+ 1378 1
1381
+ 1379 1
1382
+ 1380 1
1383
+ 1381 1
1384
+ 1382 1
1385
+ 1383 0
1386
+ 1384 1
1387
+ 1385 1
1388
+ 1386 0
1389
+ 1387 0
1390
+ 1388 1
1391
+ 1389 1
1392
+ 1390 1
1393
+ 1391 0
1394
+ 1392 0
1395
+ 1393 1
1396
+ 1394 1
1397
+ 1395 1
1398
+ 1396 0
1399
+ 1397 1
1400
+ 1398 1
1401
+ 1399 1
1402
+ 1400 0
1403
+ 1401 0
1404
+ 1402 1
1405
+ 1403 0
1406
+ 1404 1
1407
+ 1405 0
1408
+ 1406 1
1409
+ 1407 1
1410
+ 1408 1
1411
+ 1409 1
1412
+ 1410 1
1413
+ 1411 1
1414
+ 1412 1
1415
+ 1413 1
1416
+ 1414 0
1417
+ 1415 1
1418
+ 1416 1
1419
+ 1417 0
1420
+ 1418 1
1421
+ 1419 1
1422
+ 1420 1
1423
+ 1421 1
1424
+ 1422 0
1425
+ 1423 0
1426
+ 1424 1
1427
+ 1425 1
1428
+ 1426 0
1429
+ 1427 1
1430
+ 1428 1
1431
+ 1429 1
1432
+ 1430 0
1433
+ 1431 1
1434
+ 1432 1
1435
+ 1433 0
1436
+ 1434 0
1437
+ 1435 0
1438
+ 1436 1
1439
+ 1437 1
1440
+ 1438 1
1441
+ 1439 1
1442
+ 1440 1
1443
+ 1441 1
1444
+ 1442 0
1445
+ 1443 0
1446
+ 1444 0
1447
+ 1445 1
1448
+ 1446 1
1449
+ 1447 1
1450
+ 1448 0
1451
+ 1449 1
1452
+ 1450 1
1453
+ 1451 0
1454
+ 1452 1
1455
+ 1453 1
1456
+ 1454 1
1457
+ 1455 1
1458
+ 1456 1
1459
+ 1457 1
1460
+ 1458 1
1461
+ 1459 1
1462
+ 1460 1
1463
+ 1461 1
1464
+ 1462 1
1465
+ 1463 0
1466
+ 1464 1
1467
+ 1465 1
1468
+ 1466 1
1469
+ 1467 1
1470
+ 1468 1
1471
+ 1469 0
1472
+ 1470 1
1473
+ 1471 1
1474
+ 1472 0
1475
+ 1473 0
1476
+ 1474 1
1477
+ 1475 0
1478
+ 1476 0
1479
+ 1477 1
1480
+ 1478 1
1481
+ 1479 0
1482
+ 1480 1
1483
+ 1481 0
1484
+ 1482 1
1485
+ 1483 0
1486
+ 1484 0
1487
+ 1485 1
1488
+ 1486 0
1489
+ 1487 0
1490
+ 1488 1
1491
+ 1489 1
1492
+ 1490 0
1493
+ 1491 1
1494
+ 1492 1
1495
+ 1493 0
1496
+ 1494 0
1497
+ 1495 1
1498
+ 1496 1
1499
+ 1497 0
1500
+ 1498 0
1501
+ 1499 0
1502
+ 1500 1
1503
+ 1501 1
1504
+ 1502 1
1505
+ 1503 1
1506
+ 1504 1
1507
+ 1505 1
1508
+ 1506 1
1509
+ 1507 1
1510
+ 1508 1
1511
+ 1509 0
1512
+ 1510 1
1513
+ 1511 1
1514
+ 1512 1
1515
+ 1513 1
1516
+ 1514 0
1517
+ 1515 1
1518
+ 1516 0
1519
+ 1517 1
1520
+ 1518 0
1521
+ 1519 0
1522
+ 1520 0
1523
+ 1521 0
1524
+ 1522 0
1525
+ 1523 1
1526
+ 1524 1
1527
+ 1525 1
1528
+ 1526 1
1529
+ 1527 1
1530
+ 1528 0
1531
+ 1529 1
1532
+ 1530 1
1533
+ 1531 1
1534
+ 1532 1
1535
+ 1533 1
1536
+ 1534 1
1537
+ 1535 0
1538
+ 1536 1
1539
+ 1537 1
1540
+ 1538 1
1541
+ 1539 0
1542
+ 1540 0
1543
+ 1541 1
1544
+ 1542 0
1545
+ 1543 1
1546
+ 1544 0
1547
+ 1545 0
1548
+ 1546 1
1549
+ 1547 0
1550
+ 1548 1
1551
+ 1549 1
1552
+ 1550 1
1553
+ 1551 0
1554
+ 1552 0
1555
+ 1553 0
1556
+ 1554 1
1557
+ 1555 1
1558
+ 1556 0
1559
+ 1557 1
1560
+ 1558 0
1561
+ 1559 1
1562
+ 1560 1
1563
+ 1561 0
1564
+ 1562 1
1565
+ 1563 1
1566
+ 1564 1
1567
+ 1565 1
1568
+ 1566 1
1569
+ 1567 1
1570
+ 1568 1
1571
+ 1569 1
1572
+ 1570 0
1573
+ 1571 1
1574
+ 1572 0
1575
+ 1573 0
1576
+ 1574 1
1577
+ 1575 1
1578
+ 1576 1
1579
+ 1577 0
1580
+ 1578 1
1581
+ 1579 1
1582
+ 1580 1
1583
+ 1581 1
1584
+ 1582 1
1585
+ 1583 1
1586
+ 1584 0
1587
+ 1585 1
1588
+ 1586 0
1589
+ 1587 1
1590
+ 1588 1
1591
+ 1589 1
1592
+ 1590 0
1593
+ 1591 1
1594
+ 1592 1
1595
+ 1593 0
1596
+ 1594 0
1597
+ 1595 1
1598
+ 1596 1
1599
+ 1597 0
1600
+ 1598 1
1601
+ 1599 1
1602
+ 1600 1
1603
+ 1601 1
1604
+ 1602 1
1605
+ 1603 1
1606
+ 1604 1
1607
+ 1605 0
1608
+ 1606 1
1609
+ 1607 1
1610
+ 1608 0
1611
+ 1609 1
1612
+ 1610 0
1613
+ 1611 1
1614
+ 1612 1
1615
+ 1613 1
1616
+ 1614 1
1617
+ 1615 1
1618
+ 1616 1
1619
+ 1617 0
1620
+ 1618 1
1621
+ 1619 1
1622
+ 1620 0
1623
+ 1621 0
1624
+ 1622 0
1625
+ 1623 1
1626
+ 1624 1
1627
+ 1625 0
1628
+ 1626 1
1629
+ 1627 1
1630
+ 1628 1
1631
+ 1629 0
1632
+ 1630 0
1633
+ 1631 1
1634
+ 1632 1
1635
+ 1633 1
1636
+ 1634 1
1637
+ 1635 1
1638
+ 1636 1
1639
+ 1637 1
1640
+ 1638 0
1641
+ 1639 0
1642
+ 1640 0
1643
+ 1641 1
1644
+ 1642 1
1645
+ 1643 1
1646
+ 1644 1
1647
+ 1645 1
1648
+ 1646 1
1649
+ 1647 0
1650
+ 1648 1
1651
+ 1649 1
1652
+ 1650 1
1653
+ 1651 0
1654
+ 1652 0
1655
+ 1653 1
1656
+ 1654 1
1657
+ 1655 1
1658
+ 1656 0
1659
+ 1657 0
1660
+ 1658 1
1661
+ 1659 1
1662
+ 1660 0
1663
+ 1661 0
1664
+ 1662 0
1665
+ 1663 1
1666
+ 1664 0
1667
+ 1665 0
1668
+ 1666 0
1669
+ 1667 1
1670
+ 1668 0
1671
+ 1669 1
1672
+ 1670 1
1673
+ 1671 0
1674
+ 1672 1
1675
+ 1673 1
1676
+ 1674 1
1677
+ 1675 0
1678
+ 1676 1
1679
+ 1677 1
1680
+ 1678 1
1681
+ 1679 1
1682
+ 1680 0
1683
+ 1681 1
1684
+ 1682 1
1685
+ 1683 0
1686
+ 1684 1
1687
+ 1685 1
1688
+ 1686 0
1689
+ 1687 0
1690
+ 1688 1
1691
+ 1689 1
1692
+ 1690 1
1693
+ 1691 0
1694
+ 1692 1
1695
+ 1693 1
1696
+ 1694 0
1697
+ 1695 1
1698
+ 1696 1
1699
+ 1697 0
1700
+ 1698 0
1701
+ 1699 1
1702
+ 1700 0
1703
+ 1701 1
1704
+ 1702 0
1705
+ 1703 1
1706
+ 1704 1
1707
+ 1705 1
1708
+ 1706 0
1709
+ 1707 0
1710
+ 1708 1
1711
+ 1709 1
1712
+ 1710 1
1713
+ 1711 1
1714
+ 1712 1
1715
+ 1713 0
1716
+ 1714 1
1717
+ 1715 1
1718
+ 1716 1
1719
+ 1717 1
1720
+ 1718 1
1721
+ 1719 1
1722
+ 1720 0
1723
+ 1721 0
1724
+ 1722 0
1725
+ 1723 1
1726
+ 1724 1
nlu/glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h32m05,r=4,s43/all_results.json ADDED
@@ -0,0 +1,11 @@
1
+ {
2
+ "epoch": 30.0,
3
+ "eval_accuracy": 0.9191176470588235,
4
+ "eval_combined_score": 0.9303552837064029,
5
+ "eval_f1": 0.9415929203539823,
6
+ "eval_loss": 0.43192246556282043,
7
+ "eval_runtime": 0.5887,
8
+ "eval_samples": 408,
9
+ "eval_samples_per_second": 693.086,
10
+ "eval_steps_per_second": 1.699
11
+ }
nlu/glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h32m05,r=4,s43/eval_results.json ADDED
@@ -0,0 +1,11 @@
1
+ {
2
+ "epoch": 30.0,
3
+ "eval_accuracy": 0.9191176470588235,
4
+ "eval_combined_score": 0.9303552837064029,
5
+ "eval_f1": 0.9415929203539823,
6
+ "eval_loss": 0.43192246556282043,
7
+ "eval_runtime": 0.5887,
8
+ "eval_samples": 408,
9
+ "eval_samples_per_second": 693.086,
10
+ "eval_steps_per_second": 1.699
11
+ }
nlu/glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h32m05,r=4,s43/ft/adapter_config.json ADDED
@@ -0,0 +1,26 @@
1
+ {
2
+ "T": 1.0,
3
+ "base_model_name_or_path": "microsoft/deberta-v3-base",
4
+ "bias": "none",
5
+ "drop_out": 0.0,
6
+ "inference_mode": false,
7
+ "layers_to_transform": null,
8
+ "modules_to_save": [
9
+ "classifier",
10
+ "pooler"
11
+ ],
12
+ "num_rotations": 1,
13
+ "peft_type": "ROTATION",
14
+ "r": 4,
15
+ "revision": null,
16
+ "target_modules": [
17
+ "key_proj",
18
+ "output.dense",
19
+ "query_proj",
20
+ "attention.output.dense",
21
+ "intermediate.dense",
22
+ "value_proj"
23
+ ],
24
+ "target_modules_to_skip": null,
25
+ "task_type": "SEQ_CLS"
26
+ }
nlu/glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h32m05,r=4,s43/ft/added_tokens.json ADDED
@@ -0,0 +1,3 @@
1
+ {
2
+ "[MASK]": 128000
3
+ }
nlu/glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h32m05,r=4,s43/ft/special_tokens_map.json ADDED
@@ -0,0 +1,15 @@
1
+ {
2
+ "bos_token": "[CLS]",
3
+ "cls_token": "[CLS]",
4
+ "eos_token": "[SEP]",
5
+ "mask_token": "[MASK]",
6
+ "pad_token": "[PAD]",
7
+ "sep_token": "[SEP]",
8
+ "unk_token": {
9
+ "content": "[UNK]",
10
+ "lstrip": false,
11
+ "normalized": true,
12
+ "rstrip": false,
13
+ "single_word": false
14
+ }
15
+ }
nlu/glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h32m05,r=4,s43/ft/spm.model ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c679fbf93643d19aab7ee10c0b99e460bdbc02fedf34b92b05af343b4af586fd
3
+ size 2464616
nlu/glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h32m05,r=4,s43/ft/tokenizer.json ADDED
The diff for this file is too large to render.
 
nlu/glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h32m05,r=4,s43/ft/tokenizer_config.json ADDED
@@ -0,0 +1,60 @@
1
+ {
2
+ "added_tokens_decoder": {
3
+ "0": {
4
+ "content": "[PAD]",
5
+ "lstrip": false,
6
+ "normalized": false,
7
+ "rstrip": false,
8
+ "single_word": false,
9
+ "special": true
10
+ },
11
+ "1": {
12
+ "content": "[CLS]",
13
+ "lstrip": false,
14
+ "normalized": false,
15
+ "rstrip": false,
16
+ "single_word": false,
17
+ "special": true
18
+ },
19
+ "2": {
20
+ "content": "[SEP]",
21
+ "lstrip": false,
22
+ "normalized": false,
23
+ "rstrip": false,
24
+ "single_word": false,
25
+ "special": true
26
+ },
27
+ "3": {
28
+ "content": "[UNK]",
29
+ "lstrip": false,
30
+ "normalized": true,
31
+ "rstrip": false,
32
+ "single_word": false,
33
+ "special": true
34
+ },
35
+ "128000": {
36
+ "content": "[MASK]",
37
+ "lstrip": false,
38
+ "normalized": false,
39
+ "rstrip": false,
40
+ "single_word": false,
41
+ "special": true
42
+ }
43
+ },
44
+ "bos_token": "[CLS]",
45
+ "clean_up_tokenization_spaces": false,
46
+ "cls_token": "[CLS]",
47
+ "do_lower_case": false,
48
+ "eos_token": "[SEP]",
49
+ "extra_special_tokens": {},
50
+ "mask_token": "[MASK]",
51
+ "model_max_length": 512,
52
+ "pad_token": "[PAD]",
53
+ "padding_side": "right",
54
+ "sep_token": "[SEP]",
55
+ "sp_model_kwargs": {},
56
+ "split_by_punct": false,
57
+ "tokenizer_class": "DebertaV2Tokenizer",
58
+ "unk_token": "[UNK]",
59
+ "vocab_type": "spm"
60
+ }
nlu/glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h32m05,r=4,s43/ft2/adapter_config.json ADDED
@@ -0,0 +1,26 @@
1
+ {
2
+ "T": 1.0,
3
+ "base_model_name_or_path": "microsoft/deberta-v3-base",
4
+ "bias": "none",
5
+ "drop_out": 0.0,
6
+ "inference_mode": true,
7
+ "layers_to_transform": null,
8
+ "modules_to_save": [
9
+ "classifier",
10
+ "pooler"
11
+ ],
12
+ "num_rotations": 1,
13
+ "peft_type": "ROTATION",
14
+ "r": 4,
15
+ "revision": null,
16
+ "target_modules": [
17
+ "key_proj",
18
+ "output.dense",
19
+ "query_proj",
20
+ "attention.output.dense",
21
+ "intermediate.dense",
22
+ "value_proj"
23
+ ],
24
+ "target_modules_to_skip": null,
25
+ "task_type": "SEQ_CLS"
26
+ }
nlu/glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h32m05,r=4,s43/ft2/adapter_model.bin ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:8745c7b476e73f1c87b67d5afdac5acce7282fe9eca17ab68d40f16b9eb2e5ca
3
+ size 7449859
nlu/glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h32m05,r=4,s43/trainer_state.json ADDED
@@ -0,0 +1,655 @@
1
+ {
2
+ "best_global_step": 1500,
3
+ "best_metric": 0.9191176470588235,
4
+ "best_model_checkpoint": "./glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h32m05,r=4,s43/checkpoint-1500",
5
+ "epoch": 30.0,
6
+ "eval_steps": 100,
7
+ "global_step": 3450,
8
+ "is_hyper_param_search": false,
9
+ "is_local_process_zero": true,
10
+ "is_world_process_zero": true,
11
+ "log_history": [
12
+ {
13
+ "epoch": 0.8695652173913043,
14
+ "grad_norm": 1.5893659591674805,
15
+ "learning_rate": 0.00198,
16
+ "loss": 0.5836,
17
+ "step": 100
18
+ },
19
+ {
20
+ "epoch": 0.8695652173913043,
21
+ "eval_accuracy": 0.7426470588235294,
22
+ "eval_combined_score": 0.7916573685619923,
23
+ "eval_f1": 0.8406676783004552,
24
+ "eval_loss": 0.7225454449653625,
25
+ "eval_runtime": 0.6841,
26
+ "eval_samples_per_second": 596.379,
27
+ "eval_steps_per_second": 1.462,
28
+ "step": 100
29
+ },
30
+ {
31
+ "epoch": 1.7391304347826086,
32
+ "grad_norm": 1.7734744548797607,
33
+ "learning_rate": 0.0019956152348614225,
34
+ "loss": 0.3627,
35
+ "step": 200
36
+ },
37
+ {
38
+ "epoch": 1.7391304347826086,
39
+ "eval_accuracy": 0.8848039215686274,
40
+ "eval_combined_score": 0.9019544564813877,
41
+ "eval_f1": 0.919104991394148,
42
+ "eval_loss": 0.29524144530296326,
43
+ "eval_runtime": 0.5761,
44
+ "eval_samples_per_second": 708.244,
45
+ "eval_steps_per_second": 1.736,
46
+ "step": 200
47
+ },
48
+ {
49
+ "epoch": 2.608695652173913,
50
+ "grad_norm": 3.618133068084717,
51
+ "learning_rate": 0.0019823226955326743,
52
+ "loss": 0.2857,
53
+ "step": 300
54
+ },
55
+ {
56
+ "epoch": 2.608695652173913,
57
+ "eval_accuracy": 0.8872549019607843,
58
+ "eval_combined_score": 0.9029914085775652,
59
+ "eval_f1": 0.9187279151943463,
60
+ "eval_loss": 0.33274513483047485,
61
+ "eval_runtime": 0.5751,
62
+ "eval_samples_per_second": 709.477,
63
+ "eval_steps_per_second": 1.739,
64
+ "step": 300
65
+ },
66
+ {
67
+ "epoch": 3.4782608695652173,
68
+ "grad_norm": 1.351619839668274,
69
+ "learning_rate": 0.0019602408686963785,
70
+ "loss": 0.225,
71
+ "step": 400
72
+ },
73
+ {
74
+ "epoch": 3.4782608695652173,
75
+ "eval_accuracy": 0.9019607843137255,
76
+ "eval_combined_score": 0.9160153571918278,
77
+ "eval_f1": 0.9300699300699301,
78
+ "eval_loss": 0.28772440552711487,
79
+ "eval_runtime": 0.5909,
80
+ "eval_samples_per_second": 690.416,
81
+ "eval_steps_per_second": 1.692,
82
+ "step": 400
83
+ },
84
+ {
85
+ "epoch": 4.3478260869565215,
86
+ "grad_norm": 3.242846965789795,
87
+ "learning_rate": 0.0019295673304908422,
88
+ "loss": 0.1716,
89
+ "step": 500
90
+ },
91
+ {
92
+ "epoch": 4.3478260869565215,
93
+ "eval_accuracy": 0.8700980392156863,
94
+ "eval_combined_score": 0.8903609926263929,
95
+ "eval_f1": 0.9106239460370995,
96
+ "eval_loss": 0.42163893580436707,
97
+ "eval_runtime": 0.5909,
98
+ "eval_samples_per_second": 690.434,
99
+ "eval_steps_per_second": 1.692,
100
+ "step": 500
101
+ },
102
+ {
103
+ "epoch": 5.217391304347826,
104
+ "grad_norm": 7.72324275970459,
105
+ "learning_rate": 0.001890576530999922,
106
+ "loss": 0.1191,
107
+ "step": 600
108
+ },
109
+ {
110
+ "epoch": 5.217391304347826,
111
+ "eval_accuracy": 0.9019607843137255,
112
+ "eval_combined_score": 0.9162581699346406,
113
+ "eval_f1": 0.9305555555555556,
114
+ "eval_loss": 0.6345019340515137,
115
+ "eval_runtime": 0.5872,
116
+ "eval_samples_per_second": 694.841,
117
+ "eval_steps_per_second": 1.703,
118
+ "step": 600
119
+ },
120
+ {
121
+ "epoch": 6.086956521739131,
122
+ "grad_norm": 11.79227352142334,
123
+ "learning_rate": 0.0018436173386234143,
124
+ "loss": 0.1218,
125
+ "step": 700
126
+ },
127
+ {
128
+ "epoch": 6.086956521739131,
129
+ "eval_accuracy": 0.8651960784313726,
130
+ "eval_combined_score": 0.8863795518207283,
131
+ "eval_f1": 0.907563025210084,
132
+ "eval_loss": 0.6440550088882446,
133
+ "eval_runtime": 0.592,
134
+ "eval_samples_per_second": 689.203,
135
+ "eval_steps_per_second": 1.689,
136
+ "step": 700
137
+ },
138
+ {
139
+ "epoch": 6.956521739130435,
140
+ "grad_norm": 6.427208423614502,
141
+ "learning_rate": 0.001789109918592965,
142
+ "loss": 0.0815,
143
+ "step": 800
144
+ },
145
+ {
146
+ "epoch": 6.956521739130435,
147
+ "eval_accuracy": 0.8774509803921569,
148
+ "eval_combined_score": 0.8959172710179962,
149
+ "eval_f1": 0.9143835616438356,
150
+ "eval_loss": 0.5999459028244019,
151
+ "eval_runtime": 0.5867,
152
+ "eval_samples_per_second": 695.377,
153
+ "eval_steps_per_second": 1.704,
154
+ "step": 800
155
+ },
156
+ {
157
+ "epoch": 7.826086956521739,
158
+ "grad_norm": 4.7944841384887695,
159
+ "learning_rate": 0.001727541973562826,
160
+ "loss": 0.0735,
161
+ "step": 900
162
+ },
163
+ {
164
+ "epoch": 7.826086956521739,
165
+ "eval_accuracy": 0.8823529411764706,
166
+ "eval_combined_score": 0.9004985044865403,
167
+ "eval_f1": 0.9186440677966101,
168
+ "eval_loss": 0.5001353025436401,
169
+ "eval_runtime": 0.5871,
170
+ "eval_samples_per_second": 694.956,
171
+ "eval_steps_per_second": 1.703,
172
+ "step": 900
173
+ },
174
+ {
175
+ "epoch": 8.695652173913043,
176
+ "grad_norm": 1.232080340385437,
177
+ "learning_rate": 0.001659464379912601,
178
+ "loss": 0.0396,
179
+ "step": 1000
180
+ },
181
+ {
182
+ "epoch": 8.695652173913043,
183
+ "eval_accuracy": 0.8872549019607843,
184
+ "eval_combined_score": 0.903972278566599,
185
+ "eval_f1": 0.9206896551724137,
186
+ "eval_loss": 0.6272836327552795,
187
+ "eval_runtime": 0.5868,
188
+ "eval_samples_per_second": 695.335,
189
+ "eval_steps_per_second": 1.704,
190
+ "step": 1000
191
+ },
192
+ {
193
+ "epoch": 9.565217391304348,
194
+ "grad_norm": 0.12160742282867432,
195
+ "learning_rate": 0.0015854862588059726,
196
+ "loss": 0.0454,
197
+ "step": 1100
198
+ },
199
+ {
200
+ "epoch": 9.565217391304348,
201
+ "eval_accuracy": 0.9044117647058824,
202
+ "eval_combined_score": 0.917196905692259,
203
+ "eval_f1": 0.9299820466786356,
204
+ "eval_loss": 0.5900373458862305,
205
+ "eval_runtime": 0.5853,
206
+ "eval_samples_per_second": 697.098,
207
+ "eval_steps_per_second": 1.709,
208
+ "step": 1100
209
+ },
210
+ {
211
+ "epoch": 10.434782608695652,
212
+ "grad_norm": 3.2582204341888428,
213
+ "learning_rate": 0.0015062695261068735,
214
+ "loss": 0.0505,
215
+ "step": 1200
216
+ },
217
+ {
218
+ "epoch": 10.434782608695652,
219
+ "eval_accuracy": 0.8921568627450981,
220
+ "eval_combined_score": 0.9080161476355249,
221
+ "eval_f1": 0.9238754325259516,
222
+ "eval_loss": 0.7707640528678894,
223
+ "eval_runtime": 0.5885,
224
+ "eval_samples_per_second": 693.277,
225
+ "eval_steps_per_second": 1.699,
226
+ "step": 1200
227
+ },
228
+ {
229
+ "epoch": 11.304347826086957,
230
+ "grad_norm": 0.03892024978995323,
231
+ "learning_rate": 0.0014225229699174897,
232
+ "loss": 0.0276,
233
+ "step": 1300
234
+ },
235
+ {
236
+ "epoch": 11.304347826086957,
237
+ "eval_accuracy": 0.9044117647058824,
238
+ "eval_combined_score": 0.9180552693932214,
239
+ "eval_f1": 0.9316987740805605,
240
+ "eval_loss": 0.7422796487808228,
241
+ "eval_runtime": 0.5862,
242
+ "eval_samples_per_second": 696.003,
243
+ "eval_steps_per_second": 1.706,
244
+ "step": 1300
245
+ },
246
+ {
247
+ "epoch": 12.173913043478262,
248
+ "grad_norm": 0.02271677926182747,
249
+ "learning_rate": 0.0013349959087290495,
250
+ "loss": 0.0306,
251
+ "step": 1400
252
+ },
253
+ {
254
+ "epoch": 12.173913043478262,
255
+ "eval_accuracy": 0.9142156862745098,
256
+ "eval_combined_score": 0.9260243617873437,
257
+ "eval_f1": 0.9378330373001776,
258
+ "eval_loss": 0.5293979048728943,
259
+ "eval_runtime": 0.5891,
260
+ "eval_samples_per_second": 692.594,
261
+ "eval_steps_per_second": 1.698,
262
+ "step": 1400
263
+ },
264
+ {
265
+ "epoch": 13.043478260869565,
266
+ "grad_norm": 1.8397374153137207,
267
+ "learning_rate": 0.001244471486928804,
268
+ "loss": 0.0282,
269
+ "step": 1500
270
+ },
271
+ {
272
+ "epoch": 13.043478260869565,
273
+ "eval_accuracy": 0.9191176470588235,
274
+ "eval_combined_score": 0.9303552837064029,
275
+ "eval_f1": 0.9415929203539823,
276
+ "eval_loss": 0.43192246556282043,
277
+ "eval_runtime": 0.5874,
278
+ "eval_samples_per_second": 694.633,
279
+ "eval_steps_per_second": 1.703,
280
+ "step": 1500
281
+ },
282
+ {
283
+ "epoch": 13.91304347826087,
284
+ "grad_norm": 0.9747097492218018,
285
+ "learning_rate": 0.0011517596676513472,
286
+ "loss": 0.0207,
287
+ "step": 1600
288
+ },
289
+ {
290
+ "epoch": 13.91304347826087,
291
+ "eval_accuracy": 0.9044117647058824,
292
+ "eval_combined_score": 0.917692608016658,
293
+ "eval_f1": 0.9309734513274336,
294
+ "eval_loss": 0.4927181899547577,
295
+ "eval_runtime": 0.5912,
296
+ "eval_samples_per_second": 690.121,
297
+ "eval_steps_per_second": 1.691,
298
+ "step": 1600
299
+ },
300
+ {
301
+ "epoch": 14.782608695652174,
302
+ "grad_norm": 0.00376587244682014,
303
+ "learning_rate": 0.001057689985670419,
304
+ "loss": 0.0153,
305
+ "step": 1700
306
+ },
307
+ {
308
+ "epoch": 14.782608695652174,
309
+ "eval_accuracy": 0.9044117647058824,
310
+ "eval_combined_score": 0.9179352320893208,
311
+ "eval_f1": 0.9314586994727593,
312
+ "eval_loss": 0.8056793808937073,
313
+ "eval_runtime": 0.5879,
314
+ "eval_samples_per_second": 694.04,
315
+ "eval_steps_per_second": 1.701,
316
+ "step": 1700
317
+ },
318
+ {
319
+ "epoch": 15.652173913043478,
320
+ "grad_norm": 7.063637733459473,
321
+ "learning_rate": 0.0009631041251743559,
322
+ "loss": 0.014,
323
+ "step": 1800
324
+ },
325
+ {
326
+ "epoch": 15.652173913043478,
327
+ "eval_accuracy": 0.8995098039215687,
328
+ "eval_combined_score": 0.9142263057736093,
329
+ "eval_f1": 0.92894280762565,
330
+ "eval_loss": 0.6877063512802124,
331
+ "eval_runtime": 0.5883,
332
+ "eval_samples_per_second": 693.581,
333
+ "eval_steps_per_second": 1.7,
334
+ "step": 1800
335
+ },
336
+ {
337
+ "epoch": 16.52173913043478,
338
+ "grad_norm": 0.005422030575573444,
339
+ "learning_rate": 0.0008688483888352111,
340
+ "loss": 0.0127,
341
+ "step": 1900
342
+ },
343
+ {
344
+ "epoch": 16.52173913043478,
345
+ "eval_accuracy": 0.9044117647058824,
346
+ "eval_combined_score": 0.9179352320893208,
347
+ "eval_f1": 0.9314586994727593,
348
+ "eval_loss": 0.8292607069015503,
349
+ "eval_runtime": 0.5877,
350
+ "eval_samples_per_second": 694.209,
351
+ "eval_steps_per_second": 1.701,
352
+ "step": 1900
353
+ },
354
+ {
355
+ "epoch": 17.391304347826086,
356
+ "grad_norm": 0.0020996863022446632,
357
+ "learning_rate": 0.000775766125554205,
358
+ "loss": 0.0143,
359
+ "step": 2000
360
+ },
361
+ {
362
+ "epoch": 17.391304347826086,
363
+ "eval_accuracy": 0.9068627450980392,
364
+ "eval_combined_score": 0.9200980392156863,
365
+ "eval_f1": 0.9333333333333333,
366
+ "eval_loss": 0.7557045221328735,
367
+ "eval_runtime": 0.5871,
368
+ "eval_samples_per_second": 694.999,
369
+ "eval_steps_per_second": 1.703,
370
+ "step": 2000
371
+ },
372
+ {
373
+ "epoch": 18.26086956521739,
374
+ "grad_norm": 0.01305614784359932,
375
+ "learning_rate": 0.0006846901846358999,
376
+ "loss": 0.0146,
377
+ "step": 2100
378
+ },
379
+ {
380
+ "epoch": 18.26086956521739,
381
+ "eval_accuracy": 0.9019607843137255,
382
+ "eval_combined_score": 0.9161371865819499,
383
+ "eval_f1": 0.9303135888501742,
384
+ "eval_loss": 0.6693629622459412,
385
+ "eval_runtime": 0.5871,
386
+ "eval_samples_per_second": 694.989,
387
+ "eval_steps_per_second": 1.703,
388
+ "step": 2100
389
+ },
390
+ {
391
+ "epoch": 19.130434782608695,
392
+ "grad_norm": 0.2836249768733978,
393
+ "learning_rate": 0.0005964354639070397,
394
+ "loss": 0.0071,
395
+ "step": 2200
396
+ },
397
+ {
398
+ "epoch": 19.130434782608695,
399
+ "eval_accuracy": 0.9142156862745098,
400
+ "eval_combined_score": 0.9262436456063907,
401
+ "eval_f1": 0.9382716049382716,
402
+ "eval_loss": 0.6643729209899902,
403
+ "eval_runtime": 0.5924,
404
+ "eval_samples_per_second": 688.697,
405
+ "eval_steps_per_second": 1.688,
406
+ "step": 2200
407
+ },
408
+ {
409
+ "epoch": 20.0,
410
+ "grad_norm": 0.0006859056884422898,
411
+ "learning_rate": 0.0005117916184554203,
412
+ "loss": 0.004,
413
+ "step": 2300
414
+ },
415
+ {
416
+ "epoch": 20.0,
417
+ "eval_accuracy": 0.9068627450980392,
418
+ "eval_combined_score": 0.9202145893322364,
419
+ "eval_f1": 0.9335664335664335,
420
+ "eval_loss": 0.8549216985702515,
421
+ "eval_runtime": 0.5862,
422
+ "eval_samples_per_second": 695.967,
423
+ "eval_steps_per_second": 1.706,
424
+ "step": 2300
425
+ },
426
+ {
427
+ "epoch": 20.869565217391305,
428
+ "grad_norm": 0.00031100164051167667,
429
+ "learning_rate": 0.0004315159952270119,
430
+ "loss": 0.0034,
431
+ "step": 2400
432
+ },
433
+ {
434
+ "epoch": 20.869565217391305,
435
+ "eval_accuracy": 0.9142156862745098,
436
+ "eval_combined_score": 0.9261343918098213,
437
+ "eval_f1": 0.9380530973451328,
438
+ "eval_loss": 0.8290520310401917,
439
+ "eval_runtime": 0.5881,
440
+ "eval_samples_per_second": 693.711,
441
+ "eval_steps_per_second": 1.7,
442
+ "step": 2400
443
+ },
444
+ {
445
+ "epoch": 21.73913043478261,
446
+ "grad_norm": 0.0001474908203817904,
447
+ "learning_rate": 0.0003563268566987077,
448
+ "loss": 0.0026,
449
+ "step": 2500
450
+ },
451
+ {
452
+ "epoch": 21.73913043478261,
453
+ "eval_accuracy": 0.9117647058823529,
454
+ "eval_combined_score": 0.924192212096106,
455
+ "eval_f1": 0.9366197183098591,
456
+ "eval_loss": 0.8578133583068848,
457
+ "eval_runtime": 0.5843,
458
+ "eval_samples_per_second": 698.319,
459
+ "eval_steps_per_second": 1.712,
460
+ "step": 2500
461
+ },
462
+ {
463
+ "epoch": 22.608695652173914,
464
+ "grad_norm": 0.0011402611853554845,
465
+ "learning_rate": 0.0002868969542575783,
466
+ "loss": 0.005,
467
+ "step": 2600
468
+ },
469
+ {
470
+ "epoch": 22.608695652173914,
471
+ "eval_accuracy": 0.9142156862745098,
472
+ "eval_combined_score": 0.9261343918098213,
473
+ "eval_f1": 0.9380530973451328,
474
+ "eval_loss": 0.8785954117774963,
475
+ "eval_runtime": 0.5886,
476
+ "eval_samples_per_second": 693.167,
477
+ "eval_steps_per_second": 1.699,
478
+ "step": 2600
479
+ },
480
+ {
481
+ "epoch": 23.47826086956522,
482
+ "grad_norm": 9.641557699069381e-05,
483
+ "learning_rate": 0.00022384750878852333,
484
+ "loss": 0.0015,
485
+ "step": 2700
486
+ },
487
+ {
488
+ "epoch": 23.47826086956522,
489
+ "eval_accuracy": 0.9191176470588235,
490
+ "eval_combined_score": 0.9304582944288826,
491
+ "eval_f1": 0.9417989417989417,
492
+ "eval_loss": 0.9162914752960205,
493
+ "eval_runtime": 0.6263,
494
+ "eval_samples_per_second": 651.461,
495
+ "eval_steps_per_second": 1.597,
496
+ "step": 2700
497
+ },
498
+ {
499
+ "epoch": 24.347826086956523,
500
+ "grad_norm": 0.000294726574793458,
501
+ "learning_rate": 0.00016774265232874353,
502
+ "loss": 0.006,
503
+ "step": 2800
504
+ },
505
+ {
506
+ "epoch": 24.347826086956523,
507
+ "eval_accuracy": 0.9142156862745098,
508
+ "eval_combined_score": 0.926352131362211,
509
+ "eval_f1": 0.9384885764499121,
510
+ "eval_loss": 0.858831524848938,
511
+ "eval_runtime": 0.5747,
512
+ "eval_samples_per_second": 709.941,
513
+ "eval_steps_per_second": 1.74,
514
+ "step": 2800
515
+ },
516
+ {
517
+ "epoch": 25.217391304347824,
518
+ "grad_norm": 0.0001405712537234649,
519
+ "learning_rate": 0.00011908438052207082,
520
+ "loss": 0.0024,
521
+ "step": 2900
522
+ },
523
+ {
524
+ "epoch": 25.217391304347824,
525
+ "eval_accuracy": 0.9068627450980392,
526
+ "eval_combined_score": 0.9202145893322364,
527
+ "eval_f1": 0.9335664335664335,
528
+ "eval_loss": 0.8538783192634583,
529
+ "eval_runtime": 0.5853,
530
+ "eval_samples_per_second": 697.132,
531
+ "eval_steps_per_second": 1.709,
532
+ "step": 2900
533
+ },
534
+ {
535
+ "epoch": 26.08695652173913,
536
+ "grad_norm": 0.0005564729799516499,
537
+ "learning_rate": 7.830806103584498e-05,
538
+ "loss": 0.0025,
539
+ "step": 3000
540
+ },
541
+ {
542
+ "epoch": 26.08695652173913,
543
+ "eval_accuracy": 0.9068627450980392,
544
+ "eval_combined_score": 0.9202145893322364,
545
+ "eval_f1": 0.9335664335664335,
546
+ "eval_loss": 0.866047203540802,
547
+ "eval_runtime": 0.5862,
548
+ "eval_samples_per_second": 695.983,
549
+ "eval_steps_per_second": 1.706,
550
+ "step": 3000
551
+ },
552
+ {
553
+ "epoch": 26.956521739130434,
554
+ "grad_norm": 8.019042434170842e-05,
555
+ "learning_rate": 4.577853812857102e-05,
556
+ "loss": 0.0016,
557
+ "step": 3100
558
+ },
559
+ {
560
+ "epoch": 26.956521739130434,
561
+ "eval_accuracy": 0.9068627450980392,
562
+ "eval_combined_score": 0.9202145893322364,
563
+ "eval_f1": 0.9335664335664335,
564
+ "eval_loss": 0.8777211308479309,
565
+ "eval_runtime": 0.5876,
566
+ "eval_samples_per_second": 694.315,
567
+ "eval_steps_per_second": 1.702,
568
+ "step": 3100
569
+ },
570
+ {
571
+ "epoch": 27.82608695652174,
572
+ "grad_norm": 0.003190132789313793,
573
+ "learning_rate": 2.178686822255904e-05,
574
+ "loss": 0.0021,
575
+ "step": 3200
576
+ },
577
+ {
578
+ "epoch": 27.82608695652174,
579
+ "eval_accuracy": 0.9142156862745098,
580
+ "eval_combined_score": 0.926352131362211,
581
+ "eval_f1": 0.9384885764499121,
582
+ "eval_loss": 0.8823443651199341,
583
+ "eval_runtime": 0.5835,
584
+ "eval_samples_per_second": 699.276,
585
+ "eval_steps_per_second": 1.714,
586
+ "step": 3200
587
+ },
588
+ {
589
+ "epoch": 28.695652173913043,
590
+ "grad_norm": 0.00853230245411396,
591
+ "learning_rate": 6.547715689861789e-06,
592
+ "loss": 0.0026,
593
+ "step": 3300
594
+ },
595
+ {
596
+ "epoch": 28.695652173913043,
597
+ "eval_accuracy": 0.9142156862745098,
598
+ "eval_combined_score": 0.926352131362211,
599
+ "eval_f1": 0.9384885764499121,
600
+ "eval_loss": 0.8846958875656128,
601
+ "eval_runtime": 0.5855,
602
+ "eval_samples_per_second": 696.884,
603
+ "eval_steps_per_second": 1.708,
604
+ "step": 3300
605
+ },
606
+ {
607
+ "epoch": 29.565217391304348,
608
+ "grad_norm": 0.00021078009740449488,
609
+ "learning_rate": 1.97432152599486e-07,
610
+ "loss": 0.0022,
611
+ "step": 3400
612
+ },
613
+ {
614
+ "epoch": 29.565217391304348,
615
+ "eval_accuracy": 0.9142156862745098,
616
+ "eval_combined_score": 0.926352131362211,
617
+ "eval_f1": 0.9384885764499121,
618
+ "eval_loss": 0.8843898177146912,
619
+ "eval_runtime": 0.5846,
620
+ "eval_samples_per_second": 697.894,
621
+ "eval_steps_per_second": 1.711,
622
+ "step": 3400
623
+ },
624
+ {
625
+ "epoch": 30.0,
626
+ "step": 3450,
627
+ "total_flos": 1.83610838283264e+16,
628
+ "train_loss": 0.0690272841764533,
629
+ "train_runtime": 676.117,
630
+ "train_samples_per_second": 162.753,
631
+ "train_steps_per_second": 5.103
632
+ }
633
+ ],
634
+ "logging_steps": 100,
635
+ "max_steps": 3450,
636
+ "num_input_tokens_seen": 0,
637
+ "num_train_epochs": 30,
638
+ "save_steps": 100,
639
+ "stateful_callbacks": {
640
+ "TrainerControl": {
641
+ "args": {
642
+ "should_epoch_stop": false,
643
+ "should_evaluate": false,
644
+ "should_log": false,
645
+ "should_save": true,
646
+ "should_training_stop": true
647
+ },
648
+ "attributes": {}
649
+ }
650
+ },
651
+ "total_flos": 1.83610838283264e+16,
652
+ "train_batch_size": 32,
653
+ "trial_name": null,
654
+ "trial_params": null
655
+ }
nlu/glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h43m47,r=4,s44/MRPC.tsv ADDED
@@ -0,0 +1,1726 @@
1
+ index prediction
2
+ 0 1
3
+ 1 1
4
+ 2 1
5
+ 3 1
6
+ 4 0
7
+ 5 1
8
+ 6 0
9
+ 7 0
10
+ 8 1
11
+ 9 0
12
+ 10 1
13
+ 11 1
14
+ 12 1
15
+ 13 0
16
+ 14 1
17
+ 15 1
18
+ 16 1
19
+ 17 1
20
+ 18 1
21
+ 19 1
22
+ 20 1
23
+ 21 1
24
+ 22 1
25
+ 23 1
26
+ 24 0
27
+ 25 1
28
+ 26 1
29
+ 27 1
30
+ 28 0
31
+ 29 1
32
+ 30 1
33
+ 31 0
34
+ 32 0
35
+ 33 0
36
+ 34 1
37
+ 35 0
38
+ 36 1
39
+ 37 1
40
+ 38 0
41
+ 39 1
42
+ 40 1
43
+ 41 1
44
+ 42 0
45
+ 43 1
46
+ 44 1
47
+ 45 0
48
+ 46 0
49
+ 47 0
50
+ 48 1
51
+ 49 1
52
+ 50 1
53
+ 51 1
54
+ 52 1
55
+ 53 1
56
+ 54 0
57
+ 55 1
58
+ 56 1
59
+ 57 1
60
+ 58 0
61
+ 59 1
62
+ 60 1
63
+ 61 1
64
+ 62 1
65
+ 63 1
66
+ 64 1
67
+ 65 1
68
+ 66 1
69
+ 67 1
70
+ 68 1
71
+ 69 1
72
+ 70 0
73
+ 71 1
74
+ 72 1
75
+ 73 0
76
+ 74 1
77
+ 75 0
78
+ 76 1
79
+ 77 0
80
+ 78 0
81
+ 79 0
82
+ 80 0
83
+ 81 0
84
+ 82 1
85
+ 83 0
86
+ 84 1
87
+ 85 0
88
+ 86 1
89
+ 87 1
90
+ 88 0
91
+ 89 1
92
+ 90 0
93
+ 91 1
94
+ 92 1
95
+ 93 1
96
+ 94 1
97
+ 95 0
98
+ 96 1
99
+ 97 1
100
+ 98 0
101
+ 99 1
102
+ 100 1
103
+ 101 1
104
+ 102 1
105
+ 103 1
106
+ 104 1
107
+ 105 0
108
+ 106 0
109
+ 107 1
110
+ 108 1
111
+ 109 1
112
+ 110 1
113
+ 111 0
114
+ 112 0
115
+ 113 1
116
+ 114 1
117
+ 115 0
118
+ 116 0
119
+ 117 0
120
+ 118 1
121
+ 119 1
122
+ 120 1
123
+ 121 0
124
+ 122 1
125
+ 123 0
126
+ 124 1
127
+ 125 1
128
+ 126 1
129
+ 127 1
130
+ 128 0
131
+ 129 1
132
+ 130 0
133
+ 131 1
134
+ 132 1
135
+ 133 0
136
+ 134 1
137
+ 135 0
138
+ 136 1
139
+ 137 1
140
+ 138 1
141
+ 139 1
142
+ 140 1
143
+ 141 1
144
+ 142 1
145
+ 143 1
146
+ 144 1
147
+ 145 1
148
+ 146 0
149
+ 147 1
150
+ 148 1
151
+ 149 1
152
+ 150 1
153
+ 151 1
154
+ 152 0
155
+ 153 0
156
+ 154 1
157
+ 155 1
158
+ 156 1
159
+ 157 1
160
+ 158 1
161
+ 159 1
162
+ 160 1
163
+ 161 1
164
+ 162 1
165
+ 163 0
166
+ 164 0
167
+ 165 1
168
+ 166 1
169
+ 167 0
170
+ 168 1
171
+ 169 1
172
+ 170 1
173
+ 171 1
174
+ 172 1
175
+ 173 0
176
+ 174 0
177
+ 175 0
178
+ 176 0
179
+ 177 0
180
+ 178 1
181
+ 179 1
182
+ 180 0
183
+ 181 0
184
+ 182 0
185
+ 183 1
186
+ 184 1
187
+ 185 1
188
+ 186 0
189
+ 187 1
190
+ 188 1
191
+ 189 1
192
+ 190 0
193
+ 191 0
194
+ 192 1
195
+ 193 1
196
+ 194 0
197
+ 195 1
198
+ 196 0
199
+ 197 1
200
+ 198 1
201
+ 199 0
202
+ 200 0
203
+ 201 1
204
+ 202 1
205
+ 203 1
206
+ 204 1
207
+ 205 0
208
+ 206 1
209
+ 207 1
210
+ 208 0
211
+ 209 1
212
+ 210 0
213
+ 211 1
214
+ 212 0
215
+ 213 0
216
+ 214 1
217
+ 215 0
218
+ 216 1
219
+ 217 1
220
+ 218 1
221
+ 219 1
222
+ 220 1
223
+ 221 0
224
+ 222 1
225
+ 223 0
226
+ 224 1
227
+ 225 1
228
+ 226 1
229
+ 227 0
230
+ 228 0
231
+ 229 1
232
+ 230 1
233
+ 231 1
234
+ 232 1
235
+ 233 1
236
+ 234 0
237
+ 235 1
238
+ 236 1
239
+ 237 1
240
+ 238 0
241
+ 239 0
242
+ 240 1
243
+ 241 1
244
+ 242 1
245
+ 243 1
246
+ 244 0
247
+ 245 1
248
+ 246 1
249
+ 247 0
250
+ 248 1
251
+ 249 1
252
+ 250 1
253
+ 251 1
254
+ 252 1
255
+ 253 0
256
+ 254 1
257
+ 255 1
258
+ 256 1
259
+ 257 1
260
+ 258 1
261
+ 259 1
262
+ 260 1
263
+ 261 1
264
+ 262 1
265
+ 263 1
266
+ 264 1
267
+ 265 1
268
+ 266 0
269
+ 267 0
270
+ 268 1
271
+ 269 1
272
+ 270 1
273
+ 271 0
274
+ 272 1
275
+ 273 0
276
+ 274 0
277
+ 275 0
278
+ 276 1
279
+ 277 1
280
+ 278 1
281
+ 279 1
282
+ 280 1
283
+ 281 0
284
+ 282 1
285
+ 283 1
286
+ 284 1
287
+ 285 0
288
+ 286 1
289
+ 287 0
290
+ 288 0
291
+ 289 0
292
+ 290 0
293
+ 291 1
294
+ 292 1
295
+ 293 1
296
+ 294 1
297
+ 295 0
298
+ 296 0
299
+ 297 0
300
+ 298 1
301
+ 299 1
302
+ 300 1
303
+ 301 1
304
+ 302 0
305
+ 303 1
306
+ 304 1
307
+ 305 0
308
+ 306 1
309
+ 307 1
310
+ 308 0
311
+ 309 1
312
+ 310 1
313
+ 311 1
314
+ 312 0
315
+ 313 1
316
+ 314 0
317
+ 315 1
318
+ 316 1
319
+ 317 1
320
+ 318 1
321
+ 319 0
322
+ 320 1
323
+ 321 1
324
+ 322 1
325
+ 323 1
326
+ 324 1
327
+ 325 1
328
+ 326 1
329
+ 327 0
330
+ 328 1
331
+ 329 0
332
+ 330 0
333
+ 331 0
334
+ 332 1
335
+ 333 1
336
+ 334 1
337
+ 335 0
338
+ 336 1
339
+ 337 1
340
+ 338 1
341
+ 339 1
342
+ 340 1
343
+ 341 1
344
+ 342 1
345
+ 343 1
346
+ 344 0
347
+ 345 1
348
+ 346 1
349
+ 347 0
350
+ 348 0
351
+ 349 0
352
+ 350 1
353
+ 351 1
354
+ 352 0
355
+ 353 1
356
+ 354 0
357
+ 355 0
358
+ 356 0
359
+ 357 0
360
+ 358 1
361
+ 359 0
362
+ 360 0
363
+ 361 0
364
+ 362 0
365
+ 363 1
366
+ 364 0
367
+ 365 1
368
+ 366 0
369
+ 367 0
370
+ 368 1
371
+ 369 1
372
+ 370 1
373
+ 371 1
374
+ 372 1
375
+ 373 1
376
+ 374 1
377
+ 375 0
378
+ 376 0
379
+ 377 1
380
+ 378 0
381
+ 379 0
382
+ 380 1
383
+ 381 1
384
+ 382 1
385
+ 383 1
386
+ 384 1
387
+ 385 0
388
+ 386 1
389
+ 387 0
390
+ 388 1
391
+ 389 1
392
+ 390 1
393
+ 391 0
394
+ 392 0
395
+ 393 1
396
+ 394 1
397
+ 395 1
398
+ 396 1
399
+ 397 1
400
+ 398 0
401
+ 399 0
402
+ 400 1
403
+ 401 1
404
+ 402 1
405
+ 403 1
406
+ 404 1
407
+ 405 1
408
+ 406 0
409
+ 407 1
410
+ 408 1
411
+ 409 1
412
+ 410 1
413
+ 411 0
414
+ 412 1
415
+ 413 1
416
+ 414 1
417
+ 415 0
418
+ 416 0
419
+ 417 1
420
+ 418 1
421
+ 419 1
422
+ 420 1
423
+ 421 1
424
+ 422 1
425
+ 423 1
426
+ 424 0
427
+ 425 0
428
+ 426 0
429
+ 427 1
430
+ 428 0
431
+ 429 1
432
+ 430 0
433
+ 431 1
434
+ 432 1
435
+ 433 1
436
+ 434 1
437
+ 435 1
438
+ 436 1
439
+ 437 0
440
+ 438 1
441
+ 439 0
442
+ 440 0
443
+ 441 0
444
+ 442 1
445
+ 443 1
446
+ 444 1
447
+ 445 1
448
+ 446 1
449
+ 447 1
450
+ 448 1
451
+ 449 1
452
+ 450 1
453
+ 451 1
454
+ 452 1
455
+ 453 1
456
+ 454 1
457
+ 455 0
458
+ 456 1
459
+ 457 1
460
+ 458 0
461
+ 459 1
462
+ 460 1
463
+ 461 1
464
+ 462 1
465
+ 463 0
466
+ 464 0
467
+ 465 0
468
+ 466 1
469
+ 467 1
470
+ 468 1
471
+ 469 1
472
+ 470 0
473
+ 471 1
474
+ 472 1
475
+ 473 1
476
+ 474 1
477
+ 475 1
478
+ 476 1
479
+ 477 1
480
+ 478 1
481
+ 479 0
482
+ 480 1
483
+ 481 1
484
+ 482 1
485
+ 483 1
486
+ 484 0
487
+ 485 1
488
+ 486 1
489
+ 487 1
490
+ 488 1
491
+ 489 1
492
+ 490 1
493
+ 491 1
494
+ 492 1
495
+ 493 1
496
+ 494 0
497
+ 495 1
498
+ 496 1
499
+ 497 1
500
+ 498 0
501
+ 499 1
502
+ 500 1
503
+ 501 1
504
+ 502 1
505
+ 503 1
506
+ 504 1
507
+ 505 1
508
+ 506 0
509
+ 507 1
510
+ 508 0
511
+ 509 0
512
+ 510 1
513
+ 511 1
514
+ 512 1
515
+ 513 1
516
+ 514 1
517
+ 515 1
518
+ 516 1
519
+ 517 0
520
+ 518 1
521
+ 519 1
522
+ 520 1
523
+ 521 1
524
+ 522 0
525
+ 523 1
526
+ 524 1
527
+ 525 1
528
+ 526 0
529
+ 527 1
530
+ 528 1
531
+ 529 1
532
+ 530 1
533
+ 531 1
534
+ 532 1
535
+ 533 0
536
+ 534 1
537
+ 535 1
538
+ 536 1
539
+ 537 1
540
+ 538 0
541
+ 539 1
542
+ 540 1
543
+ 541 1
544
+ 542 1
545
+ 543 1
546
+ 544 1
547
+ 545 1
548
+ 546 1
549
+ 547 0
550
+ 548 0
551
+ 549 0
552
+ 550 0
553
+ 551 1
554
+ 552 0
555
+ 553 1
556
+ 554 0
557
+ 555 1
558
+ 556 0
559
+ 557 1
560
+ 558 1
561
+ 559 1
562
+ 560 1
563
+ 561 0
564
+ 562 0
565
+ 563 0
566
+ 564 1
567
+ 565 1
568
+ 566 1
569
+ 567 1
570
+ 568 0
571
+ 569 0
572
+ 570 1
573
+ 571 1
574
+ 572 1
575
+ 573 1
576
+ 574 1
577
+ 575 1
578
+ 576 1
579
+ 577 0
580
+ 578 1
581
+ 579 0
582
+ 580 1
583
+ 581 0
584
+ 582 1
585
+ 583 1
586
+ 584 1
587
+ 585 1
588
+ 586 0
589
+ 587 1
590
+ 588 1
591
+ 589 1
592
+ 590 1
593
+ 591 1
594
+ 592 1
595
+ 593 0
596
+ 594 0
597
+ 595 1
598
+ 596 1
599
+ 597 0
600
+ 598 1
601
+ 599 1
602
+ 600 1
603
+ 601 1
604
+ 602 1
605
+ 603 1
606
+ 604 1
607
+ 605 1
608
+ 606 1
609
+ 607 0
610
+ 608 0
611
+ 609 1
612
+ 610 1
613
+ 611 1
614
+ 612 0
615
+ 613 1
616
+ 614 1
617
+ 615 1
618
+ 616 1
619
+ 617 0
620
+ 618 1
621
+ 619 1
622
+ 620 1
623
+ 621 0
624
+ 622 1
625
+ 623 0
626
+ 624 1
627
+ 625 0
628
+ 626 1
629
+ 627 1
630
+ 628 1
631
+ 629 1
632
+ 630 1
633
+ 631 1
634
+ 632 1
635
+ 633 0
636
+ 634 0
637
+ 635 1
638
+ 636 1
639
+ 637 1
640
+ 638 1
641
+ 639 1
642
+ 640 1
643
+ 641 1
644
+ 642 1
645
+ 643 1
646
+ 644 1
647
+ 645 1
648
+ 646 1
649
+ 647 1
650
+ 648 1
651
+ 649 0
652
+ 650 1
653
+ 651 0
654
+ 652 0
655
+ 653 1
656
+ 654 1
657
+ 655 1
658
+ 656 1
659
+ 657 1
660
+ 658 1
661
+ 659 1
662
+ 660 1
663
+ 661 1
664
+ 662 1
665
+ 663 1
666
+ 664 1
667
+ 665 0
668
+ 666 1
669
+ 667 0
670
+ 668 0
671
+ 669 0
672
+ 670 1
673
+ 671 1
674
+ 672 0
675
+ 673 1
676
+ 674 1
677
+ 675 0
678
+ 676 1
679
+ 677 1
680
+ 678 1
681
+ 679 1
682
+ 680 0
683
+ 681 1
684
+ 682 1
685
+ 683 1
686
+ 684 1
687
+ 685 1
688
+ 686 1
689
+ 687 0
690
+ 688 1
691
+ 689 1
692
+ 690 1
693
+ 691 0
694
+ 692 0
695
+ 693 1
696
+ 694 1
697
+ 695 0
698
+ 696 1
699
+ 697 1
700
+ 698 0
701
+ 699 1
702
+ 700 0
703
+ 701 1
704
+ 702 1
705
+ 703 0
706
+ 704 1
707
+ 705 0
708
+ 706 0
709
+ 707 0
710
+ 708 1
711
+ 709 1
712
+ 710 0
713
+ 711 0
714
+ 712 1
715
+ 713 1
716
+ 714 1
717
+ 715 1
718
+ 716 1
719
+ 717 0
720
+ 718 0
721
+ 719 1
722
+ 720 1
723
+ 721 1
724
+ 722 0
725
+ 723 0
726
+ 724 1
727
+ 725 0
728
+ 726 1
729
+ 727 1
730
+ 728 1
731
+ 729 0
732
+ 730 1
733
+ 731 1
734
+ 732 1
735
+ 733 0
736
+ 734 1
737
+ 735 1
738
+ 736 1
739
+ 737 1
740
+ 738 1
741
+ 739 1
742
+ 740 1
743
+ 741 0
744
+ 742 1
745
+ 743 1
746
+ 744 1
747
+ 745 0
748
+ 746 1
749
+ 747 0
750
+ 748 1
751
+ 749 0
752
+ 750 1
753
+ 751 1
754
+ 752 1
755
+ 753 0
756
+ 754 1
757
+ 755 1
758
+ 756 1
759
+ 757 0
760
+ 758 1
761
+ 759 1
762
+ 760 1
763
+ 761 1
764
+ 762 1
765
+ 763 1
766
+ 764 1
767
+ 765 1
768
+ 766 1
769
+ 767 1
770
+ 768 1
771
+ 769 1
772
+ 770 1
773
+ 771 1
774
+ 772 1
775
+ 773 1
776
+ 774 1
777
+ 775 1
778
+ 776 1
779
+ 777 1
780
+ 778 0
781
+ 779 1
782
+ 780 0
783
+ 781 0
784
+ 782 1
785
+ 783 0
786
+ 784 1
787
+ 785 1
788
+ 786 1
789
+ 787 0
790
+ 788 1
791
+ 789 1
792
+ 790 1
793
+ 791 1
794
+ 792 0
795
+ 793 1
796
+ 794 1
797
+ 795 1
798
+ 796 0
799
+ 797 0
800
+ 798 1
801
+ 799 0
802
+ 800 0
803
+ 801 0
804
+ 802 1
805
+ 803 1
806
+ 804 0
807
+ 805 0
808
+ 806 1
809
+ 807 0
810
+ 808 1
811
+ 809 0
812
+ 810 1
813
+ 811 1
814
+ 812 0
815
+ 813 1
816
+ 814 0
817
+ 815 1
818
+ 816 0
819
+ 817 0
820
+ 818 0
821
+ 819 1
822
+ 820 1
823
+ 821 1
824
+ 822 1
825
+ 823 1
826
+ 824 1
827
+ 825 1
828
+ 826 1
829
+ 827 0
830
+ 828 0
831
+ 829 1
832
+ 830 1
833
+ 831 1
834
+ 832 1
835
+ 833 0
836
+ 834 1
837
+ 835 0
838
+ 836 1
839
+ 837 1
840
+ 838 1
841
+ 839 0
842
+ 840 0
843
+ 841 0
844
+ 842 1
845
+ 843 1
846
+ 844 0
847
+ 845 1
848
+ 846 1
849
+ 847 1
850
+ 848 0
851
+ 849 1
852
+ 850 0
853
+ 851 0
854
+ 852 1
855
+ 853 0
856
+ 854 1
857
+ 855 0
858
+ 856 1
859
+ 857 1
860
+ 858 0
861
+ 859 1
862
+ 860 0
863
+ 861 1
864
+ 862 1
865
+ 863 1
866
+ 864 1
867
+ 865 1
868
+ 866 0
869
+ 867 1
870
+ 868 1
871
+ 869 1
872
+ 870 0
873
+ 871 1
874
+ 872 1
875
+ 873 1
876
+ 874 1
877
+ 875 0
878
+ 876 1
879
+ 877 0
880
+ 878 0
881
+ 879 1
882
+ 880 1
883
+ 881 1
884
+ 882 1
885
+ 883 1
886
+ 884 0
887
+ 885 1
888
+ 886 1
889
+ 887 1
890
+ 888 0
891
+ 889 1
892
+ 890 1
893
+ 891 1
894
+ 892 0
895
+ 893 0
896
+ 894 0
897
+ 895 0
898
+ 896 0
899
+ 897 1
900
+ 898 1
901
+ 899 1
902
+ 900 1
903
+ 901 1
904
+ 902 0
905
+ 903 1
906
+ 904 1
907
+ 905 1
908
+ 906 1
909
+ 907 1
910
+ 908 1
911
+ 909 1
912
+ 910 0
913
+ 911 1
914
+ 912 0
915
+ 913 0
916
+ 914 1
917
+ 915 1
918
+ 916 1
919
+ 917 1
920
+ 918 0
921
+ 919 1
922
+ 920 1
923
+ 921 1
924
+ 922 1
925
+ 923 1
926
+ 924 1
927
+ 925 1
928
+ 926 1
929
+ 927 0
930
+ 928 1
931
+ 929 1
932
+ 930 1
933
+ 931 0
934
+ 932 0
935
+ 933 0
936
+ 934 0
937
+ 935 1
938
+ 936 1
939
+ 937 0
940
+ 938 1
941
+ 939 1
942
+ 940 1
943
+ 941 1
944
+ 942 0
945
+ 943 1
946
+ 944 1
947
+ 945 1
948
+ 946 1
949
+ 947 1
950
+ 948 1
951
+ 949 1
952
+ 950 0
953
+ 951 1
954
+ 952 0
955
+ 953 1
956
+ 954 1
957
+ 955 1
958
+ 956 1
959
+ 957 1
960
+ 958 1
961
+ 959 1
962
+ 960 1
963
+ 961 1
964
+ 962 1
965
+ 963 0
966
+ 964 0
967
+ 965 1
968
+ 966 1
969
+ 967 1
970
+ 968 1
971
+ 969 1
972
+ 970 1
973
+ 971 1
974
+ 972 1
975
+ 973 1
976
+ 974 1
977
+ 975 1
978
+ 976 1
979
+ 977 1
980
+ 978 1
981
+ 979 1
982
+ 980 1
983
+ 981 1
984
+ 982 0
985
+ 983 1
986
+ 984 0
987
+ 985 1
988
+ 986 1
989
+ 987 1
990
+ 988 1
991
+ 989 0
992
+ 990 0
993
+ 991 1
994
+ 992 1
995
+ 993 1
996
+ 994 0
997
+ 995 1
998
+ 996 1
999
+ 997 1
1000
+ 998 1
1001
+ 999 1
1002
+ 1000 0
1003
+ 1001 0
1004
+ 1002 0
1005
+ 1003 0
1006
+ 1004 1
1007
+ 1005 0
1008
+ 1006 1
1009
+ 1007 1
1010
+ 1008 1
1011
+ 1009 0
1012
+ 1010 1
1013
+ 1011 0
1014
+ 1012 1
1015
+ 1013 1
1016
+ 1014 1
1017
+ 1015 1
1018
+ 1016 1
1019
+ 1017 1
1020
+ 1018 1
1021
+ 1019 0
1022
+ 1020 1
1023
+ 1021 1
1024
+ 1022 1
1025
+ 1023 0
1026
+ 1024 1
1027
+ 1025 0
1028
+ 1026 0
1029
+ 1027 1
1030
+ 1028 0
1031
+ 1029 1
1032
+ 1030 0
1033
+ 1031 1
1034
+ 1032 1
1035
+ 1033 1
1036
+ 1034 1
1037
+ 1035 0
1038
+ 1036 1
1039
+ 1037 1
1040
+ 1038 1
1041
+ 1039 0
1042
+ 1040 0
1043
+ 1041 1
1044
+ 1042 0
1045
+ 1043 0
1046
+ 1044 1
1047
+ 1045 1
1048
+ 1046 0
1049
+ 1047 1
1050
+ 1048 1
1051
+ 1049 1
1052
+ 1050 1
1053
+ 1051 1
1054
+ 1052 0
1055
+ 1053 1
1056
+ 1054 0
1057
+ 1055 1
1058
+ 1056 1
1059
+ 1057 1
1060
+ 1058 1
1061
+ 1059 1
1062
+ 1060 1
1063
+ 1061 1
1064
+ 1062 1
1065
+ 1063 1
1066
+ 1064 1
1067
+ 1065 1
1068
+ 1066 1
1069
+ 1067 1
1070
+ 1068 0
1071
+ 1069 1
1072
+ 1070 1
1073
+ 1071 1
1074
+ 1072 1
1075
+ 1073 1
1076
+ 1074 1
1077
+ 1075 1
1078
+ 1076 1
1079
+ 1077 1
1080
+ 1078 1
1081
+ 1079 1
1082
+ 1080 0
1083
+ 1081 0
1084
+ 1082 1
1085
+ 1083 1
1086
+ 1084 1
1087
+ 1085 1
1088
+ 1086 1
1089
+ 1087 1
1090
+ 1088 1
1091
+ 1089 1
1092
+ 1090 1
1093
+ 1091 0
1094
+ 1092 1
1095
+ 1093 1
1096
+ 1094 1
1097
+ 1095 1
1098
+ 1096 1
1099
+ 1097 1
1100
+ 1098 1
1101
+ 1099 0
1102
+ 1100 1
1103
+ 1101 1
1104
+ 1102 0
1105
+ 1103 1
1106
+ 1104 1
1107
+ 1105 0
1108
+ 1106 1
1109
+ 1107 0
1110
+ 1108 1
1111
+ 1109 1
1112
+ 1110 0
1113
+ 1111 1
1114
+ 1112 0
1115
+ 1113 0
1116
+ 1114 1
1117
+ 1115 1
1118
+ 1116 0
1119
+ 1117 1
1120
+ 1118 1
1121
+ 1119 1
1122
+ 1120 0
1123
+ 1121 0
1124
+ 1122 1
1125
+ 1123 1
1126
+ 1124 0
1127
+ 1125 1
1128
+ 1126 0
1129
+ 1127 0
1130
+ 1128 1
1131
+ 1129 1
1132
+ 1130 1
1133
+ 1131 1
1134
+ 1132 0
1135
+ 1133 1
1136
+ 1134 0
1137
+ 1135 1
1138
+ 1136 0
1139
+ 1137 1
1140
+ 1138 0
1141
+ 1139 0
1142
+ 1140 0
1143
+ 1141 1
1144
+ 1142 1
1145
+ 1143 1
1146
+ 1144 1
1147
+ 1145 1
1148
+ 1146 1
1149
+ 1147 0
1150
+ 1148 1
1151
+ 1149 1
1152
+ 1150 1
1153
+ 1151 1
1154
+ 1152 1
1155
+ 1153 1
1156
+ 1154 0
1157
+ 1155 1
1158
+ 1156 1
1159
+ 1157 0
1160
+ 1158 0
1161
+ 1159 1
1162
+ 1160 0
1163
+ 1161 0
1164
+ 1162 1
1165
+ 1163 1
1166
+ 1164 1
1167
+ 1165 0
1168
+ 1166 0
1169
+ 1167 1
1170
+ 1168 1
1171
+ 1169 0
1172
+ 1170 1
1173
+ 1171 1
1174
+ 1172 1
1175
+ 1173 1
1176
+ 1174 1
1177
+ 1175 0
1178
+ 1176 0
1179
+ 1177 0
1180
+ 1178 1
1181
+ 1179 1
1182
+ 1180 1
1183
+ 1181 1
1184
+ 1182 1
1185
+ 1183 1
1186
+ 1184 1
1187
+ 1185 1
1188
+ 1186 0
1189
+ 1187 1
1190
+ 1188 1
1191
+ 1189 0
1192
+ 1190 0
1193
+ 1191 0
1194
+ 1192 1
1195
+ 1193 1
1196
+ 1194 1
1197
+ 1195 1
1198
+ 1196 0
1199
+ 1197 1
1200
+ 1198 1
1201
+ 1199 1
1202
+ 1200 1
1203
+ 1201 1
1204
+ 1202 0
1205
+ 1203 1
1206
+ 1204 0
1207
+ 1205 1
1208
+ 1206 0
1209
+ 1207 0
1210
+ 1208 1
1211
+ 1209 1
1212
+ 1210 1
1213
+ 1211 1
1214
+ 1212 1
1215
+ 1213 1
1216
+ 1214 1
1217
+ 1215 1
1218
+ 1216 1
1219
+ 1217 1
1220
+ 1218 1
1221
+ 1219 1
1222
+ 1220 1
1223
+ 1221 0
1224
+ 1222 1
1225
+ 1223 1
1226
+ 1224 0
1227
+ 1225 1
1228
+ 1226 0
1229
+ 1227 1
1230
+ 1228 1
1231
+ 1229 1
1232
+ 1230 1
1233
+ 1231 0
1234
+ 1232 1
1235
+ 1233 1
1236
+ 1234 1
1237
+ 1235 1
1238
+ 1236 1
1239
+ 1237 1
1240
+ 1238 1
1241
+ 1239 1
1242
+ 1240 1
1243
+ 1241 0
1244
+ 1242 1
1245
+ 1243 1
1246
+ 1244 1
1247
+ 1245 1
1248
+ 1246 0
1249
+ 1247 1
1250
+ 1248 1
1251
+ 1249 1
1252
+ 1250 1
1253
+ 1251 1
1254
+ 1252 1
1255
+ 1253 1
1256
+ 1254 1
1257
+ 1255 1
1258
+ 1256 1
1259
+ 1257 1
1260
+ 1258 0
1261
+ 1259 0
1262
+ 1260 1
1263
+ 1261 1
1264
+ 1262 1
1265
+ 1263 1
1266
+ 1264 1
1267
+ 1265 1
1268
+ 1266 1
1269
+ 1267 1
1270
+ 1268 1
1271
+ 1269 1
1272
+ 1270 1
1273
+ 1271 1
1274
+ 1272 0
1275
+ 1273 0
1276
+ 1274 0
1277
+ 1275 1
1278
+ 1276 0
1279
+ 1277 1
1280
+ 1278 0
1281
+ 1279 0
1282
+ 1280 1
1283
+ 1281 0
1284
+ 1282 1
1285
+ 1283 0
1286
+ 1284 1
1287
+ 1285 1
1288
+ 1286 1
1289
+ 1287 0
1290
+ 1288 1
1291
+ 1289 1
1292
+ 1290 1
1293
+ 1291 1
1294
+ 1292 0
1295
+ 1293 0
1296
+ 1294 0
1297
+ 1295 1
1298
+ 1296 1
1299
+ 1297 1
1300
+ 1298 1
1301
+ 1299 1
1302
+ 1300 1
1303
+ 1301 1
1304
+ 1302 1
1305
+ 1303 1
1306
+ 1304 1
1307
+ 1305 0
1308
+ 1306 0
1309
+ 1307 1
1310
+ 1308 1
1311
+ 1309 0
1312
+ 1310 1
1313
+ 1311 1
1314
+ 1312 1
1315
+ 1313 1
1316
+ 1314 1
1317
+ 1315 1
1318
+ 1316 1
1319
+ 1317 1
1320
+ 1318 0
1321
+ 1319 1
1322
+ 1320 1
1323
+ 1321 1
1324
+ 1322 1
1325
+ 1323 1
1326
+ 1324 1
1327
+ 1325 1
1328
+ 1326 1
1329
+ 1327 1
1330
+ 1328 1
1331
+ 1329 0
1332
+ 1330 0
1333
+ 1331 1
1334
+ 1332 1
1335
+ 1333 0
1336
+ 1334 1
1337
+ 1335 0
1338
+ 1336 1
1339
+ 1337 0
1340
+ 1338 0
1341
+ 1339 1
1342
+ 1340 0
1343
+ 1341 0
1344
+ 1342 0
1345
+ 1343 0
1346
+ 1344 1
1347
+ 1345 1
1348
+ 1346 1
1349
+ 1347 1
1350
+ 1348 1
1351
+ 1349 1
1352
+ 1350 1
1353
+ 1351 1
1354
+ 1352 1
1355
+ 1353 0
1356
+ 1354 0
1357
+ 1355 0
1358
+ 1356 1
1359
+ 1357 1
1360
+ 1358 0
1361
+ 1359 1
1362
+ 1360 1
1363
+ 1361 1
1364
+ 1362 1
1365
+ 1363 0
1366
+ 1364 1
1367
+ 1365 1
1368
+ 1366 0
1369
+ 1367 0
1370
+ 1368 1
1371
+ 1369 1
1372
+ 1370 0
1373
+ 1371 0
1374
+ 1372 1
1375
+ 1373 1
1376
+ 1374 0
1377
+ 1375 1
1378
+ 1376 0
1379
+ 1377 1
1380
+ 1378 1
1381
+ 1379 1
1382
+ 1380 1
1383
+ 1381 1
1384
+ 1382 1
1385
+ 1383 1
1386
+ 1384 1
1387
+ 1385 1
1388
+ 1386 0
1389
+ 1387 0
1390
+ 1388 1
1391
+ 1389 1
1392
+ 1390 1
1393
+ 1391 0
1394
+ 1392 0
1395
+ 1393 1
1396
+ 1394 1
1397
+ 1395 1
1398
+ 1396 0
1399
+ 1397 1
1400
+ 1398 1
1401
+ 1399 1
1402
+ 1400 0
1403
+ 1401 0
1404
+ 1402 1
1405
+ 1403 0
1406
+ 1404 1
1407
+ 1405 0
1408
+ 1406 1
1409
+ 1407 1
1410
+ 1408 1
1411
+ 1409 1
1412
+ 1410 0
1413
+ 1411 1
1414
+ 1412 1
1415
+ 1413 1
1416
+ 1414 0
1417
+ 1415 1
1418
+ 1416 1
1419
+ 1417 0
1420
+ 1418 1
1421
+ 1419 1
1422
+ 1420 1
1423
+ 1421 1
1424
+ 1422 0
1425
+ 1423 0
1426
+ 1424 0
1427
+ 1425 1
1428
+ 1426 0
1429
+ 1427 1
1430
+ 1428 1
1431
+ 1429 1
1432
+ 1430 0
1433
+ 1431 1
1434
+ 1432 1
1435
+ 1433 1
1436
+ 1434 1
1437
+ 1435 0
1438
+ 1436 1
1439
+ 1437 1
1440
+ 1438 1
1441
+ 1439 1
1442
+ 1440 1
1443
+ 1441 1
1444
+ 1442 1
1445
+ 1443 0
1446
+ 1444 0
1447
+ 1445 1
1448
+ 1446 1
1449
+ 1447 1
1450
+ 1448 0
1451
+ 1449 1
1452
+ 1450 1
1453
+ 1451 0
1454
+ 1452 1
1455
+ 1453 1
1456
+ 1454 1
1457
+ 1455 1
1458
+ 1456 1
1459
+ 1457 1
1460
+ 1458 1
1461
+ 1459 1
1462
+ 1460 1
1463
+ 1461 1
1464
+ 1462 1
1465
+ 1463 0
1466
+ 1464 1
1467
+ 1465 1
1468
+ 1466 1
1469
+ 1467 0
1470
+ 1468 1
1471
+ 1469 0
1472
+ 1470 1
1473
+ 1471 1
1474
+ 1472 0
1475
+ 1473 0
1476
+ 1474 1
1477
+ 1475 0
1478
+ 1476 0
1479
+ 1477 1
1480
+ 1478 1
1481
+ 1479 0
1482
+ 1480 1
1483
+ 1481 0
1484
+ 1482 1
1485
+ 1483 0
1486
+ 1484 0
1487
+ 1485 1
1488
+ 1486 0
1489
+ 1487 0
1490
+ 1488 1
1491
+ 1489 1
1492
+ 1490 0
1493
+ 1491 1
1494
+ 1492 1
1495
+ 1493 1
1496
+ 1494 0
1497
+ 1495 1
1498
+ 1496 1
1499
+ 1497 0
1500
+ 1498 0
1501
+ 1499 0
1502
+ 1500 1
1503
+ 1501 1
1504
+ 1502 1
1505
+ 1503 0
1506
+ 1504 1
1507
+ 1505 1
1508
+ 1506 1
1509
+ 1507 1
1510
+ 1508 1
1511
+ 1509 0
1512
+ 1510 1
1513
+ 1511 1
1514
+ 1512 1
1515
+ 1513 1
1516
+ 1514 0
1517
+ 1515 1
1518
+ 1516 0
1519
+ 1517 0
1520
+ 1518 0
1521
+ 1519 0
1522
+ 1520 0
1523
+ 1521 0
1524
+ 1522 0
1525
+ 1523 1
1526
+ 1524 1
1527
+ 1525 1
1528
+ 1526 1
1529
+ 1527 1
1530
+ 1528 0
1531
+ 1529 1
1532
+ 1530 1
1533
+ 1531 1
1534
+ 1532 1
1535
+ 1533 1
1536
+ 1534 1
1537
+ 1535 0
1538
+ 1536 1
1539
+ 1537 1
1540
+ 1538 1
1541
+ 1539 0
1542
+ 1540 0
1543
+ 1541 1
1544
+ 1542 0
1545
+ 1543 1
1546
+ 1544 0
1547
+ 1545 0
1548
+ 1546 1
1549
+ 1547 0
1550
+ 1548 1
1551
+ 1549 1
1552
+ 1550 1
1553
+ 1551 0
1554
+ 1552 0
1555
+ 1553 1
1556
+ 1554 1
1557
+ 1555 1
1558
+ 1556 0
1559
+ 1557 1
1560
+ 1558 0
1561
+ 1559 1
1562
+ 1560 1
1563
+ 1561 0
1564
+ 1562 1
1565
+ 1563 1
1566
+ 1564 0
1567
+ 1565 1
1568
+ 1566 1
1569
+ 1567 1
1570
+ 1568 1
1571
+ 1569 1
1572
+ 1570 0
1573
+ 1571 1
1574
+ 1572 0
1575
+ 1573 1
1576
+ 1574 1
1577
+ 1575 1
1578
+ 1576 1
1579
+ 1577 0
1580
+ 1578 1
1581
+ 1579 1
1582
+ 1580 1
1583
+ 1581 1
1584
+ 1582 1
1585
+ 1583 1
1586
+ 1584 0
1587
+ 1585 1
1588
+ 1586 0
1589
+ 1587 1
1590
+ 1588 1
1591
+ 1589 0
1592
+ 1590 0
1593
+ 1591 1
1594
+ 1592 1
1595
+ 1593 0
1596
+ 1594 1
1597
+ 1595 1
1598
+ 1596 1
1599
+ 1597 0
1600
+ 1598 0
1601
+ 1599 1
1602
+ 1600 0
1603
+ 1601 1
1604
+ 1602 1
1605
+ 1603 1
1606
+ 1604 0
1607
+ 1605 0
1608
+ 1606 1
1609
+ 1607 1
1610
+ 1608 0
1611
+ 1609 1
1612
+ 1610 0
1613
+ 1611 1
1614
+ 1612 1
1615
+ 1613 1
1616
+ 1614 1
1617
+ 1615 1
1618
+ 1616 0
1619
+ 1617 0
1620
+ 1618 1
1621
+ 1619 1
1622
+ 1620 0
1623
+ 1621 0
1624
+ 1622 0
1625
+ 1623 1
1626
+ 1624 0
1627
+ 1625 0
1628
+ 1626 1
1629
+ 1627 1
1630
+ 1628 1
1631
+ 1629 0
1632
+ 1630 0
1633
+ 1631 1
1634
+ 1632 1
1635
+ 1633 1
1636
+ 1634 1
1637
+ 1635 1
1638
+ 1636 1
1639
+ 1637 1
1640
+ 1638 0
1641
+ 1639 0
1642
+ 1640 0
1643
+ 1641 1
1644
+ 1642 0
1645
+ 1643 1
1646
+ 1644 1
1647
+ 1645 1
1648
+ 1646 1
1649
+ 1647 0
1650
+ 1648 1
1651
+ 1649 1
1652
+ 1650 1
1653
+ 1651 0
1654
+ 1652 0
1655
+ 1653 1
1656
+ 1654 0
1657
+ 1655 1
1658
+ 1656 0
1659
+ 1657 0
1660
+ 1658 1
1661
+ 1659 0
1662
+ 1660 0
1663
+ 1661 1
1664
+ 1662 0
1665
+ 1663 1
1666
+ 1664 0
1667
+ 1665 0
1668
+ 1666 0
1669
+ 1667 1
1670
+ 1668 0
1671
+ 1669 1
1672
+ 1670 1
1673
+ 1671 0
1674
+ 1672 1
1675
+ 1673 1
1676
+ 1674 1
1677
+ 1675 0
1678
+ 1676 1
1679
+ 1677 1
1680
+ 1678 1
1681
+ 1679 1
1682
+ 1680 0
1683
+ 1681 1
1684
+ 1682 1
1685
+ 1683 0
1686
+ 1684 1
1687
+ 1685 1
1688
+ 1686 0
1689
+ 1687 0
1690
+ 1688 1
1691
+ 1689 1
1692
+ 1690 1
1693
+ 1691 0
1694
+ 1692 1
1695
+ 1693 1
1696
+ 1694 1
1697
+ 1695 1
1698
+ 1696 1
1699
+ 1697 0
1700
+ 1698 0
1701
+ 1699 1
1702
+ 1700 0
1703
+ 1701 1
1704
+ 1702 0
1705
+ 1703 1
1706
+ 1704 1
1707
+ 1705 1
1708
+ 1706 1
1709
+ 1707 0
1710
+ 1708 1
1711
+ 1709 1
1712
+ 1710 1
1713
+ 1711 1
1714
+ 1712 1
1715
+ 1713 0
1716
+ 1714 1
1717
+ 1715 1
1718
+ 1716 1
1719
+ 1717 1
1720
+ 1718 1
1721
+ 1719 1
1722
+ 1720 0
1723
+ 1721 0
1724
+ 1722 0
1725
+ 1723 1
1726
+ 1724 1
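The MRPC.tsv files in this upload follow the GLUE test-submission layout: a header row (index, prediction) followed by one row per test example (1,725 rows here, indices 0 to 1724, matching the MRPC test split). A minimal sketch for reading one of these files back, assuming standard Python and a tab delimiter as the .tsv extension suggests; the path is illustrative:

import csv

# Illustrative path to one of the prediction files in this upload.
path = "nlu/glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h43m47,r=4,s44/MRPC.tsv"

predictions = {}
with open(path, newline="") as f:
    reader = csv.DictReader(f, delimiter="\t")  # columns: index, prediction
    for row in reader:
        predictions[int(row["index"])] = int(row["prediction"])

print(len(predictions), "binary paraphrase predictions")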
nlu/glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h43m47,r=4,s44/all_results.json ADDED
@@ -0,0 +1,11 @@
1
+ {
2
+ "epoch": 30.0,
3
+ "eval_accuracy": 0.9166666666666666,
4
+ "eval_combined_score": 0.9282979976442873,
5
+ "eval_f1": 0.9399293286219081,
6
+ "eval_loss": 0.4379878342151642,
7
+ "eval_runtime": 0.59,
8
+ "eval_samples": 408,
9
+ "eval_samples_per_second": 691.558,
10
+ "eval_steps_per_second": 1.695
11
+ }
nlu/glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h43m47,r=4,s44/eval_results.json ADDED
@@ -0,0 +1,11 @@
1
+ {
2
+ "epoch": 30.0,
3
+ "eval_accuracy": 0.9166666666666666,
4
+ "eval_combined_score": 0.9282979976442873,
5
+ "eval_f1": 0.9399293286219081,
6
+ "eval_loss": 0.4379878342151642,
7
+ "eval_runtime": 0.59,
8
+ "eval_samples": 408,
9
+ "eval_samples_per_second": 691.558,
10
+ "eval_steps_per_second": 1.695
11
+ }
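all_results.json and eval_results.json above report the same final numbers for this run; the 408 eval_samples correspond to the MRPC validation split, and eval_combined_score is the arithmetic mean of accuracy and F1, consistent with the combined-score convention in the Transformers run_glue example. A quick check against the values above:

# Verify that eval_combined_score is the mean of eval_accuracy and eval_f1.
accuracy = 0.9166666666666666
f1 = 0.9399293286219081
print((accuracy + f1) / 2)  # 0.9282979976442873, matching eval_combined_score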
nlu/glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h43m47,r=4,s44/ft/adapter_config.json ADDED
@@ -0,0 +1,26 @@
1
+ {
2
+ "T": 1.0,
3
+ "base_model_name_or_path": "microsoft/deberta-v3-base",
4
+ "bias": "none",
5
+ "drop_out": 0.0,
6
+ "inference_mode": false,
7
+ "layers_to_transform": null,
8
+ "modules_to_save": [
9
+ "classifier",
10
+ "pooler"
11
+ ],
12
+ "num_rotations": 1,
13
+ "peft_type": "ROTATION",
14
+ "r": 4,
15
+ "revision": null,
16
+ "target_modules": [
17
+ "value_proj",
18
+ "intermediate.dense",
19
+ "output.dense",
20
+ "query_proj",
21
+ "key_proj",
22
+ "attention.output.dense"
23
+ ],
24
+ "target_modules_to_skip": null,
25
+ "task_type": "SEQ_CLS"
26
+ }
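The adapter_config.json above (and the ft2/ copy further below, identical except that inference_mode is true) describes a rank-4 adapter with a custom peft_type of "ROTATION" applied to the attention and feed-forward projections of microsoft/deberta-v3-base. "ROTATION" does not appear to be one of the built-in PEFT adapter types, so loading the weights presumably requires the code that produced this run; the config itself is plain JSON and can be inspected directly. A minimal sketch, with an illustrative path:

import json

# Illustrative path to the adapter config shown above.
cfg_path = "nlu/glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h43m47,r=4,s44/ft/adapter_config.json"

with open(cfg_path) as f:
    cfg = json.load(f)

print(cfg["peft_type"], "rank", cfg["r"])             # ROTATION rank 4
print("base model:", cfg["base_model_name_or_path"])  # microsoft/deberta-v3-base
print("adapted modules:", ", ".join(cfg["target_modules"]))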
nlu/glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h43m47,r=4,s44/ft/added_tokens.json ADDED
@@ -0,0 +1,3 @@
1
+ {
2
+ "[MASK]": 128000
3
+ }
nlu/glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h43m47,r=4,s44/ft/special_tokens_map.json ADDED
@@ -0,0 +1,15 @@
1
+ {
2
+ "bos_token": "[CLS]",
3
+ "cls_token": "[CLS]",
4
+ "eos_token": "[SEP]",
5
+ "mask_token": "[MASK]",
6
+ "pad_token": "[PAD]",
7
+ "sep_token": "[SEP]",
8
+ "unk_token": {
9
+ "content": "[UNK]",
10
+ "lstrip": false,
11
+ "normalized": true,
12
+ "rstrip": false,
13
+ "single_word": false
14
+ }
15
+ }
nlu/glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h43m47,r=4,s44/ft/spm.model ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c679fbf93643d19aab7ee10c0b99e460bdbc02fedf34b92b05af343b4af586fd
3
+ size 2464616
nlu/glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h43m47,r=4,s44/ft/tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
nlu/glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h43m47,r=4,s44/ft/tokenizer_config.json ADDED
@@ -0,0 +1,60 @@
1
+ {
2
+ "added_tokens_decoder": {
3
+ "0": {
4
+ "content": "[PAD]",
5
+ "lstrip": false,
6
+ "normalized": false,
7
+ "rstrip": false,
8
+ "single_word": false,
9
+ "special": true
10
+ },
11
+ "1": {
12
+ "content": "[CLS]",
13
+ "lstrip": false,
14
+ "normalized": false,
15
+ "rstrip": false,
16
+ "single_word": false,
17
+ "special": true
18
+ },
19
+ "2": {
20
+ "content": "[SEP]",
21
+ "lstrip": false,
22
+ "normalized": false,
23
+ "rstrip": false,
24
+ "single_word": false,
25
+ "special": true
26
+ },
27
+ "3": {
28
+ "content": "[UNK]",
29
+ "lstrip": false,
30
+ "normalized": true,
31
+ "rstrip": false,
32
+ "single_word": false,
33
+ "special": true
34
+ },
35
+ "128000": {
36
+ "content": "[MASK]",
37
+ "lstrip": false,
38
+ "normalized": false,
39
+ "rstrip": false,
40
+ "single_word": false,
41
+ "special": true
42
+ }
43
+ },
44
+ "bos_token": "[CLS]",
45
+ "clean_up_tokenization_spaces": false,
46
+ "cls_token": "[CLS]",
47
+ "do_lower_case": false,
48
+ "eos_token": "[SEP]",
49
+ "extra_special_tokens": {},
50
+ "mask_token": "[MASK]",
51
+ "model_max_length": 512,
52
+ "pad_token": "[PAD]",
53
+ "padding_side": "right",
54
+ "sep_token": "[SEP]",
55
+ "sp_model_kwargs": {},
56
+ "split_by_punct": false,
57
+ "tokenizer_class": "DebertaV2Tokenizer",
58
+ "unk_token": "[UNK]",
59
+ "vocab_type": "spm"
60
+ }
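The ft/ directory also bundles the full DeBERTa-v3 SentencePiece tokenizer (spm.model, tokenizer.json, the added [MASK] token at id 128000, and the configs above), so it can be reloaded on its own. A minimal sketch, assuming the transformers library is installed and the directory has been downloaded locally; the path is illustrative:

from transformers import AutoTokenizer

# Illustrative local path to the ft/ directory shown above.
tok_dir = "nlu/glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h43m47,r=4,s44/ft"

tokenizer = AutoTokenizer.from_pretrained(tok_dir)  # DebertaV2Tokenizer per tokenizer_config.json
enc = tokenizer("He said the food was good.", "The food was delicious, he said.")
print(tokenizer.convert_ids_to_tokens(enc["input_ids"]))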
nlu/glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h43m47,r=4,s44/ft2/adapter_config.json ADDED
@@ -0,0 +1,26 @@
1
+ {
2
+ "T": 1.0,
3
+ "base_model_name_or_path": "microsoft/deberta-v3-base",
4
+ "bias": "none",
5
+ "drop_out": 0.0,
6
+ "inference_mode": true,
7
+ "layers_to_transform": null,
8
+ "modules_to_save": [
9
+ "classifier",
10
+ "pooler"
11
+ ],
12
+ "num_rotations": 1,
13
+ "peft_type": "ROTATION",
14
+ "r": 4,
15
+ "revision": null,
16
+ "target_modules": [
17
+ "value_proj",
18
+ "intermediate.dense",
19
+ "output.dense",
20
+ "query_proj",
21
+ "key_proj",
22
+ "attention.output.dense"
23
+ ],
24
+ "target_modules_to_skip": null,
25
+ "task_type": "SEQ_CLS"
26
+ }
nlu/glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h43m47,r=4,s44/ft2/adapter_model.bin ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:659c1db1180853a7b1ca2995cf400ac5c4a565e09751b0a064bcbc98d55a42c0
3
+ size 7449859
nlu/glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h43m47,r=4,s44/trainer_state.json ADDED
@@ -0,0 +1,655 @@
1
+ {
2
+ "best_global_step": 800,
3
+ "best_metric": 0.9166666666666666,
4
+ "best_model_checkpoint": "./glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h43m47,r=4,s44/checkpoint-800",
5
+ "epoch": 30.0,
6
+ "eval_steps": 100,
7
+ "global_step": 3450,
8
+ "is_hyper_param_search": false,
9
+ "is_local_process_zero": true,
10
+ "is_world_process_zero": true,
11
+ "log_history": [
12
+ {
13
+ "epoch": 0.8695652173913043,
14
+ "grad_norm": 1.8360604047775269,
15
+ "learning_rate": 0.00198,
16
+ "loss": 0.5914,
17
+ "step": 100
18
+ },
19
+ {
20
+ "epoch": 0.8695652173913043,
21
+ "eval_accuracy": 0.7818627450980392,
22
+ "eval_combined_score": 0.8212913099512105,
23
+ "eval_f1": 0.8607198748043818,
24
+ "eval_loss": 0.49459952116012573,
25
+ "eval_runtime": 0.6854,
26
+ "eval_samples_per_second": 595.272,
27
+ "eval_steps_per_second": 1.459,
28
+ "step": 100
29
+ },
30
+ {
31
+ "epoch": 1.7391304347826086,
32
+ "grad_norm": 2.4401156902313232,
33
+ "learning_rate": 0.0019956152348614225,
34
+ "loss": 0.4002,
35
+ "step": 200
36
+ },
37
+ {
38
+ "epoch": 1.7391304347826086,
39
+ "eval_accuracy": 0.8431372549019608,
40
+ "eval_combined_score": 0.868937048503612,
41
+ "eval_f1": 0.8947368421052632,
42
+ "eval_loss": 0.48239845037460327,
43
+ "eval_runtime": 0.5905,
44
+ "eval_samples_per_second": 690.894,
45
+ "eval_steps_per_second": 1.693,
46
+ "step": 200
47
+ },
48
+ {
49
+ "epoch": 2.608695652173913,
50
+ "grad_norm": 3.450094223022461,
51
+ "learning_rate": 0.0019823226955326743,
52
+ "loss": 0.2987,
53
+ "step": 300
54
+ },
55
+ {
56
+ "epoch": 2.608695652173913,
57
+ "eval_accuracy": 0.8799019607843137,
58
+ "eval_combined_score": 0.8980706385118149,
59
+ "eval_f1": 0.9162393162393162,
60
+ "eval_loss": 0.31535765528678894,
61
+ "eval_runtime": 0.5903,
62
+ "eval_samples_per_second": 691.194,
63
+ "eval_steps_per_second": 1.694,
64
+ "step": 300
65
+ },
66
+ {
67
+ "epoch": 3.4782608695652173,
68
+ "grad_norm": 2.1748127937316895,
69
+ "learning_rate": 0.0019602408686963785,
70
+ "loss": 0.2165,
71
+ "step": 400
72
+ },
73
+ {
74
+ "epoch": 3.4782608695652173,
75
+ "eval_accuracy": 0.8872549019607843,
76
+ "eval_combined_score": 0.9038350634371395,
77
+ "eval_f1": 0.9204152249134948,
78
+ "eval_loss": 0.35315826535224915,
79
+ "eval_runtime": 0.5883,
80
+ "eval_samples_per_second": 693.519,
81
+ "eval_steps_per_second": 1.7,
82
+ "step": 400
83
+ },
84
+ {
85
+ "epoch": 4.3478260869565215,
86
+ "grad_norm": 1.631756067276001,
87
+ "learning_rate": 0.0019295673304908422,
88
+ "loss": 0.1806,
89
+ "step": 500
90
+ },
91
+ {
92
+ "epoch": 4.3478260869565215,
93
+ "eval_accuracy": 0.8651960784313726,
94
+ "eval_combined_score": 0.8865343876243965,
95
+ "eval_f1": 0.9078726968174204,
96
+ "eval_loss": 0.433360755443573,
97
+ "eval_runtime": 0.5864,
98
+ "eval_samples_per_second": 695.779,
99
+ "eval_steps_per_second": 1.705,
100
+ "step": 500
101
+ },
102
+ {
103
+ "epoch": 5.217391304347826,
104
+ "grad_norm": 3.0931460857391357,
105
+ "learning_rate": 0.001890576530999922,
106
+ "loss": 0.1185,
107
+ "step": 600
108
+ },
109
+ {
110
+ "epoch": 5.217391304347826,
111
+ "eval_accuracy": 0.8970588235294118,
112
+ "eval_combined_score": 0.9112953692115144,
113
+ "eval_f1": 0.925531914893617,
114
+ "eval_loss": 0.5076855421066284,
115
+ "eval_runtime": 0.5875,
116
+ "eval_samples_per_second": 694.515,
117
+ "eval_steps_per_second": 1.702,
118
+ "step": 600
119
+ },
120
+ {
121
+ "epoch": 6.086956521739131,
122
+ "grad_norm": 7.7334113121032715,
123
+ "learning_rate": 0.0018436173386234143,
124
+ "loss": 0.1018,
125
+ "step": 700
126
+ },
127
+ {
128
+ "epoch": 6.086956521739131,
129
+ "eval_accuracy": 0.8455882352941176,
130
+ "eval_combined_score": 0.8710699797160244,
131
+ "eval_f1": 0.896551724137931,
132
+ "eval_loss": 0.7612046599388123,
133
+ "eval_runtime": 0.5881,
134
+ "eval_samples_per_second": 693.813,
135
+ "eval_steps_per_second": 1.701,
136
+ "step": 700
137
+ },
138
+ {
139
+ "epoch": 6.956521739130435,
140
+ "grad_norm": 7.458433628082275,
141
+ "learning_rate": 0.001789109918592965,
142
+ "loss": 0.0663,
143
+ "step": 800
144
+ },
145
+ {
146
+ "epoch": 6.956521739130435,
147
+ "eval_accuracy": 0.9166666666666666,
148
+ "eval_combined_score": 0.9282979976442873,
149
+ "eval_f1": 0.9399293286219081,
150
+ "eval_loss": 0.4379878342151642,
151
+ "eval_runtime": 0.5904,
152
+ "eval_samples_per_second": 691.039,
153
+ "eval_steps_per_second": 1.694,
154
+ "step": 800
155
+ },
156
+ {
157
+ "epoch": 7.826086956521739,
158
+ "grad_norm": 0.46179747581481934,
159
+ "learning_rate": 0.001727541973562826,
160
+ "loss": 0.0584,
161
+ "step": 900
162
+ },
163
+ {
164
+ "epoch": 7.826086956521739,
165
+ "eval_accuracy": 0.8897058823529411,
166
+ "eval_combined_score": 0.9061266072694137,
167
+ "eval_f1": 0.9225473321858864,
168
+ "eval_loss": 0.6456283926963806,
169
+ "eval_runtime": 0.5878,
170
+ "eval_samples_per_second": 694.099,
171
+ "eval_steps_per_second": 1.701,
172
+ "step": 900
173
+ },
174
+ {
175
+ "epoch": 8.695652173913043,
176
+ "grad_norm": 0.026321955025196075,
177
+ "learning_rate": 0.001659464379912601,
178
+ "loss": 0.0478,
179
+ "step": 1000
180
+ },
181
+ {
182
+ "epoch": 8.695652173913043,
183
+ "eval_accuracy": 0.8872549019607843,
184
+ "eval_combined_score": 0.9029914085775652,
185
+ "eval_f1": 0.9187279151943463,
186
+ "eval_loss": 0.6597155332565308,
187
+ "eval_runtime": 0.591,
188
+ "eval_samples_per_second": 690.371,
189
+ "eval_steps_per_second": 1.692,
190
+ "step": 1000
191
+ },
192
+ {
193
+ "epoch": 9.565217391304348,
194
+ "grad_norm": 1.8554913997650146,
195
+ "learning_rate": 0.0015854862588059726,
196
+ "loss": 0.0457,
197
+ "step": 1100
198
+ },
199
+ {
200
+ "epoch": 9.565217391304348,
201
+ "eval_accuracy": 0.9019607843137255,
202
+ "eval_combined_score": 0.91500916913528,
203
+ "eval_f1": 0.9280575539568345,
204
+ "eval_loss": 0.5039987564086914,
205
+ "eval_runtime": 0.5869,
206
+ "eval_samples_per_second": 695.126,
207
+ "eval_steps_per_second": 1.704,
208
+ "step": 1100
209
+ },
210
+ {
211
+ "epoch": 10.434782608695652,
212
+ "grad_norm": 1.1184793710708618,
213
+ "learning_rate": 0.0015062695261068735,
214
+ "loss": 0.0326,
215
+ "step": 1200
216
+ },
217
+ {
218
+ "epoch": 10.434782608695652,
219
+ "eval_accuracy": 0.9019607843137255,
220
+ "eval_combined_score": 0.9152661064425771,
221
+ "eval_f1": 0.9285714285714286,
222
+ "eval_loss": 0.6569485664367676,
223
+ "eval_runtime": 0.5902,
224
+ "eval_samples_per_second": 691.244,
225
+ "eval_steps_per_second": 1.694,
226
+ "step": 1200
227
+ },
228
+ {
229
+ "epoch": 11.304347826086957,
230
+ "grad_norm": 5.948911666870117,
231
+ "learning_rate": 0.0014225229699174897,
232
+ "loss": 0.0397,
233
+ "step": 1300
234
+ },
235
+ {
236
+ "epoch": 11.304347826086957,
237
+ "eval_accuracy": 0.9019607843137255,
238
+ "eval_combined_score": 0.9136669593210418,
239
+ "eval_f1": 0.9253731343283582,
240
+ "eval_loss": 0.6822256445884705,
241
+ "eval_runtime": 0.5855,
242
+ "eval_samples_per_second": 696.804,
243
+ "eval_steps_per_second": 1.708,
244
+ "step": 1300
245
+ },
246
+ {
247
+ "epoch": 12.173913043478262,
248
+ "grad_norm": 0.024277156218886375,
249
+ "learning_rate": 0.0013349959087290495,
250
+ "loss": 0.0297,
251
+ "step": 1400
252
+ },
253
+ {
254
+ "epoch": 12.173913043478262,
255
+ "eval_accuracy": 0.8970588235294118,
256
+ "eval_combined_score": 0.911029411764706,
257
+ "eval_f1": 0.925,
258
+ "eval_loss": 0.661496102809906,
259
+ "eval_runtime": 0.5892,
260
+ "eval_samples_per_second": 692.509,
261
+ "eval_steps_per_second": 1.697,
262
+ "step": 1400
263
+ },
264
+ {
265
+ "epoch": 13.043478260869565,
266
+ "grad_norm": 0.0037559494376182556,
267
+ "learning_rate": 0.001244471486928804,
268
+ "loss": 0.0262,
269
+ "step": 1500
270
+ },
271
+ {
272
+ "epoch": 13.043478260869565,
273
+ "eval_accuracy": 0.9019607843137255,
274
+ "eval_combined_score": 0.9153932035447632,
275
+ "eval_f1": 0.9288256227758007,
276
+ "eval_loss": 0.6715577840805054,
277
+ "eval_runtime": 0.5906,
278
+ "eval_samples_per_second": 690.767,
279
+ "eval_steps_per_second": 1.693,
280
+ "step": 1500
281
+ },
282
+ {
283
+ "epoch": 13.91304347826087,
284
+ "grad_norm": 1.3978617191314697,
285
+ "learning_rate": 0.0011517596676513472,
286
+ "loss": 0.0191,
287
+ "step": 1600
288
+ },
289
+ {
290
+ "epoch": 13.91304347826087,
291
+ "eval_accuracy": 0.8897058823529411,
292
+ "eval_combined_score": 0.9058581404832297,
293
+ "eval_f1": 0.9220103986135182,
294
+ "eval_loss": 0.6576076745986938,
295
+ "eval_runtime": 0.5782,
296
+ "eval_samples_per_second": 705.656,
297
+ "eval_steps_per_second": 1.73,
298
+ "step": 1600
299
+ },
300
+ {
301
+ "epoch": 14.782608695652174,
302
+ "grad_norm": 0.014365606009960175,
303
+ "learning_rate": 0.001057689985670419,
304
+ "loss": 0.0234,
305
+ "step": 1700
306
+ },
307
+ {
308
+ "epoch": 14.782608695652174,
309
+ "eval_accuracy": 0.8946078431372549,
310
+ "eval_combined_score": 0.9092508242234947,
311
+ "eval_f1": 0.9238938053097345,
312
+ "eval_loss": 0.641814649105072,
313
+ "eval_runtime": 0.5743,
314
+ "eval_samples_per_second": 710.406,
315
+ "eval_steps_per_second": 1.741,
316
+ "step": 1700
317
+ },
318
+ {
319
+ "epoch": 15.652173913043478,
320
+ "grad_norm": 0.007329825311899185,
321
+ "learning_rate": 0.0009631041251743559,
322
+ "loss": 0.0233,
323
+ "step": 1800
324
+ },
325
+ {
326
+ "epoch": 15.652173913043478,
327
+ "eval_accuracy": 0.9093137254901961,
328
+ "eval_combined_score": 0.9219134999132397,
329
+ "eval_f1": 0.9345132743362832,
330
+ "eval_loss": 0.5932002067565918,
331
+ "eval_runtime": 0.5875,
332
+ "eval_samples_per_second": 694.434,
333
+ "eval_steps_per_second": 1.702,
334
+ "step": 1800
335
+ },
336
+ {
337
+ "epoch": 16.52173913043478,
338
+ "grad_norm": 3.096741199493408,
339
+ "learning_rate": 0.0008688483888352111,
340
+ "loss": 0.0101,
341
+ "step": 1900
342
+ },
343
+ {
344
+ "epoch": 16.52173913043478,
345
+ "eval_accuracy": 0.8970588235294118,
346
+ "eval_combined_score": 0.9114269382664727,
347
+ "eval_f1": 0.9257950530035336,
348
+ "eval_loss": 0.7496433854103088,
349
+ "eval_runtime": 0.5893,
350
+ "eval_samples_per_second": 692.33,
351
+ "eval_steps_per_second": 1.697,
352
+ "step": 1900
353
+ },
354
+ {
355
+ "epoch": 17.391304347826086,
356
+ "grad_norm": 3.4940972328186035,
357
+ "learning_rate": 0.000775766125554205,
358
+ "loss": 0.0154,
359
+ "step": 2000
360
+ },
361
+ {
362
+ "epoch": 17.391304347826086,
363
+ "eval_accuracy": 0.8995098039215687,
364
+ "eval_combined_score": 0.9143490297673473,
365
+ "eval_f1": 0.9291882556131261,
366
+ "eval_loss": 0.7340260148048401,
367
+ "eval_runtime": 0.5876,
368
+ "eval_samples_per_second": 694.4,
369
+ "eval_steps_per_second": 1.702,
370
+ "step": 2000
371
+ },
372
+ {
373
+ "epoch": 18.26086956521739,
374
+ "grad_norm": 0.021862367168068886,
375
+ "learning_rate": 0.0006846901846358999,
376
+ "loss": 0.0077,
377
+ "step": 2100
378
+ },
379
+ {
380
+ "epoch": 18.26086956521739,
381
+ "eval_accuracy": 0.9019607843137255,
382
+ "eval_combined_score": 0.9158926728586172,
383
+ "eval_f1": 0.9298245614035088,
384
+ "eval_loss": 0.7796983122825623,
385
+ "eval_runtime": 0.5951,
386
+ "eval_samples_per_second": 685.546,
387
+ "eval_steps_per_second": 1.68,
388
+ "step": 2100
389
+ },
390
+ {
391
+ "epoch": 19.130434782608695,
392
+ "grad_norm": 0.4798244833946228,
393
+ "learning_rate": 0.0005964354639070397,
394
+ "loss": 0.0131,
395
+ "step": 2200
396
+ },
397
+ {
398
+ "epoch": 19.130434782608695,
399
+ "eval_accuracy": 0.9166666666666666,
400
+ "eval_combined_score": 0.9280842230130486,
401
+ "eval_f1": 0.9395017793594306,
402
+ "eval_loss": 0.616762638092041,
403
+ "eval_runtime": 0.5925,
404
+ "eval_samples_per_second": 688.63,
405
+ "eval_steps_per_second": 1.688,
406
+ "step": 2200
407
+ },
408
+ {
409
+ "epoch": 20.0,
410
+ "grad_norm": 0.0014620067086070776,
411
+ "learning_rate": 0.0005117916184554203,
412
+ "loss": 0.0056,
413
+ "step": 2300
414
+ },
415
+ {
416
+ "epoch": 20.0,
417
+ "eval_accuracy": 0.9068627450980392,
418
+ "eval_combined_score": 0.9200980392156863,
419
+ "eval_f1": 0.9333333333333333,
420
+ "eval_loss": 0.8317950367927551,
421
+ "eval_runtime": 0.5854,
422
+ "eval_samples_per_second": 697.003,
423
+ "eval_steps_per_second": 1.708,
424
+ "step": 2300
425
+ },
426
+ {
427
+ "epoch": 20.869565217391305,
428
+ "grad_norm": 0.03271247446537018,
429
+ "learning_rate": 0.0004315159952270119,
430
+ "loss": 0.0035,
431
+ "step": 2400
432
+ },
433
+ {
434
+ "epoch": 20.869565217391305,
435
+ "eval_accuracy": 0.9019607843137255,
436
+ "eval_combined_score": 0.9155193992490613,
437
+ "eval_f1": 0.9290780141843972,
438
+ "eval_loss": 0.877530038356781,
439
+ "eval_runtime": 0.5888,
440
+ "eval_samples_per_second": 692.978,
441
+ "eval_steps_per_second": 1.698,
442
+ "step": 2400
443
+ },
444
+ {
445
+ "epoch": 21.73913043478261,
446
+ "grad_norm": 0.0165140051394701,
447
+ "learning_rate": 0.0003563268566987077,
448
+ "loss": 0.0038,
449
+ "step": 2500
450
+ },
451
+ {
452
+ "epoch": 21.73913043478261,
453
+ "eval_accuracy": 0.9093137254901961,
454
+ "eval_combined_score": 0.9223706498306128,
455
+ "eval_f1": 0.9354275741710296,
456
+ "eval_loss": 0.827203631401062,
457
+ "eval_runtime": 0.5885,
458
+ "eval_samples_per_second": 693.274,
459
+ "eval_steps_per_second": 1.699,
460
+ "step": 2500
461
+ },
462
+ {
463
+ "epoch": 22.608695652173914,
464
+ "grad_norm": 0.0022780098952353,
465
+ "learning_rate": 0.0002868969542575783,
466
+ "loss": 0.0042,
467
+ "step": 2600
468
+ },
469
+ {
470
+ "epoch": 22.608695652173914,
471
+ "eval_accuracy": 0.9068627450980392,
472
+ "eval_combined_score": 0.9200980392156863,
473
+ "eval_f1": 0.9333333333333333,
474
+ "eval_loss": 0.8698594570159912,
475
+ "eval_runtime": 0.6076,
476
+ "eval_samples_per_second": 671.512,
477
+ "eval_steps_per_second": 1.646,
478
+ "step": 2600
479
+ },
480
+ {
481
+ "epoch": 23.47826086956522,
482
+ "grad_norm": 0.007629064377397299,
483
+ "learning_rate": 0.00022384750878852333,
484
+ "loss": 0.0031,
485
+ "step": 2700
486
+ },
487
+ {
488
+ "epoch": 23.47826086956522,
489
+ "eval_accuracy": 0.9019607843137255,
490
+ "eval_combined_score": 0.9160153571918278,
491
+ "eval_f1": 0.9300699300699301,
492
+ "eval_loss": 0.9649954438209534,
493
+ "eval_runtime": 0.5903,
494
+ "eval_samples_per_second": 691.22,
495
+ "eval_steps_per_second": 1.694,
496
+ "step": 2700
497
+ },
498
+ {
499
+ "epoch": 24.347826086956523,
500
+ "grad_norm": 0.16465626657009125,
501
+ "learning_rate": 0.00016774265232874353,
502
+ "loss": 0.003,
503
+ "step": 2800
504
+ },
505
+ {
506
+ "epoch": 24.347826086956523,
507
+ "eval_accuracy": 0.9068627450980392,
508
+ "eval_combined_score": 0.9204452614379085,
509
+ "eval_f1": 0.9340277777777778,
510
+ "eval_loss": 1.0196751356124878,
511
+ "eval_runtime": 0.5876,
512
+ "eval_samples_per_second": 694.325,
513
+ "eval_steps_per_second": 1.702,
514
+ "step": 2800
515
+ },
516
+ {
517
+ "epoch": 25.217391304347824,
518
+ "grad_norm": 0.000333487696480006,
519
+ "learning_rate": 0.00011908438052207082,
520
+ "loss": 0.0054,
521
+ "step": 2900
522
+ },
523
+ {
524
+ "epoch": 25.217391304347824,
525
+ "eval_accuracy": 0.9093137254901961,
526
+ "eval_combined_score": 0.9221436817257659,
527
+ "eval_f1": 0.9349736379613357,
528
+ "eval_loss": 0.971728503704071,
529
+ "eval_runtime": 0.5913,
530
+ "eval_samples_per_second": 690.03,
531
+ "eval_steps_per_second": 1.691,
532
+ "step": 2900
533
+ },
534
+ {
535
+ "epoch": 26.08695652173913,
536
+ "grad_norm": 0.0011938128154724836,
537
+ "learning_rate": 7.830806103584498e-05,
538
+ "loss": 0.0025,
539
+ "step": 3000
540
+ },
541
+ {
542
+ "epoch": 26.08695652173913,
543
+ "eval_accuracy": 0.9093137254901961,
544
+ "eval_combined_score": 0.9221436817257659,
545
+ "eval_f1": 0.9349736379613357,
546
+ "eval_loss": 0.9873009324073792,
547
+ "eval_runtime": 0.5881,
548
+ "eval_samples_per_second": 693.708,
549
+ "eval_steps_per_second": 1.7,
550
+ "step": 3000
551
+ },
552
+ {
553
+ "epoch": 26.956521739130434,
554
+ "grad_norm": 0.004215817432850599,
555
+ "learning_rate": 4.577853812857102e-05,
556
+ "loss": 0.0015,
557
+ "step": 3100
558
+ },
559
+ {
560
+ "epoch": 26.956521739130434,
561
+ "eval_accuracy": 0.9093137254901961,
562
+ "eval_combined_score": 0.9222575632704921,
563
+ "eval_f1": 0.9352014010507881,
564
+ "eval_loss": 1.000651240348816,
565
+ "eval_runtime": 0.5964,
566
+ "eval_samples_per_second": 684.119,
567
+ "eval_steps_per_second": 1.677,
568
+ "step": 3100
569
+ },
570
+ {
571
+ "epoch": 27.82608695652174,
572
+ "grad_norm": 0.022620199248194695,
573
+ "learning_rate": 2.178686822255904e-05,
574
+ "loss": 0.0015,
575
+ "step": 3200
576
+ },
577
+ {
578
+ "epoch": 27.82608695652174,
579
+ "eval_accuracy": 0.9093137254901961,
580
+ "eval_combined_score": 0.9222575632704921,
581
+ "eval_f1": 0.9352014010507881,
582
+ "eval_loss": 1.0103635787963867,
583
+ "eval_runtime": 0.5893,
584
+ "eval_samples_per_second": 692.364,
585
+ "eval_steps_per_second": 1.697,
586
+ "step": 3200
587
+ },
588
+ {
589
+ "epoch": 28.695652173913043,
590
+ "grad_norm": 0.0005388130084611475,
591
+ "learning_rate": 6.547715689861789e-06,
592
+ "loss": 0.0006,
593
+ "step": 3300
594
+ },
595
+ {
596
+ "epoch": 28.695652173913043,
597
+ "eval_accuracy": 0.9093137254901961,
598
+ "eval_combined_score": 0.9222575632704921,
599
+ "eval_f1": 0.9352014010507881,
600
+ "eval_loss": 1.0093270540237427,
601
+ "eval_runtime": 0.5869,
602
+ "eval_samples_per_second": 695.149,
603
+ "eval_steps_per_second": 1.704,
604
+ "step": 3300
605
+ },
606
+ {
607
+ "epoch": 29.565217391304348,
608
+ "grad_norm": 0.004557831212878227,
609
+ "learning_rate": 1.97432152599486e-07,
610
+ "loss": 0.0013,
611
+ "step": 3400
612
+ },
613
+ {
614
+ "epoch": 29.565217391304348,
615
+ "eval_accuracy": 0.9093137254901961,
616
+ "eval_combined_score": 0.9222575632704921,
617
+ "eval_f1": 0.9352014010507881,
618
+ "eval_loss": 1.0097448825836182,
619
+ "eval_runtime": 0.5963,
620
+ "eval_samples_per_second": 684.233,
621
+ "eval_steps_per_second": 1.677,
622
+ "step": 3400
623
+ },
624
+ {
625
+ "epoch": 30.0,
626
+ "step": 3450,
627
+ "total_flos": 1.83610838283264e+16,
628
+ "train_loss": 0.06965703106876733,
629
+ "train_runtime": 684.2534,
630
+ "train_samples_per_second": 160.818,
631
+ "train_steps_per_second": 5.042
632
+ }
633
+ ],
634
+ "logging_steps": 100,
635
+ "max_steps": 3450,
636
+ "num_input_tokens_seen": 0,
637
+ "num_train_epochs": 30,
638
+ "save_steps": 100,
639
+ "stateful_callbacks": {
640
+ "TrainerControl": {
641
+ "args": {
642
+ "should_epoch_stop": false,
643
+ "should_evaluate": false,
644
+ "should_log": false,
645
+ "should_save": true,
646
+ "should_training_stop": true
647
+ },
648
+ "attributes": {}
649
+ }
650
+ },
651
+ "total_flos": 1.83610838283264e+16,
652
+ "train_batch_size": 32,
653
+ "trial_name": null,
654
+ "trial_params": null
655
+ }
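trainer_state.json keeps the full training log for this run: loss, gradient norm, and learning rate every 100 steps, plus an evaluation on the 408-example dev set at the same interval, with the best checkpoint (step 800, eval accuracy 0.9167) tracked in best_global_step and best_metric. A minimal sketch that recovers the best dev accuracy from log_history, with an illustrative path:

import json

# Illustrative path to the trainer state shown above.
state_path = "nlu/glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h43m47,r=4,s44/trainer_state.json"

with open(state_path) as f:
    state = json.load(f)

evals = [e for e in state["log_history"] if "eval_accuracy" in e]
best = max(evals, key=lambda e: e["eval_accuracy"])
print(best["step"], best["eval_accuracy"])  # 800 0.9166666666666666 (tie at step 2200 keeps the earlier entry)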
nlu/glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h55m38,r=4,s45/MRPC.tsv ADDED
@@ -0,0 +1,1726 @@
1
+ index prediction
2
+ 0 1
3
+ 1 1
4
+ 2 1
5
+ 3 1
6
+ 4 0
7
+ 5 1
8
+ 6 0
9
+ 7 1
10
+ 8 1
11
+ 9 0
12
+ 10 1
13
+ 11 1
14
+ 12 1
15
+ 13 0
16
+ 14 1
17
+ 15 1
18
+ 16 1
19
+ 17 1
20
+ 18 1
21
+ 19 1
22
+ 20 1
23
+ 21 1
24
+ 22 1
25
+ 23 1
26
+ 24 0
27
+ 25 1
28
+ 26 1
29
+ 27 1
30
+ 28 1
31
+ 29 1
32
+ 30 1
33
+ 31 0
34
+ 32 0
35
+ 33 0
36
+ 34 0
37
+ 35 0
38
+ 36 1
39
+ 37 1
40
+ 38 0
41
+ 39 1
42
+ 40 1
43
+ 41 1
44
+ 42 0
45
+ 43 1
46
+ 44 1
47
+ 45 0
48
+ 46 1
49
+ 47 0
50
+ 48 1
51
+ 49 1
52
+ 50 1
53
+ 51 1
54
+ 52 1
55
+ 53 1
56
+ 54 0
57
+ 55 1
58
+ 56 1
59
+ 57 1
60
+ 58 0
61
+ 59 1
62
+ 60 1
63
+ 61 1
64
+ 62 1
65
+ 63 1
66
+ 64 1
67
+ 65 1
68
+ 66 1
69
+ 67 1
70
+ 68 1
71
+ 69 1
72
+ 70 0
73
+ 71 1
74
+ 72 0
75
+ 73 0
76
+ 74 1
77
+ 75 0
78
+ 76 1
79
+ 77 1
80
+ 78 1
81
+ 79 0
82
+ 80 1
83
+ 81 0
84
+ 82 1
85
+ 83 0
86
+ 84 0
87
+ 85 0
88
+ 86 1
89
+ 87 1
90
+ 88 0
91
+ 89 1
92
+ 90 0
93
+ 91 1
94
+ 92 1
95
+ 93 1
96
+ 94 1
97
+ 95 0
98
+ 96 1
99
+ 97 1
100
+ 98 0
101
+ 99 1
102
+ 100 1
103
+ 101 0
104
+ 102 1
105
+ 103 1
106
+ 104 1
107
+ 105 0
108
+ 106 0
109
+ 107 1
110
+ 108 1
111
+ 109 1
112
+ 110 1
113
+ 111 0
114
+ 112 0
115
+ 113 1
116
+ 114 1
117
+ 115 0
118
+ 116 0
119
+ 117 1
120
+ 118 1
121
+ 119 1
122
+ 120 1
123
+ 121 0
124
+ 122 1
125
+ 123 1
126
+ 124 1
127
+ 125 1
128
+ 126 1
129
+ 127 1
130
+ 128 0
131
+ 129 1
132
+ 130 0
133
+ 131 1
134
+ 132 1
135
+ 133 0
136
+ 134 1
137
+ 135 0
138
+ 136 1
139
+ 137 1
140
+ 138 1
141
+ 139 1
142
+ 140 1
143
+ 141 1
144
+ 142 1
145
+ 143 1
146
+ 144 1
147
+ 145 1
148
+ 146 0
149
+ 147 1
150
+ 148 1
151
+ 149 1
152
+ 150 1
153
+ 151 1
154
+ 152 0
155
+ 153 0
156
+ 154 1
157
+ 155 1
158
+ 156 1
159
+ 157 1
160
+ 158 1
161
+ 159 1
162
+ 160 1
163
+ 161 1
164
+ 162 1
165
+ 163 0
166
+ 164 0
167
+ 165 1
168
+ 166 1
169
+ 167 0
170
+ 168 1
171
+ 169 1
172
+ 170 1
173
+ 171 1
174
+ 172 1
175
+ 173 0
176
+ 174 1
177
+ 175 0
178
+ 176 1
179
+ 177 0
180
+ 178 1
181
+ 179 1
182
+ 180 0
183
+ 181 0
184
+ 182 0
185
+ 183 1
186
+ 184 1
187
+ 185 1
188
+ 186 0
189
+ 187 1
190
+ 188 1
191
+ 189 1
192
+ 190 0
193
+ 191 0
194
+ 192 1
195
+ 193 1
196
+ 194 0
197
+ 195 0
198
+ 196 1
199
+ 197 1
200
+ 198 1
201
+ 199 0
202
+ 200 0
203
+ 201 1
204
+ 202 1
205
+ 203 1
206
+ 204 0
207
+ 205 0
208
+ 206 1
209
+ 207 1
210
+ 208 0
211
+ 209 1
212
+ 210 0
213
+ 211 0
214
+ 212 0
215
+ 213 0
216
+ 214 1
217
+ 215 0
218
+ 216 1
219
+ 217 1
220
+ 218 1
221
+ 219 1
222
+ 220 1
223
+ 221 0
224
+ 222 1
225
+ 223 0
226
+ 224 1
227
+ 225 1
228
+ 226 1
229
+ 227 1
230
+ 228 0
231
+ 229 1
232
+ 230 1
233
+ 231 1
234
+ 232 1
235
+ 233 1
236
+ 234 0
237
+ 235 0
238
+ 236 1
239
+ 237 1
240
+ 238 0
241
+ 239 0
242
+ 240 1
243
+ 241 1
244
+ 242 1
245
+ 243 1
246
+ 244 0
247
+ 245 1
248
+ 246 1
249
+ 247 1
250
+ 248 1
251
+ 249 1
252
+ 250 1
253
+ 251 1
254
+ 252 0
255
+ 253 1
256
+ 254 1
257
+ 255 1
258
+ 256 1
259
+ 257 1
260
+ 258 1
261
+ 259 1
262
+ 260 1
263
+ 261 1
264
+ 262 1
265
+ 263 1
266
+ 264 1
267
+ 265 1
268
+ 266 1
269
+ 267 0
270
+ 268 1
271
+ 269 1
272
+ 270 1
273
+ 271 0
274
+ 272 1
275
+ 273 0
276
+ 274 0
277
+ 275 0
278
+ 276 0
279
+ 277 1
280
+ 278 1
281
+ 279 1
282
+ 280 1
283
+ 281 0
284
+ 282 1
285
+ 283 1
286
+ 284 1
287
+ 285 0
288
+ 286 1
289
+ 287 0
290
+ 288 0
291
+ 289 0
292
+ 290 0
293
+ 291 1
294
+ 292 1
295
+ 293 1
296
+ 294 1
297
+ 295 0
298
+ 296 0
299
+ 297 0
300
+ 298 1
301
+ 299 1
302
+ 300 1
303
+ 301 1
304
+ 302 0
305
+ 303 1
306
+ 304 1
307
+ 305 0
308
+ 306 1
309
+ 307 1
310
+ 308 0
311
+ 309 1
312
+ 310 1
313
+ 311 1
314
+ 312 0
315
+ 313 1
316
+ 314 1
317
+ 315 1
318
+ 316 1
319
+ 317 1
320
+ 318 1
321
+ 319 0
322
+ 320 1
323
+ 321 1
324
+ 322 1
325
+ 323 1
326
+ 324 1
327
+ 325 1
328
+ 326 1
329
+ 327 0
330
+ 328 0
331
+ 329 0
332
+ 330 0
333
+ 331 0
334
+ 332 1
335
+ 333 1
336
+ 334 1
337
+ 335 0
338
+ 336 1
339
+ 337 1
340
+ 338 1
341
+ 339 1
342
+ 340 1
343
+ 341 1
344
+ 342 1
345
+ 343 1
346
+ 344 0
347
+ 345 1
348
+ 346 1
349
+ 347 0
350
+ 348 1
351
+ 349 1
352
+ 350 1
353
+ 351 1
354
+ 352 0
355
+ 353 1
356
+ 354 0
357
+ 355 0
358
+ 356 0
359
+ 357 0
360
+ 358 1
361
+ 359 0
362
+ 360 0
363
+ 361 0
364
+ 362 1
365
+ 363 1
366
+ 364 0
367
+ 365 1
368
+ 366 0
369
+ 367 0
370
+ 368 1
371
+ 369 0
372
+ 370 1
373
+ 371 1
374
+ 372 1
375
+ 373 1
376
+ 374 1
377
+ 375 0
378
+ 376 0
379
+ 377 1
380
+ 378 0
381
+ 379 0
382
+ 380 1
383
+ 381 1
384
+ 382 1
385
+ 383 1
386
+ 384 1
387
+ 385 0
388
+ 386 1
389
+ 387 0
390
+ 388 1
391
+ 389 1
392
+ 390 1
393
+ 391 0
394
+ 392 0
395
+ 393 1
396
+ 394 1
397
+ 395 1
398
+ 396 1
399
+ 397 1
400
+ 398 0
401
+ 399 1
402
+ 400 1
403
+ 401 0
404
+ 402 0
405
+ 403 1
406
+ 404 1
407
+ 405 0
408
+ 406 0
409
+ 407 1
410
+ 408 1
411
+ 409 0
412
+ 410 0
413
+ 411 0
414
+ 412 1
415
+ 413 1
416
+ 414 1
417
+ 415 0
418
+ 416 0
419
+ 417 1
420
+ 418 1
421
+ 419 1
422
+ 420 1
423
+ 421 1
424
+ 422 1
425
+ 423 1
426
+ 424 0
427
+ 425 0
428
+ 426 0
429
+ 427 1
430
+ 428 0
431
+ 429 1
432
+ 430 0
433
+ 431 1
434
+ 432 1
435
+ 433 1
436
+ 434 1
437
+ 435 1
438
+ 436 1
439
+ 437 0
440
+ 438 1
441
+ 439 0
442
+ 440 0
443
+ 441 0
444
+ 442 1
445
+ 443 1
446
+ 444 1
447
+ 445 1
448
+ 446 1
449
+ 447 1
450
+ 448 1
451
+ 449 1
452
+ 450 0
453
+ 451 1
454
+ 452 1
455
+ 453 1
456
+ 454 1
457
+ 455 0
458
+ 456 1
459
+ 457 1
460
+ 458 0
461
+ 459 1
462
+ 460 1
463
+ 461 1
464
+ 462 1
465
+ 463 0
466
+ 464 0
467
+ 465 0
468
+ 466 1
469
+ 467 1
470
+ 468 1
471
+ 469 1
472
+ 470 0
473
+ 471 1
474
+ 472 1
475
+ 473 1
476
+ 474 1
477
+ 475 1
478
+ 476 1
479
+ 477 1
480
+ 478 1
481
+ 479 1
482
+ 480 0
483
+ 481 1
484
+ 482 1
485
+ 483 1
486
+ 484 0
487
+ 485 1
488
+ 486 1
489
+ 487 1
490
+ 488 1
491
+ 489 1
492
+ 490 1
493
+ 491 1
494
+ 492 1
495
+ 493 1
496
+ 494 0
497
+ 495 1
498
+ 496 1
499
+ 497 1
500
+ 498 0
501
+ 499 1
502
+ 500 1
503
+ 501 1
504
+ 502 1
505
+ 503 0
506
+ 504 1
507
+ 505 1
508
+ 506 0
509
+ 507 1
510
+ 508 0
511
+ 509 0
512
+ 510 1
513
+ 511 1
514
+ 512 0
515
+ 513 1
516
+ 514 1
517
+ 515 1
518
+ 516 1
519
+ 517 0
520
+ 518 1
521
+ 519 1
522
+ 520 1
523
+ 521 0
524
+ 522 1
525
+ 523 1
526
+ 524 1
527
+ 525 1
528
+ 526 0
529
+ 527 1
530
+ 528 1
531
+ 529 0
532
+ 530 1
533
+ 531 1
534
+ 532 1
535
+ 533 0
536
+ 534 1
537
+ 535 1
538
+ 536 1
539
+ 537 0
540
+ 538 0
541
+ 539 1
542
+ 540 0
543
+ 541 1
544
+ 542 1
545
+ 543 1
546
+ 544 1
547
+ 545 1
548
+ 546 1
549
+ 547 0
550
+ 548 0
551
+ 549 0
552
+ 550 1
553
+ 551 1
554
+ 552 1
555
+ 553 1
556
+ 554 0
557
+ 555 1
558
+ 556 0
559
+ 557 1
560
+ 558 1
561
+ 559 1
562
+ 560 1
563
+ 561 0
564
+ 562 0
565
+ 563 0
566
+ 564 0
567
+ 565 1
568
+ 566 1
569
+ 567 1
570
+ 568 0
571
+ 569 0
572
+ 570 1
573
+ 571 1
574
+ 572 1
575
+ 573 1
576
+ 574 1
577
+ 575 1
578
+ 576 1
579
+ 577 0
580
+ 578 1
581
+ 579 1
582
+ 580 1
583
+ 581 0
584
+ 582 1
585
+ 583 1
586
+ 584 1
587
+ 585 1
588
+ 586 0
589
+ 587 1
590
+ 588 1
591
+ 589 1
592
+ 590 1
593
+ 591 1
594
+ 592 1
595
+ 593 0
596
+ 594 0
597
+ 595 1
598
+ 596 1
599
+ 597 0
600
+ 598 1
601
+ 599 1
602
+ 600 1
603
+ 601 1
604
+ 602 1
605
+ 603 1
606
+ 604 1
607
+ 605 1
608
+ 606 0
609
+ 607 1
610
+ 608 0
611
+ 609 1
612
+ 610 1
613
+ 611 1
614
+ 612 0
615
+ 613 1
616
+ 614 1
617
+ 615 1
618
+ 616 1
619
+ 617 0
620
+ 618 1
621
+ 619 1
622
+ 620 1
623
+ 621 0
624
+ 622 1
625
+ 623 0
626
+ 624 1
627
+ 625 0
628
+ 626 1
629
+ 627 1
630
+ 628 0
631
+ 629 1
632
+ 630 0
633
+ 631 1
634
+ 632 1
635
+ 633 0
636
+ 634 0
637
+ 635 1
638
+ 636 0
639
+ 637 1
640
+ 638 1
641
+ 639 1
642
+ 640 1
643
+ 641 1
644
+ 642 1
645
+ 643 1
646
+ 644 1
647
+ 645 1
648
+ 646 1
649
+ 647 1
650
+ 648 0
651
+ 649 1
652
+ 650 1
653
+ 651 0
654
+ 652 0
655
+ 653 1
656
+ 654 1
657
+ 655 1
658
+ 656 1
659
+ 657 1
660
+ 658 1
661
+ 659 1
662
+ 660 1
663
+ 661 1
664
+ 662 1
665
+ 663 1
666
+ 664 1
667
+ 665 0
668
+ 666 1
669
+ 667 0
670
+ 668 0
671
+ 669 0
672
+ 670 1
673
+ 671 1
674
+ 672 0
675
+ 673 1
676
+ 674 1
677
+ 675 0
678
+ 676 1
679
+ 677 1
680
+ 678 1
681
+ 679 1
682
+ 680 0
683
+ 681 1
684
+ 682 1
685
+ 683 1
686
+ 684 1
687
+ 685 1
688
+ 686 1
689
+ 687 0
690
+ 688 1
691
+ 689 1
692
+ 690 1
693
+ 691 0
694
+ 692 0
695
+ 693 1
696
+ 694 0
697
+ 695 0
698
+ 696 1
699
+ 697 1
700
+ 698 0
701
+ 699 1
702
+ 700 0
703
+ 701 1
704
+ 702 1
705
+ 703 0
706
+ 704 1
707
+ 705 0
708
+ 706 0
709
+ 707 0
710
+ 708 1
711
+ 709 1
712
+ 710 0
713
+ 711 0
714
+ 712 1
715
+ 713 1
716
+ 714 1
717
+ 715 1
718
+ 716 1
719
+ 717 0
720
+ 718 1
721
+ 719 1
722
+ 720 1
723
+ 721 1
724
+ 722 0
725
+ 723 0
726
+ 724 1
727
+ 725 1
728
+ 726 1
729
+ 727 1
730
+ 728 1
731
+ 729 0
732
+ 730 1
733
+ 731 1
734
+ 732 1
735
+ 733 0
736
+ 734 1
737
+ 735 1
738
+ 736 1
739
+ 737 1
740
+ 738 1
741
+ 739 1
742
+ 740 1
743
+ 741 1
744
+ 742 1
745
+ 743 1
746
+ 744 1
747
+ 745 0
748
+ 746 1
749
+ 747 0
750
+ 748 1
751
+ 749 0
752
+ 750 1
753
+ 751 1
754
+ 752 1
755
+ 753 0
756
+ 754 1
757
+ 755 1
758
+ 756 1
759
+ 757 0
760
+ 758 1
761
+ 759 1
762
+ 760 1
763
+ 761 1
764
+ 762 1
765
+ 763 1
766
+ 764 1
767
+ 765 1
768
+ 766 1
769
+ 767 1
770
+ 768 1
771
+ 769 0
772
+ 770 1
773
+ 771 1
774
+ 772 1
775
+ 773 1
776
+ 774 1
777
+ 775 1
778
+ 776 0
779
+ 777 1
780
+ 778 0
781
+ 779 1
782
+ 780 0
783
+ 781 0
784
+ 782 1
785
+ 783 0
786
+ 784 0
787
+ 785 1
788
+ 786 1
789
+ 787 0
790
+ 788 1
791
+ 789 1
792
+ 790 1
793
+ 791 1
794
+ 792 0
795
+ 793 1
796
+ 794 1
797
+ 795 1
798
+ 796 0
799
+ 797 0
800
+ 798 1
801
+ 799 0
802
+ 800 0
803
+ 801 0
804
+ 802 1
805
+ 803 1
806
+ 804 0
807
+ 805 0
808
+ 806 1
809
+ 807 0
810
+ 808 1
811
+ 809 0
812
+ 810 1
813
+ 811 1
814
+ 812 0
815
+ 813 1
816
+ 814 0
817
+ 815 1
818
+ 816 0
819
+ 817 0
820
+ 818 0
821
+ 819 1
822
+ 820 0
823
+ 821 1
824
+ 822 1
825
+ 823 1
826
+ 824 1
827
+ 825 1
828
+ 826 1
829
+ 827 0
830
+ 828 0
831
+ 829 1
832
+ 830 1
833
+ 831 1
834
+ 832 1
835
+ 833 0
836
+ 834 1
837
+ 835 0
838
+ 836 1
839
+ 837 1
840
+ 838 0
841
+ 839 0
842
+ 840 0
843
+ 841 0
844
+ 842 1
845
+ 843 1
846
+ 844 0
847
+ 845 1
848
+ 846 1
849
+ 847 1
850
+ 848 0
851
+ 849 1
852
+ 850 1
853
+ 851 1
854
+ 852 1
855
+ 853 0
856
+ 854 1
857
+ 855 0
858
+ 856 1
859
+ 857 1
860
+ 858 0
861
+ 859 1
862
+ 860 0
863
+ 861 1
864
+ 862 1
865
+ 863 1
866
+ 864 1
867
+ 865 1
868
+ 866 0
869
+ 867 1
870
+ 868 1
871
+ 869 1
872
+ 870 0
873
+ 871 1
874
+ 872 1
875
+ 873 1
876
+ 874 0
877
+ 875 0
878
+ 876 1
879
+ 877 0
880
+ 878 0
881
+ 879 0
882
+ 880 1
883
+ 881 1
884
+ 882 1
885
+ 883 0
886
+ 884 1
887
+ 885 1
888
+ 886 1
889
+ 887 1
890
+ 888 0
891
+ 889 1
892
+ 890 1
893
+ 891 1
894
+ 892 0
895
+ 893 0
896
+ 894 0
897
+ 895 0
898
+ 896 0
899
+ 897 1
900
+ 898 1
901
+ 899 1
902
+ 900 1
903
+ 901 1
904
+ 902 0
905
+ 903 1
906
+ 904 1
907
+ 905 1
908
+ 906 1
909
+ 907 1
910
+ 908 1
911
+ 909 1
912
+ 910 0
913
+ 911 1
914
+ 912 1
915
+ 913 0
916
+ 914 1
917
+ 915 1
918
+ 916 1
919
+ 917 1
920
+ 918 0
921
+ 919 1
922
+ 920 1
923
+ 921 1
924
+ 922 1
925
+ 923 0
926
+ 924 1
927
+ 925 1
928
+ 926 1
929
+ 927 0
930
+ 928 1
931
+ 929 1
932
+ 930 1
933
+ 931 0
934
+ 932 0
935
+ 933 1
936
+ 934 0
937
+ 935 1
938
+ 936 1
939
+ 937 0
940
+ 938 1
941
+ 939 1
942
+ 940 1
943
+ 941 1
944
+ 942 0
945
+ 943 1
946
+ 944 1
947
+ 945 1
948
+ 946 1
949
+ 947 1
950
+ 948 1
951
+ 949 1
952
+ 950 0
953
+ 951 1
954
+ 952 0
955
+ 953 1
956
+ 954 1
957
+ 955 1
958
+ 956 1
959
+ 957 1
960
+ 958 1
961
+ 959 1
962
+ 960 1
963
+ 961 1
964
+ 962 1
965
+ 963 0
966
+ 964 0
967
+ 965 1
968
+ 966 1
969
+ 967 1
970
+ 968 1
971
+ 969 1
972
+ 970 1
973
+ 971 0
974
+ 972 0
975
+ 973 1
976
+ 974 1
977
+ 975 1
978
+ 976 1
979
+ 977 1
980
+ 978 1
981
+ 979 1
982
+ 980 1
983
+ 981 1
984
+ 982 0
985
+ 983 1
986
+ 984 0
987
+ 985 1
988
+ 986 1
989
+ 987 0
990
+ 988 1
991
+ 989 0
992
+ 990 1
993
+ 991 1
994
+ 992 1
995
+ 993 1
996
+ 994 0
997
+ 995 0
998
+ 996 1
999
+ 997 1
1000
+ 998 1
1001
+ 999 1
1002
+ 1000 0
1003
+ 1001 1
1004
+ 1002 0
1005
+ 1003 0
1006
+ 1004 1
1007
+ 1005 1
1008
+ 1006 1
1009
+ 1007 1
1010
+ 1008 1
1011
+ 1009 0
1012
+ 1010 1
1013
+ 1011 0
1014
+ 1012 1
1015
+ 1013 1
1016
+ 1014 1
1017
+ 1015 1
1018
+ 1016 1
1019
+ 1017 1
1020
+ 1018 1
1021
+ 1019 1
1022
+ 1020 1
1023
+ 1021 1
1024
+ 1022 1
1025
+ 1023 0
1026
+ 1024 1
1027
+ 1025 0
1028
+ 1026 0
1029
+ 1027 1
1030
+ 1028 0
1031
+ 1029 1
1032
+ 1030 0
1033
+ 1031 1
1034
+ 1032 1
1035
+ 1033 1
1036
+ 1034 1
1037
+ 1035 0
1038
+ 1036 1
1039
+ 1037 1
1040
+ 1038 1
1041
+ 1039 0
1042
+ 1040 0
1043
+ 1041 1
1044
+ 1042 0
1045
+ 1043 0
1046
+ 1044 1
1047
+ 1045 1
1048
+ 1046 0
1049
+ 1047 1
1050
+ 1048 1
1051
+ 1049 1
1052
+ 1050 1
1053
+ 1051 1
1054
+ 1052 0
1055
+ 1053 1
1056
+ 1054 0
1057
+ 1055 1
1058
+ 1056 1
1059
+ 1057 1
1060
+ 1058 1
1061
+ 1059 1
1062
+ 1060 0
1063
+ 1061 1
1064
+ 1062 1
1065
+ 1063 1
1066
+ 1064 1
1067
+ 1065 1
1068
+ 1066 1
1069
+ 1067 1
1070
+ 1068 0
1071
+ 1069 1
1072
+ 1070 1
1073
+ 1071 0
1074
+ 1072 1
1075
+ 1073 1
1076
+ 1074 1
1077
+ 1075 1
1078
+ 1076 1
1079
+ 1077 1
1080
+ 1078 1
1081
+ 1079 1
1082
+ 1080 0
1083
+ 1081 0
1084
+ 1082 1
1085
+ 1083 1
1086
+ 1084 1
1087
+ 1085 1
1088
+ 1086 1
1089
+ 1087 1
1090
+ 1088 1
1091
+ 1089 1
1092
+ 1090 1
1093
+ 1091 0
1094
+ 1092 1
1095
+ 1093 1
1096
+ 1094 1
1097
+ 1095 1
1098
+ 1096 1
1099
+ 1097 1
1100
+ 1098 1
1101
+ 1099 1
1102
+ 1100 1
1103
+ 1101 1
1104
+ 1102 0
1105
+ 1103 1
1106
+ 1104 1
1107
+ 1105 0
1108
+ 1106 1
1109
+ 1107 0
1110
+ 1108 0
1111
+ 1109 1
1112
+ 1110 0
1113
+ 1111 1
1114
+ 1112 0
1115
+ 1113 0
1116
+ 1114 1
1117
+ 1115 1
1118
+ 1116 0
1119
+ 1117 1
1120
+ 1118 1
1121
+ 1119 1
1122
+ 1120 0
1123
+ 1121 0
1124
+ 1122 1
1125
+ 1123 1
1126
+ 1124 0
1127
+ 1125 1
1128
+ 1126 0
1129
+ 1127 0
1130
+ 1128 1
1131
+ 1129 1
1132
+ 1130 0
1133
+ 1131 1
1134
+ 1132 0
1135
+ 1133 1
1136
+ 1134 0
1137
+ 1135 1
1138
+ 1136 0
1139
+ 1137 1
1140
+ 1138 0
1141
+ 1139 0
1142
+ 1140 1
1143
+ 1141 1
1144
+ 1142 1
1145
+ 1143 1
1146
+ 1144 0
1147
+ 1145 1
1148
+ 1146 1
1149
+ 1147 0
1150
+ 1148 1
1151
+ 1149 1
1152
+ 1150 0
1153
+ 1151 1
1154
+ 1152 1
1155
+ 1153 0
1156
+ 1154 0
1157
+ 1155 1
1158
+ 1156 1
1159
+ 1157 0
1160
+ 1158 0
1161
+ 1159 1
1162
+ 1160 0
1163
+ 1161 0
1164
+ 1162 1
1165
+ 1163 1
1166
+ 1164 1
1167
+ 1165 0
1168
+ 1166 0
1169
+ 1167 1
1170
+ 1168 1
1171
+ 1169 0
1172
+ 1170 0
1173
+ 1171 1
1174
+ 1172 1
1175
+ 1173 1
1176
+ 1174 1
1177
+ 1175 0
1178
+ 1176 1
1179
+ 1177 0
1180
+ 1178 1
1181
+ 1179 1
1182
+ 1180 1
1183
+ 1181 1
1184
+ 1182 1
1185
+ 1183 1
1186
+ 1184 1
1187
+ 1185 1
1188
+ 1186 0
1189
+ 1187 1
1190
+ 1188 1
1191
+ 1189 0
1192
+ 1190 1
1193
+ 1191 0
1194
+ 1192 1
1195
+ 1193 1
1196
+ 1194 1
1197
+ 1195 1
1198
+ 1196 0
1199
+ 1197 1
1200
+ 1198 1
1201
+ 1199 1
1202
+ 1200 1
1203
+ 1201 1
1204
+ 1202 0
1205
+ 1203 1
1206
+ 1204 0
1207
+ 1205 1
1208
+ 1206 0
1209
+ 1207 0
1210
+ 1208 1
1211
+ 1209 1
1212
+ 1210 1
1213
+ 1211 1
1214
+ 1212 1
1215
+ 1213 1
1216
+ 1214 1
1217
+ 1215 1
1218
+ 1216 1
1219
+ 1217 1
1220
+ 1218 1
1221
+ 1219 1
1222
+ 1220 1
1223
+ 1221 1
1224
+ 1222 1
1225
+ 1223 1
1226
+ 1224 0
1227
+ 1225 1
1228
+ 1226 0
1229
+ 1227 1
1230
+ 1228 1
1231
+ 1229 1
1232
+ 1230 1
1233
+ 1231 0
1234
+ 1232 1
1235
+ 1233 1
1236
+ 1234 1
1237
+ 1235 1
1238
+ 1236 1
1239
+ 1237 1
1240
+ 1238 1
1241
+ 1239 1
1242
+ 1240 1
1243
+ 1241 0
1244
+ 1242 1
1245
+ 1243 1
1246
+ 1244 1
1247
+ 1245 1
1248
+ 1246 0
1249
+ 1247 1
1250
+ 1248 1
1251
+ 1249 1
1252
+ 1250 1
1253
+ 1251 1
1254
+ 1252 1
1255
+ 1253 1
1256
+ 1254 1
1257
+ 1255 1
1258
+ 1256 1
1259
+ 1257 1
1260
+ 1258 0
1261
+ 1259 1
1262
+ 1260 1
1263
+ 1261 1
1264
+ 1262 1
1265
+ 1263 1
1266
+ 1264 1
1267
+ 1265 1
1268
+ 1266 1
1269
+ 1267 1
1270
+ 1268 1
1271
+ 1269 1
1272
+ 1270 1
1273
+ 1271 1
1274
+ 1272 0
1275
+ 1273 0
1276
+ 1274 0
1277
+ 1275 1
1278
+ 1276 1
1279
+ 1277 1
1280
+ 1278 1
1281
+ 1279 0
1282
+ 1280 1
1283
+ 1281 1
1284
+ 1282 1
1285
+ 1283 0
1286
+ 1284 1
1287
+ 1285 1
1288
+ 1286 1
1289
+ 1287 0
1290
+ 1288 1
1291
+ 1289 1
1292
+ 1290 1
1293
+ 1291 1
1294
+ 1292 0
1295
+ 1293 0
1296
+ 1294 0
1297
+ 1295 1
1298
+ 1296 1
1299
+ 1297 1
1300
+ 1298 1
1301
+ 1299 1
1302
+ 1300 0
1303
+ 1301 0
1304
+ 1302 1
1305
+ 1303 1
1306
+ 1304 1
1307
+ 1305 0
1308
+ 1306 0
1309
+ 1307 1
1310
+ 1308 1
1311
+ 1309 0
1312
+ 1310 1
1313
+ 1311 1
1314
+ 1312 1
1315
+ 1313 1
1316
+ 1314 1
1317
+ 1315 1
1318
+ 1316 1
1319
+ 1317 1
1320
+ 1318 0
1321
+ 1319 1
1322
+ 1320 1
1323
+ 1321 0
1324
+ 1322 1
1325
+ 1323 1
1326
+ 1324 1
1327
+ 1325 1
1328
+ 1326 1
1329
+ 1327 1
1330
+ 1328 1
1331
+ 1329 0
1332
+ 1330 0
1333
+ 1331 1
1334
+ 1332 1
1335
+ 1333 0
1336
+ 1334 1
1337
+ 1335 0
1338
+ 1336 0
1339
+ 1337 0
1340
+ 1338 0
1341
+ 1339 1
1342
+ 1340 0
1343
+ 1341 0
1344
+ 1342 0
1345
+ 1343 0
1346
+ 1344 1
1347
+ 1345 1
1348
+ 1346 1
1349
+ 1347 1
1350
+ 1348 1
1351
+ 1349 1
1352
+ 1350 1
1353
+ 1351 1
1354
+ 1352 1
1355
+ 1353 0
1356
+ 1354 0
1357
+ 1355 0
1358
+ 1356 1
1359
+ 1357 0
1360
+ 1358 0
1361
+ 1359 1
1362
+ 1360 1
1363
+ 1361 1
1364
+ 1362 1
1365
+ 1363 0
1366
+ 1364 1
1367
+ 1365 1
1368
+ 1366 0
1369
+ 1367 0
1370
+ 1368 1
1371
+ 1369 1
1372
+ 1370 0
1373
+ 1371 0
1374
+ 1372 1
1375
+ 1373 1
1376
+ 1374 0
1377
+ 1375 1
1378
+ 1376 0
1379
+ 1377 1
1380
+ 1378 1
1381
+ 1379 1
1382
+ 1380 1
1383
+ 1381 1
1384
+ 1382 1
1385
+ 1383 0
1386
+ 1384 1
1387
+ 1385 1
1388
+ 1386 0
1389
+ 1387 0
1390
+ 1388 1
1391
+ 1389 1
1392
+ 1390 1
1393
+ 1391 0
1394
+ 1392 0
1395
+ 1393 1
1396
+ 1394 1
1397
+ 1395 1
1398
+ 1396 0
1399
+ 1397 1
1400
+ 1398 1
1401
+ 1399 1
1402
+ 1400 0
1403
+ 1401 0
1404
+ 1402 1
1405
+ 1403 0
1406
+ 1404 1
1407
+ 1405 0
1408
+ 1406 1
1409
+ 1407 1
1410
+ 1408 1
1411
+ 1409 1
1412
+ 1410 0
1413
+ 1411 0
1414
+ 1412 1
1415
+ 1413 1
1416
+ 1414 0
1417
+ 1415 1
1418
+ 1416 1
1419
+ 1417 0
1420
+ 1418 1
1421
+ 1419 1
1422
+ 1420 1
1423
+ 1421 1
1424
+ 1422 0
1425
+ 1423 0
1426
+ 1424 0
1427
+ 1425 1
1428
+ 1426 0
1429
+ 1427 1
1430
+ 1428 0
1431
+ 1429 0
1432
+ 1430 1
1433
+ 1431 1
1434
+ 1432 1
1435
+ 1433 0
1436
+ 1434 1
1437
+ 1435 0
1438
+ 1436 1
1439
+ 1437 1
1440
+ 1438 1
1441
+ 1439 1
1442
+ 1440 1
1443
+ 1441 1
1444
+ 1442 0
1445
+ 1443 0
1446
+ 1444 0
1447
+ 1445 1
1448
+ 1446 1
1449
+ 1447 1
1450
+ 1448 0
1451
+ 1449 1
1452
+ 1450 1
1453
+ 1451 0
1454
+ 1452 1
1455
+ 1453 1
1456
+ 1454 1
1457
+ 1455 1
1458
+ 1456 1
1459
+ 1457 1
1460
+ 1458 1
1461
+ 1459 1
1462
+ 1460 1
1463
+ 1461 1
1464
+ 1462 1
1465
+ 1463 0
1466
+ 1464 1
1467
+ 1465 1
1468
+ 1466 1
1469
+ 1467 1
1470
+ 1468 1
1471
+ 1469 0
1472
+ 1470 1
1473
+ 1471 1
1474
+ 1472 0
1475
+ 1473 0
1476
+ 1474 1
1477
+ 1475 0
1478
+ 1476 0
1479
+ 1477 1
1480
+ 1478 1
1481
+ 1479 0
1482
+ 1480 1
1483
+ 1481 0
1484
+ 1482 1
1485
+ 1483 0
1486
+ 1484 0
1487
+ 1485 1
1488
+ 1486 0
1489
+ 1487 0
1490
+ 1488 1
1491
+ 1489 1
1492
+ 1490 0
1493
+ 1491 1
1494
+ 1492 1
1495
+ 1493 1
1496
+ 1494 1
1497
+ 1495 1
1498
+ 1496 1
1499
+ 1497 0
1500
+ 1498 0
1501
+ 1499 0
1502
+ 1500 1
1503
+ 1501 1
1504
+ 1502 1
1505
+ 1503 0
1506
+ 1504 1
1507
+ 1505 1
1508
+ 1506 1
1509
+ 1507 1
1510
+ 1508 0
1511
+ 1509 0
1512
+ 1510 1
1513
+ 1511 1
1514
+ 1512 1
1515
+ 1513 1
1516
+ 1514 0
1517
+ 1515 1
1518
+ 1516 0
1519
+ 1517 1
1520
+ 1518 0
1521
+ 1519 0
1522
+ 1520 0
1523
+ 1521 0
1524
+ 1522 0
1525
+ 1523 1
1526
+ 1524 1
1527
+ 1525 1
1528
+ 1526 1
1529
+ 1527 1
1530
+ 1528 0
1531
+ 1529 1
1532
+ 1530 1
1533
+ 1531 1
1534
+ 1532 1
1535
+ 1533 1
1536
+ 1534 1
1537
+ 1535 0
1538
+ 1536 1
1539
+ 1537 1
1540
+ 1538 1
1541
+ 1539 0
1542
+ 1540 0
1543
+ 1541 1
1544
+ 1542 0
1545
+ 1543 1
1546
+ 1544 0
1547
+ 1545 0
1548
+ 1546 1
1549
+ 1547 0
1550
+ 1548 1
1551
+ 1549 1
1552
+ 1550 1
1553
+ 1551 0
1554
+ 1552 0
1555
+ 1553 0
1556
+ 1554 1
1557
+ 1555 1
1558
+ 1556 0
1559
+ 1557 1
1560
+ 1558 0
1561
+ 1559 1
1562
+ 1560 1
1563
+ 1561 0
1564
+ 1562 1
1565
+ 1563 1
1566
+ 1564 0
1567
+ 1565 1
1568
+ 1566 1
1569
+ 1567 1
1570
+ 1568 1
1571
+ 1569 1
1572
+ 1570 1
1573
+ 1571 1
1574
+ 1572 0
1575
+ 1573 0
1576
+ 1574 1
1577
+ 1575 1
1578
+ 1576 1
1579
+ 1577 0
1580
+ 1578 1
1581
+ 1579 1
1582
+ 1580 1
1583
+ 1581 1
1584
+ 1582 1
1585
+ 1583 1
1586
+ 1584 0
1587
+ 1585 0
1588
+ 1586 0
1589
+ 1587 1
1590
+ 1588 1
1591
+ 1589 1
1592
+ 1590 0
1593
+ 1591 1
1594
+ 1592 1
1595
+ 1593 0
1596
+ 1594 1
1597
+ 1595 1
1598
+ 1596 1
1599
+ 1597 0
1600
+ 1598 0
1601
+ 1599 1
1602
+ 1600 1
1603
+ 1601 1
1604
+ 1602 1
1605
+ 1603 1
1606
+ 1604 1
1607
+ 1605 0
1608
+ 1606 1
1609
+ 1607 1
1610
+ 1608 0
1611
+ 1609 1
1612
+ 1610 0
1613
+ 1611 1
1614
+ 1612 1
1615
+ 1613 1
1616
+ 1614 1
1617
+ 1615 1
1618
+ 1616 1
1619
+ 1617 0
1620
+ 1618 1
1621
+ 1619 1
1622
+ 1620 0
1623
+ 1621 0
1624
+ 1622 1
1625
+ 1623 1
1626
+ 1624 1
1627
+ 1625 0
1628
+ 1626 1
1629
+ 1627 1
1630
+ 1628 1
1631
+ 1629 0
1632
+ 1630 0
1633
+ 1631 1
1634
+ 1632 1
1635
+ 1633 1
1636
+ 1634 1
1637
+ 1635 1
1638
+ 1636 1
1639
+ 1637 1
1640
+ 1638 0
1641
+ 1639 0
1642
+ 1640 0
1643
+ 1641 1
1644
+ 1642 1
1645
+ 1643 1
1646
+ 1644 1
1647
+ 1645 1
1648
+ 1646 1
1649
+ 1647 1
1650
+ 1648 1
1651
+ 1649 1
1652
+ 1650 0
1653
+ 1651 0
1654
+ 1652 0
1655
+ 1653 1
1656
+ 1654 1
1657
+ 1655 1
1658
+ 1656 0
1659
+ 1657 0
1660
+ 1658 1
1661
+ 1659 1
1662
+ 1660 0
1663
+ 1661 0
1664
+ 1662 0
1665
+ 1663 1
1666
+ 1664 0
1667
+ 1665 0
1668
+ 1666 0
1669
+ 1667 1
1670
+ 1668 0
1671
+ 1669 1
1672
+ 1670 1
1673
+ 1671 0
1674
+ 1672 1
1675
+ 1673 1
1676
+ 1674 1
1677
+ 1675 1
1678
+ 1676 1
1679
+ 1677 1
1680
+ 1678 1
1681
+ 1679 1
1682
+ 1680 0
1683
+ 1681 1
1684
+ 1682 1
1685
+ 1683 0
1686
+ 1684 1
1687
+ 1685 1
1688
+ 1686 0
1689
+ 1687 0
1690
+ 1688 1
1691
+ 1689 1
1692
+ 1690 1
1693
+ 1691 0
1694
+ 1692 1
1695
+ 1693 1
1696
+ 1694 0
1697
+ 1695 1
1698
+ 1696 1
1699
+ 1697 0
1700
+ 1698 0
1701
+ 1699 1
1702
+ 1700 0
1703
+ 1701 1
1704
+ 1702 0
1705
+ 1703 1
1706
+ 1704 1
1707
+ 1705 1
1708
+ 1706 1
1709
+ 1707 0
1710
+ 1708 1
1711
+ 1709 1
1712
+ 1710 1
1713
+ 1711 1
1714
+ 1712 1
1715
+ 1713 0
1716
+ 1714 1
1717
+ 1715 1
1718
+ 1716 1
1719
+ 1717 1
1720
+ 1718 0
1721
+ 1719 1
1722
+ 1720 0
1723
+ 1721 0
1724
+ 1722 0
1725
+ 1723 1
1726
+ 1724 1
nlu/glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h55m38,r=4,s45/all_results.json ADDED
@@ -0,0 +1,11 @@
1
+ {
2
+ "epoch": 30.0,
3
+ "eval_accuracy": 0.9142156862745098,
4
+ "eval_combined_score": 0.92656683092085,
5
+ "eval_f1": 0.9389179755671903,
6
+ "eval_loss": 0.673913836479187,
7
+ "eval_runtime": 0.5729,
8
+ "eval_samples": 408,
9
+ "eval_samples_per_second": 712.173,
10
+ "eval_steps_per_second": 1.746
11
+ }
nlu/glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h55m38,r=4,s45/eval_results.json ADDED
@@ -0,0 +1,11 @@
1
+ {
2
+ "epoch": 30.0,
3
+ "eval_accuracy": 0.9142156862745098,
4
+ "eval_combined_score": 0.92656683092085,
5
+ "eval_f1": 0.9389179755671903,
6
+ "eval_loss": 0.673913836479187,
7
+ "eval_runtime": 0.5729,
8
+ "eval_samples": 408,
9
+ "eval_samples_per_second": 712.173,
10
+ "eval_steps_per_second": 1.746
11
+ }
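
Note: in these MRPC results, eval_combined_score is the arithmetic mean of eval_accuracy and eval_f1 ((0.9142156862745098 + 0.9389179755671903) / 2 = 0.92656683092085). A minimal Python sketch to confirm this from the uploaded file, assuming it has been downloaded to the path shown in this diff:

import json

path = "nlu/glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h55m38,r=4,s45/eval_results.json"
with open(path) as f:
    results = json.load(f)

# For MRPC, the combined score is the mean of accuracy and F1.
combined = (results["eval_accuracy"] + results["eval_f1"]) / 2
assert abs(combined - results["eval_combined_score"]) < 1e-9
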
nlu/glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h55m38,r=4,s45/ft/adapter_config.json ADDED
@@ -0,0 +1,26 @@
1
+ {
2
+ "T": 1.0,
3
+ "base_model_name_or_path": "microsoft/deberta-v3-base",
4
+ "bias": "none",
5
+ "drop_out": 0.0,
6
+ "inference_mode": false,
7
+ "layers_to_transform": null,
8
+ "modules_to_save": [
9
+ "classifier",
10
+ "pooler"
11
+ ],
12
+ "num_rotations": 1,
13
+ "peft_type": "ROTATION",
14
+ "r": 4,
15
+ "revision": null,
16
+ "target_modules": [
17
+ "intermediate.dense",
18
+ "output.dense",
19
+ "value_proj",
20
+ "attention.output.dense",
21
+ "query_proj",
22
+ "key_proj"
23
+ ],
24
+ "target_modules_to_skip": null,
25
+ "task_type": "SEQ_CLS"
26
+ }
nlu/glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h55m38,r=4,s45/ft/added_tokens.json ADDED
@@ -0,0 +1,3 @@
1
+ {
2
+ "[MASK]": 128000
3
+ }
nlu/glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h55m38,r=4,s45/ft/special_tokens_map.json ADDED
@@ -0,0 +1,15 @@
1
+ {
2
+ "bos_token": "[CLS]",
3
+ "cls_token": "[CLS]",
4
+ "eos_token": "[SEP]",
5
+ "mask_token": "[MASK]",
6
+ "pad_token": "[PAD]",
7
+ "sep_token": "[SEP]",
8
+ "unk_token": {
9
+ "content": "[UNK]",
10
+ "lstrip": false,
11
+ "normalized": true,
12
+ "rstrip": false,
13
+ "single_word": false
14
+ }
15
+ }
nlu/glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h55m38,r=4,s45/ft/spm.model ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c679fbf93643d19aab7ee10c0b99e460bdbc02fedf34b92b05af343b4af586fd
3
+ size 2464616
nlu/glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h55m38,r=4,s45/ft/tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
nlu/glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h55m38,r=4,s45/ft/tokenizer_config.json ADDED
@@ -0,0 +1,60 @@
1
+ {
2
+ "added_tokens_decoder": {
3
+ "0": {
4
+ "content": "[PAD]",
5
+ "lstrip": false,
6
+ "normalized": false,
7
+ "rstrip": false,
8
+ "single_word": false,
9
+ "special": true
10
+ },
11
+ "1": {
12
+ "content": "[CLS]",
13
+ "lstrip": false,
14
+ "normalized": false,
15
+ "rstrip": false,
16
+ "single_word": false,
17
+ "special": true
18
+ },
19
+ "2": {
20
+ "content": "[SEP]",
21
+ "lstrip": false,
22
+ "normalized": false,
23
+ "rstrip": false,
24
+ "single_word": false,
25
+ "special": true
26
+ },
27
+ "3": {
28
+ "content": "[UNK]",
29
+ "lstrip": false,
30
+ "normalized": true,
31
+ "rstrip": false,
32
+ "single_word": false,
33
+ "special": true
34
+ },
35
+ "128000": {
36
+ "content": "[MASK]",
37
+ "lstrip": false,
38
+ "normalized": false,
39
+ "rstrip": false,
40
+ "single_word": false,
41
+ "special": true
42
+ }
43
+ },
44
+ "bos_token": "[CLS]",
45
+ "clean_up_tokenization_spaces": false,
46
+ "cls_token": "[CLS]",
47
+ "do_lower_case": false,
48
+ "eos_token": "[SEP]",
49
+ "extra_special_tokens": {},
50
+ "mask_token": "[MASK]",
51
+ "model_max_length": 512,
52
+ "pad_token": "[PAD]",
53
+ "padding_side": "right",
54
+ "sep_token": "[SEP]",
55
+ "sp_model_kwargs": {},
56
+ "split_by_punct": false,
57
+ "tokenizer_class": "DebertaV2Tokenizer",
58
+ "unk_token": "[UNK]",
59
+ "vocab_type": "spm"
60
+ }
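
The ft/ directory in this run carries a standard DeBERTa-v3 SentencePiece tokenizer (spm.model, tokenizer_config.json, special_tokens_map.json, and added_tokens.json mapping [MASK] to id 128000). A minimal sketch of loading it, assuming the transformers and sentencepiece packages are installed and the directory has been fetched locally; the sentence pair is an illustrative MRPC-style example, not taken from the dataset:

from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained(
    "nlu/glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h55m38,r=4,s45/ft"
)
# Tokenize a sentence pair the way the MRPC classifier sees it.
enc = tok("The company posted strong results.", "Strong results were posted by the company.")
print(enc["input_ids"])
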
nlu/glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h55m38,r=4,s45/ft2/adapter_model.bin ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:2b7ac523189d4f334737eb019e5e9b4341211dcd9610d889def582abdbc973d2
3
+ size 7449859
nlu/glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h55m38,r=4,s45/trainer_state.json ADDED
@@ -0,0 +1,655 @@
1
+ {
2
+ "best_global_step": 1800,
3
+ "best_metric": 0.9142156862745098,
4
+ "best_model_checkpoint": "./glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h55m38,r=4,s45/checkpoint-1800",
5
+ "epoch": 30.0,
6
+ "eval_steps": 100,
7
+ "global_step": 3450,
8
+ "is_hyper_param_search": false,
9
+ "is_local_process_zero": true,
10
+ "is_world_process_zero": true,
11
+ "log_history": [
12
+ {
13
+ "epoch": 0.8695652173913043,
14
+ "grad_norm": 4.347602367401123,
15
+ "learning_rate": 0.00198,
16
+ "loss": 0.5803,
17
+ "step": 100
18
+ },
19
+ {
20
+ "epoch": 0.8695652173913043,
21
+ "eval_accuracy": 0.8602941176470589,
22
+ "eval_combined_score": 0.8822478991596638,
23
+ "eval_f1": 0.9042016806722689,
24
+ "eval_loss": 0.45600226521492004,
25
+ "eval_runtime": 0.6867,
26
+ "eval_samples_per_second": 594.12,
27
+ "eval_steps_per_second": 1.456,
28
+ "step": 100
29
+ },
30
+ {
31
+ "epoch": 1.7391304347826086,
32
+ "grad_norm": 1.9366239309310913,
33
+ "learning_rate": 0.0019956152348614225,
34
+ "loss": 0.3704,
35
+ "step": 200
36
+ },
37
+ {
38
+ "epoch": 1.7391304347826086,
39
+ "eval_accuracy": 0.8480392156862745,
40
+ "eval_combined_score": 0.872864492331586,
41
+ "eval_f1": 0.8976897689768977,
42
+ "eval_loss": 0.3966831862926483,
43
+ "eval_runtime": 0.5852,
44
+ "eval_samples_per_second": 697.138,
45
+ "eval_steps_per_second": 1.709,
46
+ "step": 200
47
+ },
48
+ {
49
+ "epoch": 2.608695652173913,
50
+ "grad_norm": 3.378814458847046,
51
+ "learning_rate": 0.0019823226955326743,
52
+ "loss": 0.3041,
53
+ "step": 300
54
+ },
55
+ {
56
+ "epoch": 2.608695652173913,
57
+ "eval_accuracy": 0.8946078431372549,
58
+ "eval_combined_score": 0.9097821065599014,
59
+ "eval_f1": 0.924956369982548,
60
+ "eval_loss": 0.31703782081604004,
61
+ "eval_runtime": 0.5834,
62
+ "eval_samples_per_second": 699.371,
63
+ "eval_steps_per_second": 1.714,
64
+ "step": 300
65
+ },
66
+ {
67
+ "epoch": 3.4782608695652173,
68
+ "grad_norm": 2.2696948051452637,
69
+ "learning_rate": 0.0019602408686963785,
70
+ "loss": 0.2206,
71
+ "step": 400
72
+ },
73
+ {
74
+ "epoch": 3.4782608695652173,
75
+ "eval_accuracy": 0.8872549019607843,
76
+ "eval_combined_score": 0.9041085506367494,
77
+ "eval_f1": 0.9209621993127147,
78
+ "eval_loss": 0.4423012435436249,
79
+ "eval_runtime": 0.5798,
80
+ "eval_samples_per_second": 703.712,
81
+ "eval_steps_per_second": 1.725,
82
+ "step": 400
83
+ },
84
+ {
85
+ "epoch": 4.3478260869565215,
86
+ "grad_norm": 5.197021484375,
87
+ "learning_rate": 0.0019295673304908422,
88
+ "loss": 0.191,
89
+ "step": 500
90
+ },
91
+ {
92
+ "epoch": 4.3478260869565215,
93
+ "eval_accuracy": 0.8946078431372549,
94
+ "eval_combined_score": 0.9106770050439257,
95
+ "eval_f1": 0.9267461669505963,
96
+ "eval_loss": 0.3691416084766388,
97
+ "eval_runtime": 0.5749,
98
+ "eval_samples_per_second": 709.637,
99
+ "eval_steps_per_second": 1.739,
100
+ "step": 500
101
+ },
102
+ {
103
+ "epoch": 5.217391304347826,
104
+ "grad_norm": 5.0261335372924805,
105
+ "learning_rate": 0.001890576530999922,
106
+ "loss": 0.1352,
107
+ "step": 600
108
+ },
109
+ {
110
+ "epoch": 5.217391304347826,
111
+ "eval_accuracy": 0.8921568627450981,
112
+ "eval_combined_score": 0.9070713391739675,
113
+ "eval_f1": 0.9219858156028369,
114
+ "eval_loss": 0.473999559879303,
115
+ "eval_runtime": 0.5737,
116
+ "eval_samples_per_second": 711.204,
117
+ "eval_steps_per_second": 1.743,
118
+ "step": 600
119
+ },
120
+ {
121
+ "epoch": 6.086956521739131,
122
+ "grad_norm": 1.5393813848495483,
123
+ "learning_rate": 0.0018436173386234143,
124
+ "loss": 0.1133,
125
+ "step": 700
126
+ },
127
+ {
128
+ "epoch": 6.086956521739131,
129
+ "eval_accuracy": 0.8872549019607843,
130
+ "eval_combined_score": 0.9035577645692423,
131
+ "eval_f1": 0.9198606271777003,
132
+ "eval_loss": 0.5273416638374329,
133
+ "eval_runtime": 0.5734,
134
+ "eval_samples_per_second": 711.6,
135
+ "eval_steps_per_second": 1.744,
136
+ "step": 700
137
+ },
138
+ {
139
+ "epoch": 6.956521739130435,
140
+ "grad_norm": 7.234371662139893,
141
+ "learning_rate": 0.001789109918592965,
142
+ "loss": 0.0789,
143
+ "step": 800
144
+ },
145
+ {
146
+ "epoch": 6.956521739130435,
147
+ "eval_accuracy": 0.8921568627450981,
148
+ "eval_combined_score": 0.9060784313725491,
149
+ "eval_f1": 0.92,
150
+ "eval_loss": 0.4842986762523651,
151
+ "eval_runtime": 0.5746,
152
+ "eval_samples_per_second": 709.998,
153
+ "eval_steps_per_second": 1.74,
154
+ "step": 800
155
+ },
156
+ {
157
+ "epoch": 7.826086956521739,
158
+ "grad_norm": 2.4622912406921387,
159
+ "learning_rate": 0.001727541973562826,
160
+ "loss": 0.0699,
161
+ "step": 900
162
+ },
163
+ {
164
+ "epoch": 7.826086956521739,
165
+ "eval_accuracy": 0.8725490196078431,
166
+ "eval_combined_score": 0.8912918108419838,
167
+ "eval_f1": 0.9100346020761245,
168
+ "eval_loss": 0.5077530145645142,
169
+ "eval_runtime": 0.574,
170
+ "eval_samples_per_second": 710.746,
171
+ "eval_steps_per_second": 1.742,
172
+ "step": 900
173
+ },
174
+ {
175
+ "epoch": 8.695652173913043,
176
+ "grad_norm": 0.014082511886954308,
177
+ "learning_rate": 0.001659464379912601,
178
+ "loss": 0.0523,
179
+ "step": 1000
180
+ },
181
+ {
182
+ "epoch": 8.695652173913043,
183
+ "eval_accuracy": 0.8848039215686274,
184
+ "eval_combined_score": 0.9008090404303314,
185
+ "eval_f1": 0.9168141592920354,
186
+ "eval_loss": 0.5472626686096191,
187
+ "eval_runtime": 0.5723,
188
+ "eval_samples_per_second": 712.973,
189
+ "eval_steps_per_second": 1.747,
190
+ "step": 1000
191
+ },
192
+ {
193
+ "epoch": 9.565217391304348,
194
+ "grad_norm": 0.1075880229473114,
195
+ "learning_rate": 0.0015854862588059726,
196
+ "loss": 0.063,
197
+ "step": 1100
198
+ },
199
+ {
200
+ "epoch": 9.565217391304348,
201
+ "eval_accuracy": 0.8872549019607843,
202
+ "eval_combined_score": 0.9035577645692423,
203
+ "eval_f1": 0.9198606271777003,
204
+ "eval_loss": 0.5933418869972229,
205
+ "eval_runtime": 0.5735,
206
+ "eval_samples_per_second": 711.449,
207
+ "eval_steps_per_second": 1.744,
208
+ "step": 1100
209
+ },
210
+ {
211
+ "epoch": 10.434782608695652,
212
+ "grad_norm": 0.10556649416685104,
213
+ "learning_rate": 0.0015062695261068735,
214
+ "loss": 0.0312,
215
+ "step": 1200
216
+ },
217
+ {
218
+ "epoch": 10.434782608695652,
219
+ "eval_accuracy": 0.8897058823529411,
220
+ "eval_combined_score": 0.9044579681064526,
221
+ "eval_f1": 0.9192100538599641,
222
+ "eval_loss": 0.5497579574584961,
223
+ "eval_runtime": 0.574,
224
+ "eval_samples_per_second": 710.848,
225
+ "eval_steps_per_second": 1.742,
226
+ "step": 1200
227
+ },
228
+ {
229
+ "epoch": 11.304347826086957,
230
+ "grad_norm": 0.3477442264556885,
231
+ "learning_rate": 0.0014225229699174897,
232
+ "loss": 0.0367,
233
+ "step": 1300
234
+ },
235
+ {
236
+ "epoch": 11.304347826086957,
237
+ "eval_accuracy": 0.8872549019607843,
238
+ "eval_combined_score": 0.9015029088558499,
239
+ "eval_f1": 0.9157509157509157,
240
+ "eval_loss": 0.7066847085952759,
241
+ "eval_runtime": 0.574,
242
+ "eval_samples_per_second": 710.823,
243
+ "eval_steps_per_second": 1.742,
244
+ "step": 1300
245
+ },
246
+ {
247
+ "epoch": 12.173913043478262,
248
+ "grad_norm": 0.02786153554916382,
249
+ "learning_rate": 0.0013349959087290495,
250
+ "loss": 0.0323,
251
+ "step": 1400
252
+ },
253
+ {
254
+ "epoch": 12.173913043478262,
255
+ "eval_accuracy": 0.8848039215686274,
256
+ "eval_combined_score": 0.9003626047914693,
257
+ "eval_f1": 0.9159212880143113,
258
+ "eval_loss": 0.7087417244911194,
259
+ "eval_runtime": 0.5749,
260
+ "eval_samples_per_second": 709.65,
261
+ "eval_steps_per_second": 1.739,
262
+ "step": 1400
263
+ },
264
+ {
265
+ "epoch": 13.043478260869565,
266
+ "grad_norm": 0.6905132532119751,
267
+ "learning_rate": 0.001244471486928804,
268
+ "loss": 0.0301,
269
+ "step": 1500
270
+ },
271
+ {
272
+ "epoch": 13.043478260869565,
273
+ "eval_accuracy": 0.9068627450980392,
274
+ "eval_combined_score": 0.9195028011204482,
275
+ "eval_f1": 0.9321428571428572,
276
+ "eval_loss": 0.5149193406105042,
277
+ "eval_runtime": 0.5739,
278
+ "eval_samples_per_second": 710.867,
279
+ "eval_steps_per_second": 1.742,
280
+ "step": 1500
281
+ },
282
+ {
283
+ "epoch": 13.91304347826087,
284
+ "grad_norm": 1.0678400993347168,
285
+ "learning_rate": 0.0011517596676513472,
286
+ "loss": 0.0224,
287
+ "step": 1600
288
+ },
289
+ {
290
+ "epoch": 13.91304347826087,
291
+ "eval_accuracy": 0.8897058823529411,
292
+ "eval_combined_score": 0.9054483877614092,
293
+ "eval_f1": 0.9211908931698775,
294
+ "eval_loss": 0.6445961594581604,
295
+ "eval_runtime": 0.5759,
296
+ "eval_samples_per_second": 708.4,
297
+ "eval_steps_per_second": 1.736,
298
+ "step": 1600
299
+ },
300
+ {
301
+ "epoch": 14.782608695652174,
302
+ "grad_norm": 0.01732761226594448,
303
+ "learning_rate": 0.001057689985670419,
304
+ "loss": 0.0213,
305
+ "step": 1700
306
+ },
307
+ {
308
+ "epoch": 14.782608695652174,
309
+ "eval_accuracy": 0.8946078431372549,
310
+ "eval_combined_score": 0.9095183328164307,
311
+ "eval_f1": 0.9244288224956063,
312
+ "eval_loss": 0.524046003818512,
313
+ "eval_runtime": 0.5737,
314
+ "eval_samples_per_second": 711.122,
315
+ "eval_steps_per_second": 1.743,
316
+ "step": 1700
317
+ },
318
+ {
319
+ "epoch": 15.652173913043478,
320
+ "grad_norm": 0.05506271496415138,
321
+ "learning_rate": 0.0009631041251743559,
322
+ "loss": 0.0132,
323
+ "step": 1800
324
+ },
325
+ {
326
+ "epoch": 15.652173913043478,
327
+ "eval_accuracy": 0.9142156862745098,
328
+ "eval_combined_score": 0.92656683092085,
329
+ "eval_f1": 0.9389179755671903,
330
+ "eval_loss": 0.673913836479187,
331
+ "eval_runtime": 0.5748,
332
+ "eval_samples_per_second": 709.792,
333
+ "eval_steps_per_second": 1.74,
334
+ "step": 1800
335
+ },
336
+ {
337
+ "epoch": 16.52173913043478,
338
+ "grad_norm": 0.014215093106031418,
339
+ "learning_rate": 0.0008688483888352111,
340
+ "loss": 0.0134,
341
+ "step": 1900
342
+ },
343
+ {
344
+ "epoch": 16.52173913043478,
345
+ "eval_accuracy": 0.8995098039215687,
346
+ "eval_combined_score": 0.9126843775484877,
347
+ "eval_f1": 0.9258589511754068,
348
+ "eval_loss": 0.625953197479248,
349
+ "eval_runtime": 0.5727,
350
+ "eval_samples_per_second": 712.434,
351
+ "eval_steps_per_second": 1.746,
352
+ "step": 1900
353
+ },
354
+ {
355
+ "epoch": 17.391304347826086,
356
+ "grad_norm": 0.03137849271297455,
357
+ "learning_rate": 0.000775766125554205,
358
+ "loss": 0.0122,
359
+ "step": 2000
360
+ },
361
+ {
362
+ "epoch": 17.391304347826086,
363
+ "eval_accuracy": 0.8872549019607843,
364
+ "eval_combined_score": 0.9038350634371395,
365
+ "eval_f1": 0.9204152249134948,
366
+ "eval_loss": 0.6894273161888123,
367
+ "eval_runtime": 0.5759,
368
+ "eval_samples_per_second": 708.407,
369
+ "eval_steps_per_second": 1.736,
370
+ "step": 2000
371
+ },
372
+ {
373
+ "epoch": 18.26086956521739,
374
+ "grad_norm": 0.2494587004184723,
375
+ "learning_rate": 0.0006846901846358999,
376
+ "loss": 0.0164,
377
+ "step": 2100
378
+ },
379
+ {
380
+ "epoch": 18.26086956521739,
381
+ "eval_accuracy": 0.8970588235294118,
382
+ "eval_combined_score": 0.9118161250514192,
383
+ "eval_f1": 0.9265734265734266,
384
+ "eval_loss": 0.6536701917648315,
385
+ "eval_runtime": 0.5739,
386
+ "eval_samples_per_second": 710.931,
387
+ "eval_steps_per_second": 1.742,
388
+ "step": 2100
389
+ },
390
+ {
391
+ "epoch": 19.130434782608695,
392
+ "grad_norm": 1.4710524082183838,
393
+ "learning_rate": 0.0005964354639070397,
394
+ "loss": 0.0111,
395
+ "step": 2200
396
+ },
397
+ {
398
+ "epoch": 19.130434782608695,
399
+ "eval_accuracy": 0.8970588235294118,
400
+ "eval_combined_score": 0.911029411764706,
401
+ "eval_f1": 0.925,
402
+ "eval_loss": 0.6434887051582336,
403
+ "eval_runtime": 0.5745,
404
+ "eval_samples_per_second": 710.204,
405
+ "eval_steps_per_second": 1.741,
406
+ "step": 2200
407
+ },
408
+ {
409
+ "epoch": 20.0,
410
+ "grad_norm": 0.0021493160165846348,
411
+ "learning_rate": 0.0005117916184554203,
412
+ "loss": 0.0116,
413
+ "step": 2300
414
+ },
415
+ {
416
+ "epoch": 20.0,
417
+ "eval_accuracy": 0.8946078431372549,
418
+ "eval_combined_score": 0.9092508242234947,
419
+ "eval_f1": 0.9238938053097345,
420
+ "eval_loss": 0.7062434554100037,
421
+ "eval_runtime": 0.573,
422
+ "eval_samples_per_second": 712.091,
423
+ "eval_steps_per_second": 1.745,
424
+ "step": 2300
425
+ },
426
+ {
427
+ "epoch": 20.869565217391305,
428
+ "grad_norm": 0.08960673213005066,
429
+ "learning_rate": 0.0004315159952270119,
430
+ "loss": 0.0065,
431
+ "step": 2400
432
+ },
433
+ {
434
+ "epoch": 20.869565217391305,
435
+ "eval_accuracy": 0.8995098039215687,
436
+ "eval_combined_score": 0.9128179650238474,
437
+ "eval_f1": 0.9261261261261261,
438
+ "eval_loss": 0.7081242203712463,
439
+ "eval_runtime": 0.578,
440
+ "eval_samples_per_second": 705.838,
441
+ "eval_steps_per_second": 1.73,
442
+ "step": 2400
443
+ },
444
+ {
445
+ "epoch": 21.73913043478261,
446
+ "grad_norm": 0.012382442131638527,
447
+ "learning_rate": 0.0003563268566987077,
448
+ "loss": 0.0061,
449
+ "step": 2500
450
+ },
451
+ {
452
+ "epoch": 21.73913043478261,
453
+ "eval_accuracy": 0.8946078431372549,
454
+ "eval_combined_score": 0.9092508242234947,
455
+ "eval_f1": 0.9238938053097345,
456
+ "eval_loss": 0.7085114121437073,
457
+ "eval_runtime": 0.573,
458
+ "eval_samples_per_second": 712.01,
459
+ "eval_steps_per_second": 1.745,
460
+ "step": 2500
461
+ },
462
+ {
463
+ "epoch": 22.608695652173914,
464
+ "grad_norm": 0.0018653898732736707,
465
+ "learning_rate": 0.0002868969542575783,
466
+ "loss": 0.0078,
467
+ "step": 2600
468
+ },
469
+ {
470
+ "epoch": 22.608695652173914,
471
+ "eval_accuracy": 0.8872549019607843,
472
+ "eval_combined_score": 0.9031344932339133,
473
+ "eval_f1": 0.9190140845070423,
474
+ "eval_loss": 0.7475114464759827,
475
+ "eval_runtime": 0.5754,
476
+ "eval_samples_per_second": 709.044,
477
+ "eval_steps_per_second": 1.738,
478
+ "step": 2600
479
+ },
480
+ {
481
+ "epoch": 23.47826086956522,
482
+ "grad_norm": 0.0017870579613372684,
483
+ "learning_rate": 0.00022384750878852333,
484
+ "loss": 0.0038,
485
+ "step": 2700
486
+ },
487
+ {
488
+ "epoch": 23.47826086956522,
489
+ "eval_accuracy": 0.8970588235294118,
490
+ "eval_combined_score": 0.9116873065015481,
491
+ "eval_f1": 0.9263157894736842,
492
+ "eval_loss": 0.7797441482543945,
493
+ "eval_runtime": 0.5734,
494
+ "eval_samples_per_second": 711.491,
495
+ "eval_steps_per_second": 1.744,
496
+ "step": 2700
497
+ },
498
+ {
499
+ "epoch": 24.347826086956523,
500
+ "grad_norm": 0.001991760218515992,
501
+ "learning_rate": 0.00016774265232874353,
502
+ "loss": 0.0059,
503
+ "step": 2800
504
+ },
505
+ {
506
+ "epoch": 24.347826086956523,
507
+ "eval_accuracy": 0.9019607843137255,
508
+ "eval_combined_score": 0.9155193992490613,
509
+ "eval_f1": 0.9290780141843972,
510
+ "eval_loss": 0.7348427772521973,
511
+ "eval_runtime": 0.5751,
512
+ "eval_samples_per_second": 709.446,
513
+ "eval_steps_per_second": 1.739,
514
+ "step": 2800
515
+ },
516
+ {
517
+ "epoch": 25.217391304347824,
518
+ "grad_norm": 0.000965337676461786,
519
+ "learning_rate": 0.00011908438052207082,
520
+ "loss": 0.0029,
521
+ "step": 2900
522
+ },
523
+ {
524
+ "epoch": 25.217391304347824,
525
+ "eval_accuracy": 0.8995098039215687,
526
+ "eval_combined_score": 0.9135996991389148,
527
+ "eval_f1": 0.927689594356261,
528
+ "eval_loss": 0.7361888289451599,
529
+ "eval_runtime": 0.5744,
530
+ "eval_samples_per_second": 710.324,
531
+ "eval_steps_per_second": 1.741,
532
+ "step": 2900
533
+ },
534
+ {
535
+ "epoch": 26.08695652173913,
536
+ "grad_norm": 0.0016472822753712535,
537
+ "learning_rate": 7.830806103584498e-05,
538
+ "loss": 0.0044,
539
+ "step": 3000
540
+ },
541
+ {
542
+ "epoch": 26.08695652173913,
543
+ "eval_accuracy": 0.8970588235294118,
544
+ "eval_combined_score": 0.9116873065015481,
545
+ "eval_f1": 0.9263157894736842,
546
+ "eval_loss": 0.7820258140563965,
547
+ "eval_runtime": 0.5737,
548
+ "eval_samples_per_second": 711.172,
549
+ "eval_steps_per_second": 1.743,
550
+ "step": 3000
551
+ },
552
+ {
553
+ "epoch": 26.956521739130434,
554
+ "grad_norm": 0.001823047990910709,
555
+ "learning_rate": 4.577853812857102e-05,
556
+ "loss": 0.0036,
557
+ "step": 3100
558
+ },
559
+ {
560
+ "epoch": 26.956521739130434,
561
+ "eval_accuracy": 0.8995098039215687,
562
+ "eval_combined_score": 0.9135996991389148,
563
+ "eval_f1": 0.927689594356261,
564
+ "eval_loss": 0.7820542454719543,
565
+ "eval_runtime": 0.5754,
566
+ "eval_samples_per_second": 709.051,
567
+ "eval_steps_per_second": 1.738,
568
+ "step": 3100
569
+ },
570
+ {
571
+ "epoch": 27.82608695652174,
572
+ "grad_norm": 0.009837474673986435,
573
+ "learning_rate": 2.178686822255904e-05,
574
+ "loss": 0.0013,
575
+ "step": 3200
576
+ },
577
+ {
578
+ "epoch": 27.82608695652174,
579
+ "eval_accuracy": 0.8995098039215687,
580
+ "eval_combined_score": 0.9135996991389148,
581
+ "eval_f1": 0.927689594356261,
582
+ "eval_loss": 0.793246328830719,
583
+ "eval_runtime": 0.5725,
584
+ "eval_samples_per_second": 712.603,
585
+ "eval_steps_per_second": 1.747,
586
+ "step": 3200
587
+ },
588
+ {
589
+ "epoch": 28.695652173913043,
590
+ "grad_norm": 0.02098868414759636,
591
+ "learning_rate": 6.547715689861789e-06,
592
+ "loss": 0.001,
593
+ "step": 3300
594
+ },
595
+ {
596
+ "epoch": 28.695652173913043,
597
+ "eval_accuracy": 0.8995098039215687,
598
+ "eval_combined_score": 0.9135996991389148,
599
+ "eval_f1": 0.927689594356261,
600
+ "eval_loss": 0.7973781824111938,
601
+ "eval_runtime": 0.5737,
602
+ "eval_samples_per_second": 711.127,
603
+ "eval_steps_per_second": 1.743,
604
+ "step": 3300
605
+ },
606
+ {
607
+ "epoch": 29.565217391304348,
608
+ "grad_norm": 0.00035156868398189545,
609
+ "learning_rate": 1.97432152599486e-07,
610
+ "loss": 0.0017,
611
+ "step": 3400
612
+ },
613
+ {
614
+ "epoch": 29.565217391304348,
615
+ "eval_accuracy": 0.8995098039215687,
616
+ "eval_combined_score": 0.9135996991389148,
617
+ "eval_f1": 0.927689594356261,
618
+ "eval_loss": 0.7983468770980835,
619
+ "eval_runtime": 0.5733,
620
+ "eval_samples_per_second": 711.635,
621
+ "eval_steps_per_second": 1.744,
622
+ "step": 3400
623
+ },
624
+ {
625
+ "epoch": 30.0,
626
+ "step": 3450,
627
+ "total_flos": 1.83610838283264e+16,
628
+ "train_loss": 0.07176048623262972,
629
+ "train_runtime": 655.6937,
630
+ "train_samples_per_second": 167.822,
631
+ "train_steps_per_second": 5.262
632
+ }
633
+ ],
634
+ "logging_steps": 100,
635
+ "max_steps": 3450,
636
+ "num_input_tokens_seen": 0,
637
+ "num_train_epochs": 30,
638
+ "save_steps": 100,
639
+ "stateful_callbacks": {
640
+ "TrainerControl": {
641
+ "args": {
642
+ "should_epoch_stop": false,
643
+ "should_evaluate": false,
644
+ "should_log": false,
645
+ "should_save": true,
646
+ "should_training_stop": true
647
+ },
648
+ "attributes": {}
649
+ }
650
+ },
651
+ "total_flos": 1.83610838283264e+16,
652
+ "train_batch_size": 32,
653
+ "trial_name": null,
654
+ "trial_params": null
655
+ }
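
trainer_state.json already records the selected checkpoint (best_global_step 1800, best_metric 0.9142156862745098); the same figures can be recovered from log_history. A minimal sketch, assuming the file has been downloaded to the path shown in this diff:

import json

path = "nlu/glue_exp/mrpc/dr0.0,mlr5e-04,clr2e-03,ep=30.0t=24d16h55m38,r=4,s45/trainer_state.json"
with open(path) as f:
    state = json.load(f)

# Keep only the evaluation entries and pick the one with the highest accuracy.
evals = [e for e in state["log_history"] if "eval_accuracy" in e]
best = max(evals, key=lambda e: e["eval_accuracy"])
print(best["step"], best["eval_accuracy"])  # expected: 1800 0.9142156862745098
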