| [ | |
| { | |
| "loss": 0.7294, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 0.05, | |
| "step": 1 | |
| }, | |
| { | |
| "loss": 0.6837, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 0.11, | |
| "step": 2 | |
| }, | |
| { | |
| "loss": 0.7207, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 0.16, | |
| "step": 3 | |
| }, | |
| { | |
| "loss": 0.7014, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 0.21, | |
| "step": 4 | |
| }, | |
| { | |
| "loss": 0.6994, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 0.26, | |
| "step": 5 | |
| }, | |
| { | |
| "loss": 0.6877, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 0.32, | |
| "step": 6 | |
| }, | |
| { | |
| "loss": 0.6923, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 0.37, | |
| "step": 7 | |
| }, | |
| { | |
| "loss": 0.706, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 0.42, | |
| "step": 8 | |
| }, | |
| { | |
| "loss": 0.7193, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 0.47, | |
| "step": 9 | |
| }, | |
| { | |
| "loss": 0.7082, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 0.53, | |
| "step": 10 | |
| }, | |
| { | |
| "loss": 0.6847, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 0.58, | |
| "step": 11 | |
| }, | |
| { | |
| "loss": 0.7139, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 0.63, | |
| "step": 12 | |
| }, | |
| { | |
| "loss": 0.7051, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 0.68, | |
| "step": 13 | |
| }, | |
| { | |
| "loss": 0.7057, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 0.74, | |
| "step": 14 | |
| }, | |
| { | |
| "loss": 0.6808, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 0.79, | |
| "step": 15 | |
| }, | |
| { | |
| "loss": 0.7065, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 0.84, | |
| "step": 16 | |
| }, | |
| { | |
| "loss": 0.6851, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 0.89, | |
| "step": 17 | |
| }, | |
| { | |
| "loss": 0.6962, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 0.95, | |
| "step": 18 | |
| }, | |
| { | |
| "loss": 0.6754, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 1.0, | |
| "step": 19 | |
| }, | |
| { | |
| "loss": 0.703, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 1.05, | |
| "step": 20 | |
| }, | |
| { | |
| "loss": 0.6805, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 1.11, | |
| "step": 21 | |
| }, | |
| { | |
| "loss": 0.6911, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 1.16, | |
| "step": 22 | |
| }, | |
| { | |
| "loss": 0.6972, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 1.21, | |
| "step": 23 | |
| }, | |
| { | |
| "loss": 0.7043, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 1.26, | |
| "step": 24 | |
| }, | |
| { | |
| "loss": 0.6986, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 1.32, | |
| "step": 25 | |
| }, | |
| { | |
| "eval_code_is_correct_loss": 0.6981677412986755, | |
| "eval_code_is_correct_score": -0.25274571776390076, | |
| "eval_code_is_correct_brier_score": 0.25274571776390076, | |
| "eval_code_is_correct_average_probability": 0.4975625276565552, | |
| "eval_code_is_correct_accuracy": 0.43, | |
| "eval_code_is_correct_probabilities": [ | |
| 0.5088271498680115, | |
| 0.48198673129081726, | |
| 0.4981909990310669, | |
| 0.5024533271789551, | |
| 0.48107844591140747, | |
| 0.5089389681816101, | |
| 0.4841275215148926, | |
| 0.4966141879558563, | |
| 0.5106652975082397, | |
| 0.5127133131027222, | |
| 0.4725325107574463, | |
| 0.5244400501251221, | |
| 0.511084258556366, | |
| 0.521553635597229, | |
| 0.4971517324447632, | |
| 0.48880547285079956, | |
| 0.4734472632408142, | |
| 0.5228509306907654, | |
| 0.4940826892852783, | |
| 0.47331884503364563, | |
| 0.5112957954406738, | |
| 0.4850262403488159, | |
| 0.4836397171020508, | |
| 0.49904710054397583, | |
| 0.528581440448761, | |
| 0.5303167700767517, | |
| 0.5002449750900269, | |
| 0.5001938343048096, | |
| 0.4752185046672821, | |
| 0.514567494392395, | |
| 0.4778810441493988, | |
| 0.4975382685661316, | |
| 0.5223871469497681, | |
| 0.48589223623275757, | |
| 0.46854928135871887, | |
| 0.5050678849220276, | |
| 0.47625279426574707, | |
| 0.48999258875846863, | |
| 0.4709635376930237, | |
| 0.5040507316589355, | |
| 0.4879027009010315, | |
| 0.5210784077644348, | |
| 0.5200101137161255, | |
| 0.49343761801719666, | |
| 0.47951218485832214, | |
| 0.49272558093070984, | |
| 0.5004433393478394, | |
| 0.5177253484725952, | |
| 0.5200391411781311, | |
| 0.5149074792861938, | |
| 0.5044199824333191, | |
| 0.4863050580024719, | |
| 0.48952627182006836, | |
| 0.48324888944625854, | |
| 0.4773913621902466, | |
| 0.4935683608055115, | |
| 0.5272597074508667, | |
| 0.4823913872241974, | |
| 0.4688487946987152, | |
| 0.4804125130176544, | |
| 0.48838281631469727, | |
| 0.49300506711006165, | |
| 0.4761504828929901, | |
| 0.5559667348861694, | |
| 0.4856325089931488, | |
| 0.4806632697582245, | |
| 0.499020516872406, | |
| 0.5097540020942688, | |
| 0.5151796936988831, | |
| 0.5133408904075623, | |
| 0.4923568367958069, | |
| 0.5121997594833374, | |
| 0.49316778779029846, | |
| 0.4804587960243225, | |
| 0.5247389078140259, | |
| 0.48702362179756165, | |
| 0.48010408878326416, | |
| 0.4811219871044159, | |
| 0.48185238242149353, | |
| 0.4993566572666168, | |
| 0.4789592921733856, | |
| 0.481153279542923, | |
| 0.5070401430130005, | |
| 0.5011581182479858, | |
| 0.4959249794483185, | |
| 0.481794536113739, | |
| 0.4979727566242218, | |
| 0.5089129209518433, | |
| 0.5138169527053833, | |
| 0.48047563433647156, | |
| 0.5165080428123474, | |
| 0.5102388262748718, | |
| 0.515179455280304, | |
| 0.5202771425247192, | |
| 0.49016904830932617, | |
| 0.517183244228363, | |
| 0.50786954164505, | |
| 0.5065057873725891, | |
| 0.47355517745018005, | |
| 0.4693579375743866 | |
| ], | |
| "eval_code_is_correct_runtime": 29.2492, | |
| "eval_code_is_correct_samples_per_second": 3.419, | |
| "eval_code_is_correct_steps_per_second": 0.068, | |
| "epoch": 1.32, | |
| "step": 25 | |
| }, | |
| { | |
| "loss": 0.696, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 1.37, | |
| "step": 26 | |
| }, | |
| { | |
| "loss": 0.6871, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 1.42, | |
| "step": 27 | |
| }, | |
| { | |
| "loss": 0.6972, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 1.47, | |
| "step": 28 | |
| }, | |
| { | |
| "loss": 0.6813, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 1.53, | |
| "step": 29 | |
| }, | |
| { | |
| "loss": 0.6828, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 1.58, | |
| "step": 30 | |
| }, | |
| { | |
| "loss": 0.6826, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 1.63, | |
| "step": 31 | |
| }, | |
| { | |
| "loss": 0.6938, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 1.68, | |
| "step": 32 | |
| }, | |
| { | |
| "loss": 0.6955, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 1.74, | |
| "step": 33 | |
| }, | |
| { | |
| "loss": 0.6808, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 1.79, | |
| "step": 34 | |
| }, | |
| { | |
| "loss": 0.6886, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 1.84, | |
| "step": 35 | |
| }, | |
| { | |
| "loss": 0.6998, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 1.89, | |
| "step": 36 | |
| }, | |
| { | |
| "loss": 0.6769, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 1.95, | |
| "step": 37 | |
| }, | |
| { | |
| "loss": 0.7146, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 2.0, | |
| "step": 38 | |
| }, | |
| { | |
| "loss": 0.6642, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 2.05, | |
| "step": 39 | |
| }, | |
| { | |
| "loss": 0.684, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 2.11, | |
| "step": 40 | |
| }, | |
| { | |
| "loss": 0.6962, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 2.16, | |
| "step": 41 | |
| }, | |
| { | |
| "loss": 0.7021, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 2.21, | |
| "step": 42 | |
| }, | |
| { | |
| "loss": 0.702, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 2.26, | |
| "step": 43 | |
| }, | |
| { | |
| "loss": 0.6701, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 2.32, | |
| "step": 44 | |
| }, | |
| { | |
| "loss": 0.7221, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 2.37, | |
| "step": 45 | |
| }, | |
| { | |
| "loss": 0.6895, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 2.42, | |
| "step": 46 | |
| }, | |
| { | |
| "loss": 0.6897, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 2.47, | |
| "step": 47 | |
| }, | |
| { | |
| "loss": 0.7001, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 2.53, | |
| "step": 48 | |
| }, | |
| { | |
| "loss": 0.6845, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 2.58, | |
| "step": 49 | |
| }, | |
| { | |
| "loss": 0.702, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 2.63, | |
| "step": 50 | |
| }, | |
| { | |
| "eval_code_is_correct_loss": 0.6996689438819885, | |
| "eval_code_is_correct_score": -0.25365304946899414, | |
| "eval_code_is_correct_brier_score": 0.25365304946899414, | |
| "eval_code_is_correct_average_probability": 0.4967172145843506, | |
| "eval_code_is_correct_accuracy": 0.43, | |
| "eval_code_is_correct_probabilities": [ | |
| 0.5074375867843628, | |
| 0.4723518490791321, | |
| 0.48632118105888367, | |
| 0.49848517775535583, | |
| 0.476654589176178, | |
| 0.5107546448707581, | |
| 0.4704548716545105, | |
| 0.49364662170410156, | |
| 0.49723079800605774, | |
| 0.49965834617614746, | |
| 0.4738099277019501, | |
| 0.5174873471260071, | |
| 0.5036610960960388, | |
| 0.5156582593917847, | |
| 0.49093103408813477, | |
| 0.49312105774879456, | |
| 0.4914981424808502, | |
| 0.5185192823410034, | |
| 0.5077788233757019, | |
| 0.4805068373680115, | |
| 0.4961990416049957, | |
| 0.49203917384147644, | |
| 0.4657118618488312, | |
| 0.4971868097782135, | |
| 0.5209845304489136, | |
| 0.5242859721183777, | |
| 0.5050302147865295, | |
| 0.5064529776573181, | |
| 0.47357138991355896, | |
| 0.5292315483093262, | |
| 0.48264941573143005, | |
| 0.5002627968788147, | |
| 0.5115101933479309, | |
| 0.4869055151939392, | |
| 0.46029502153396606, | |
| 0.49682366847991943, | |
| 0.4798851013183594, | |
| 0.4962373971939087, | |
| 0.4617874026298523, | |
| 0.5052241683006287, | |
| 0.4935094118118286, | |
| 0.5108048319816589, | |
| 0.5389617085456848, | |
| 0.49815618991851807, | |
| 0.47433826327323914, | |
| 0.49846169352531433, | |
| 0.49559274315834045, | |
| 0.5151790976524353, | |
| 0.5131077170372009, | |
| 0.5054615139961243, | |
| 0.4999138414859772, | |
| 0.5005282163619995, | |
| 0.4700585603713989, | |
| 0.4792395234107971, | |
| 0.484203577041626, | |
| 0.5069689750671387, | |
| 0.5312584042549133, | |
| 0.48872625827789307, | |
| 0.4664309024810791, | |
| 0.4882381856441498, | |
| 0.49106600880622864, | |
| 0.5087692737579346, | |
| 0.4675412178039551, | |
| 0.573962390422821, | |
| 0.4947565197944641, | |
| 0.4820893108844757, | |
| 0.5046402215957642, | |
| 0.5246191620826721, | |
| 0.5146574378013611, | |
| 0.5106992125511169, | |
| 0.5022192597389221, | |
| 0.5226336717605591, | |
| 0.5060445070266724, | |
| 0.481097012758255, | |
| 0.5169151425361633, | |
| 0.4949752390384674, | |
| 0.4639112055301666, | |
| 0.4817794859409332, | |
| 0.48367029428482056, | |
| 0.4781152904033661, | |
| 0.47685888409614563, | |
| 0.48095741868019104, | |
| 0.491726815700531, | |
| 0.5017462968826294, | |
| 0.4904364347457886, | |
| 0.4698854982852936, | |
| 0.5083165168762207, | |
| 0.5017600655555725, | |
| 0.5108470916748047, | |
| 0.48250630497932434, | |
| 0.5158757567405701, | |
| 0.5019074082374573, | |
| 0.5176712274551392, | |
| 0.5154513716697693, | |
| 0.503238320350647, | |
| 0.5167521834373474, | |
| 0.4993845224380493, | |
| 0.4867921769618988, | |
| 0.47787371277809143, | |
| 0.46018683910369873 | |
| ], | |
| "eval_code_is_correct_runtime": 29.2407, | |
| "eval_code_is_correct_samples_per_second": 3.42, | |
| "eval_code_is_correct_steps_per_second": 0.068, | |
| "epoch": 2.63, | |
| "step": 50 | |
| }, | |
| { | |
| "loss": 0.6957, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 2.68, | |
| "step": 51 | |
| }, | |
| { | |
| "loss": 0.6726, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 2.74, | |
| "step": 52 | |
| }, | |
| { | |
| "loss": 0.6883, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 2.79, | |
| "step": 53 | |
| }, | |
| { | |
| "loss": 0.6894, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 2.84, | |
| "step": 54 | |
| }, | |
| { | |
| "loss": 0.6743, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 2.89, | |
| "step": 55 | |
| }, | |
| { | |
| "loss": 0.6833, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 2.95, | |
| "step": 56 | |
| }, | |
| { | |
| "loss": 0.6786, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 3.0, | |
| "step": 57 | |
| }, | |
| { | |
| "loss": 0.699, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 3.05, | |
| "step": 58 | |
| }, | |
| { | |
| "loss": 0.6791, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 3.11, | |
| "step": 59 | |
| }, | |
| { | |
| "loss": 0.6751, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 3.16, | |
| "step": 60 | |
| }, | |
| { | |
| "loss": 0.6812, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 3.21, | |
| "step": 61 | |
| }, | |
| { | |
| "loss": 0.6724, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 3.26, | |
| "step": 62 | |
| }, | |
| { | |
| "loss": 0.6829, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 3.32, | |
| "step": 63 | |
| }, | |
| { | |
| "loss": 0.6842, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 3.37, | |
| "step": 64 | |
| }, | |
| { | |
| "loss": 0.6806, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 3.42, | |
| "step": 65 | |
| }, | |
| { | |
| "loss": 0.6905, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 3.47, | |
| "step": 66 | |
| }, | |
| { | |
| "loss": 0.6845, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 3.53, | |
| "step": 67 | |
| }, | |
| { | |
| "loss": 0.6722, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 3.58, | |
| "step": 68 | |
| }, | |
| { | |
| "loss": 0.6976, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 3.63, | |
| "step": 69 | |
| }, | |
| { | |
| "loss": 0.6757, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 3.68, | |
| "step": 70 | |
| }, | |
| { | |
| "loss": 0.6638, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 3.74, | |
| "step": 71 | |
| }, | |
| { | |
| "loss": 0.6589, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 3.79, | |
| "step": 72 | |
| }, | |
| { | |
| "loss": 0.6792, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 3.84, | |
| "step": 73 | |
| }, | |
| { | |
| "loss": 0.6868, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 3.89, | |
| "step": 74 | |
| }, | |
| { | |
| "loss": 0.6743, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 3.95, | |
| "step": 75 | |
| }, | |
| { | |
| "eval_code_is_correct_loss": 0.7115283012390137, | |
| "eval_code_is_correct_score": -0.25936323404312134, | |
| "eval_code_is_correct_brier_score": 0.25936323404312134, | |
| "eval_code_is_correct_average_probability": 0.492844820022583, | |
| "eval_code_is_correct_accuracy": 0.45, | |
| "eval_code_is_correct_probabilities": [ | |
| 0.5315108895301819, | |
| 0.4404357671737671, | |
| 0.4393325448036194, | |
| 0.5136876702308655, | |
| 0.4317817986011505, | |
| 0.5407170057296753, | |
| 0.42210498452186584, | |
| 0.5032879114151001, | |
| 0.499985009431839, | |
| 0.5063377022743225, | |
| 0.44853413105010986, | |
| 0.5249595642089844, | |
| 0.519929051399231, | |
| 0.5444207191467285, | |
| 0.46772903203964233, | |
| 0.4709188640117645, | |
| 0.49579933285713196, | |
| 0.5452953577041626, | |
| 0.5071123838424683, | |
| 0.4668845534324646, | |
| 0.5050070285797119, | |
| 0.4854079782962799, | |
| 0.4207446873188019, | |
| 0.5136836171150208, | |
| 0.5283437371253967, | |
| 0.5321181416511536, | |
| 0.4974518120288849, | |
| 0.4980766177177429, | |
| 0.43917903304100037, | |
| 0.5816516876220703, | |
| 0.4618088901042938, | |
| 0.48619911074638367, | |
| 0.5236653089523315, | |
| 0.45910876989364624, | |
| 0.40605857968330383, | |
| 0.4992276430130005, | |
| 0.45045509934425354, | |
| 0.47411617636680603, | |
| 0.40248948335647583, | |
| 0.5305703282356262, | |
| 0.4813811182975769, | |
| 0.5272884368896484, | |
| 0.6161030530929565, | |
| 0.46347883343696594, | |
| 0.43354177474975586, | |
| 0.4877569079399109, | |
| 0.5148259997367859, | |
| 0.5279054045677185, | |
| 0.5294895172119141, | |
| 0.5120590329170227, | |
| 0.5139973163604736, | |
| 0.5520778298377991, | |
| 0.45969536900520325, | |
| 0.4304928183555603, | |
| 0.48464471101760864, | |
| 0.5018194913864136, | |
| 0.5791583061218262, | |
| 0.47863584756851196, | |
| 0.41838565468788147, | |
| 0.4741626977920532, | |
| 0.4651537537574768, | |
| 0.5022154450416565, | |
| 0.4011237621307373, | |
| 0.6317132115364075, | |
| 0.4900273084640503, | |
| 0.44324833154678345, | |
| 0.4855543375015259, | |
| 0.5688998103141785, | |
| 0.532179057598114, | |
| 0.5357102155685425, | |
| 0.48324429988861084, | |
| 0.5526444911956787, | |
| 0.539372444152832, | |
| 0.44041797518730164, | |
| 0.5359059572219849, | |
| 0.46845659613609314, | |
| 0.42640483379364014, | |
| 0.4501952528953552, | |
| 0.4579535722732544, | |
| 0.4656803011894226, | |
| 0.43683329224586487, | |
| 0.448000431060791, | |
| 0.482930064201355, | |
| 0.5224722623825073, | |
| 0.4616173803806305, | |
| 0.42225685715675354, | |
| 0.48842281103134155, | |
| 0.5094820261001587, | |
| 0.5359870195388794, | |
| 0.45143479108810425, | |
| 0.5495825409889221, | |
| 0.5185818672180176, | |
| 0.5359978675842285, | |
| 0.5442188382148743, | |
| 0.5404256582260132, | |
| 0.5551626682281494, | |
| 0.5226612091064453, | |
| 0.4831567108631134, | |
| 0.4785938560962677, | |
| 0.41752859950065613 | |
| ], | |
| "eval_code_is_correct_runtime": 29.2291, | |
| "eval_code_is_correct_samples_per_second": 3.421, | |
| "eval_code_is_correct_steps_per_second": 0.068, | |
| "epoch": 3.95, | |
| "step": 75 | |
| }, | |
| { | |
| "loss": 0.6737, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 4.0, | |
| "step": 76 | |
| }, | |
| { | |
| "loss": 0.6813, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 4.05, | |
| "step": 77 | |
| }, | |
| { | |
| "loss": 0.6845, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 4.11, | |
| "step": 78 | |
| }, | |
| { | |
| "loss": 0.68, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 4.16, | |
| "step": 79 | |
| }, | |
| { | |
| "loss": 0.6638, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 4.21, | |
| "step": 80 | |
| }, | |
| { | |
| "loss": 0.7058, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 4.26, | |
| "step": 81 | |
| }, | |
| { | |
| "loss": 0.6644, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 4.32, | |
| "step": 82 | |
| }, | |
| { | |
| "loss": 0.6641, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 4.37, | |
| "step": 83 | |
| }, | |
| { | |
| "loss": 0.665, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 4.42, | |
| "step": 84 | |
| }, | |
| { | |
| "loss": 0.6785, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 4.47, | |
| "step": 85 | |
| }, | |
| { | |
| "loss": 0.6354, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 4.53, | |
| "step": 86 | |
| }, | |
| { | |
| "loss": 0.6546, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 4.58, | |
| "step": 87 | |
| }, | |
| { | |
| "loss": 0.6678, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 4.63, | |
| "step": 88 | |
| }, | |
| { | |
| "loss": 0.6342, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 4.68, | |
| "step": 89 | |
| }, | |
| { | |
| "loss": 0.6919, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 4.74, | |
| "step": 90 | |
| }, | |
| { | |
| "loss": 0.6494, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 4.79, | |
| "step": 91 | |
| }, | |
| { | |
| "loss": 0.6597, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 4.84, | |
| "step": 92 | |
| }, | |
| { | |
| "loss": 0.6769, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 4.89, | |
| "step": 93 | |
| }, | |
| { | |
| "loss": 0.6669, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 4.95, | |
| "step": 94 | |
| }, | |
| { | |
| "loss": 0.6585, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 5.0, | |
| "step": 95 | |
| }, | |
| { | |
| "loss": 0.6861, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 5.05, | |
| "step": 96 | |
| }, | |
| { | |
| "loss": 0.6498, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 5.11, | |
| "step": 97 | |
| }, | |
| { | |
| "loss": 0.6466, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 5.16, | |
| "step": 98 | |
| }, | |
| { | |
| "loss": 0.6421, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 5.21, | |
| "step": 99 | |
| }, | |
| { | |
| "loss": 0.6568, | |
| "learning_rate": 7.2e-05, | |
| "epoch": 5.26, | |
| "step": 100 | |
| }, | |
| { | |
| "eval_code_is_correct_loss": 0.7250129580497742, | |
| "eval_code_is_correct_score": -0.2655547261238098, | |
| "eval_code_is_correct_brier_score": 0.2655547261238098, | |
| "eval_code_is_correct_average_probability": 0.49155572056770325, | |
| "eval_code_is_correct_accuracy": 0.48, | |
| "eval_code_is_correct_probabilities": [ | |
| 0.5083851218223572, | |
| 0.4273935854434967, | |
| 0.4364035129547119, | |
| 0.4872526526451111, | |
| 0.37440311908721924, | |
| 0.5888470411300659, | |
| 0.37513232231140137, | |
| 0.48127123713493347, | |
| 0.47687897086143494, | |
| 0.4585302472114563, | |
| 0.4116853177547455, | |
| 0.5032776594161987, | |
| 0.5315262675285339, | |
| 0.5401375889778137, | |
| 0.47433847188949585, | |
| 0.48878082633018494, | |
| 0.5708949565887451, | |
| 0.5461510419845581, | |
| 0.5810272097587585, | |
| 0.4907934367656708, | |
| 0.4523153305053711, | |
| 0.5213923454284668, | |
| 0.40194064378738403, | |
| 0.4633757770061493, | |
| 0.4978197515010834, | |
| 0.5100975632667542, | |
| 0.5173598527908325, | |
| 0.5476617813110352, | |
| 0.45146241784095764, | |
| 0.6570491194725037, | |
| 0.453940212726593, | |
| 0.5251180529594421, | |
| 0.48246118426322937, | |
| 0.4712594449520111, | |
| 0.3543213903903961, | |
| 0.4387568533420563, | |
| 0.45936518907546997, | |
| 0.48073822259902954, | |
| 0.3076059818267822, | |
| 0.5172088742256165, | |
| 0.5109153985977173, | |
| 0.49946558475494385, | |
| 0.7792166471481323, | |
| 0.43944960832595825, | |
| 0.42077937722206116, | |
| 0.5631754398345947, | |
| 0.493571937084198, | |
| 0.49611571431159973, | |
| 0.5187433362007141, | |
| 0.5067903995513916, | |
| 0.48328694701194763, | |
| 0.6678159236907959, | |
| 0.3781387507915497, | |
| 0.3635239005088806, | |
| 0.5364617109298706, | |
| 0.5524997115135193, | |
| 0.632897675037384, | |
| 0.511653482913971, | |
| 0.3425663709640503, | |
| 0.5403125882148743, | |
| 0.47932958602905273, | |
| 0.5451420545578003, | |
| 0.32550284266471863, | |
| 0.7307556867599487, | |
| 0.5360798835754395, | |
| 0.36980098485946655, | |
| 0.5202561616897583, | |
| 0.6240019798278809, | |
| 0.5508140921592712, | |
| 0.5679302215576172, | |
| 0.5044476389884949, | |
| 0.5724747776985168, | |
| 0.5710216760635376, | |
| 0.3993741571903229, | |
| 0.5445007681846619, | |
| 0.4687022268772125, | |
| 0.38320469856262207, | |
| 0.4335331320762634, | |
| 0.44064751267433167, | |
| 0.3625722825527191, | |
| 0.42068955302238464, | |
| 0.3999265134334564, | |
| 0.38937661051750183, | |
| 0.5059962272644043, | |
| 0.48507365584373474, | |
| 0.3598549962043762, | |
| 0.5178597569465637, | |
| 0.518267810344696, | |
| 0.5441962480545044, | |
| 0.4130031168460846, | |
| 0.613496720790863, | |
| 0.45683199167251587, | |
| 0.534511148929596, | |
| 0.5284443497657776, | |
| 0.6089046001434326, | |
| 0.5780519843101501, | |
| 0.544437050819397, | |
| 0.41231974959373474, | |
| 0.5470799207687378, | |
| 0.34541189670562744 | |
| ], | |
| "eval_code_is_correct_runtime": 29.2342, | |
| "eval_code_is_correct_samples_per_second": 3.421, | |
| "eval_code_is_correct_steps_per_second": 0.068, | |
| "epoch": 5.26, | |
| "step": 100 | |
| }, | |
| { | |
| "train_runtime": 1903.4626, | |
| "train_samples_per_second": 1.681, | |
| "train_steps_per_second": 0.053, | |
| "total_flos": 0.0, | |
| "train_loss": 0.6841741448640823, | |
| "epoch": 5.26, | |
| "step": 100 | |
| } | |
| ] |