diff --git "a/log/debug_0.log" "b/log/debug_0.log" new file mode 100644--- /dev/null +++ "b/log/debug_0.log" @@ -0,0 +1,2514 @@ +12/21/2021 14:34:56 - INFO - codeparrot_training - Distributed environment: TPU +Num processes: 8 +Process index: 0 +Local process index: 0 +Device: xla:1 +Use FP16 precision: False + +12/21/2021 14:34:56 - WARNING - huggingface_hub.repository - Revision `robust-sun-12` does not exist. Created and checked out branch `robust-sun-12`. +12/21/2021 14:34:56 - WARNING - huggingface_hub.repository - +12/21/2021 14:35:08 - WARNING - datasets.builder - Using custom data configuration lvwerra___codeparrot-clean-train-a1efdd1059bd841d +12/21/2021 14:35:09 - WARNING - datasets.builder - Using custom data configuration lvwerra___codeparrot-clean-valid-a800eb55c299abc0 +12/21/2021 14:35:51 - INFO - codeparrot_training - Step 0: {'lr': 0.0, 'samples': 512, 'steps': 0, 'batch_loss/train': 1.0193529492244124} +12/21/2021 14:37:13 - INFO - codeparrot_training - Step 1: {'lr': 2.8571428571428573e-06, 'samples': 1024, 'steps': 1, 'batch_loss/train': 0.9981147311627865} +12/21/2021 14:38:39 - INFO - codeparrot_training - Step 2: {'lr': 5.7142857142857145e-06, 'samples': 1536, 'steps': 2, 'batch_loss/train': 0.955647824332118} +12/21/2021 14:38:52 - INFO - codeparrot_training - Step 3: {'lr': 8.571428571428573e-06, 'samples': 2048, 'steps': 3, 'batch_loss/train': 1.0883512431755662} +12/21/2021 14:39:03 - INFO - codeparrot_training - Step 4: {'lr': 1.1428571428571429e-05, 'samples': 2560, 'steps': 4, 'batch_loss/train': 0.8130962830036879} +12/21/2021 14:39:13 - INFO - codeparrot_training - Step 5: {'lr': 1.4285714285714285e-05, 'samples': 3072, 'steps': 5, 'batch_loss/train': 1.0086954981088638} +12/21/2021 14:39:25 - INFO - codeparrot_training - Step 6: {'lr': 1.7142857142857145e-05, 'samples': 3584, 'steps': 6, 'batch_loss/train': 1.0232350481674075} +12/21/2021 14:39:36 - INFO - codeparrot_training - Step 7: {'lr': 2e-05, 'samples': 4096, 'steps': 7, 'batch_loss/train': 0.9230576828122139} +12/21/2021 14:39:47 - INFO - codeparrot_training - Step 8: {'lr': 2.2857142857142858e-05, 'samples': 4608, 'steps': 8, 'batch_loss/train': 0.9473168570548296} +12/21/2021 14:40:00 - INFO - codeparrot_training - Step 9: {'lr': 2.5714285714285714e-05, 'samples': 5120, 'steps': 9, 'batch_loss/train': 0.9869899312034249} +12/21/2021 14:40:10 - INFO - codeparrot_training - Step 10: {'lr': 2.857142857142857e-05, 'samples': 5632, 'steps': 10, 'batch_loss/train': 0.8964740806259215} +12/21/2021 14:40:21 - INFO - codeparrot_training - Step 11: {'lr': 3.142857142857143e-05, 'samples': 6144, 'steps': 11, 'batch_loss/train': 1.0702612651512027} +12/21/2021 14:40:31 - INFO - codeparrot_training - Step 12: {'lr': 3.428571428571429e-05, 'samples': 6656, 'steps': 12, 'batch_loss/train': 1.0014494936913252} +12/21/2021 14:40:43 - INFO - codeparrot_training - Step 13: {'lr': 3.7142857142857143e-05, 'samples': 7168, 'steps': 13, 'batch_loss/train': 0.918277096003294} +12/21/2021 14:40:54 - INFO - codeparrot_training - Step 14: {'lr': 4e-05, 'samples': 7680, 'steps': 14, 'batch_loss/train': 1.0102925430983305} +12/21/2021 14:41:05 - INFO - codeparrot_training - Step 15: {'lr': 4.2857142857142856e-05, 'samples': 8192, 'steps': 15, 'batch_loss/train': 0.9099750043824315} +12/21/2021 14:41:17 - INFO - codeparrot_training - Step 16: {'lr': 4.5714285714285716e-05, 'samples': 8704, 'steps': 16, 'batch_loss/train': 1.0381581708788872} +12/21/2021 14:41:28 - INFO - codeparrot_training - Step 17: {'lr': 
4.857142857142857e-05, 'samples': 9216, 'steps': 17, 'batch_loss/train': 0.912262661382556} +12/21/2021 14:41:38 - INFO - codeparrot_training - Step 18: {'lr': 5.142857142857143e-05, 'samples': 9728, 'steps': 18, 'batch_loss/train': 1.0805493760854006} +12/21/2021 14:41:51 - INFO - codeparrot_training - Step 19: {'lr': 5.428571428571429e-05, 'samples': 10240, 'steps': 19, 'batch_loss/train': 0.9622202254831791} +12/21/2021 14:42:02 - INFO - codeparrot_training - Step 20: {'lr': 5.714285714285714e-05, 'samples': 10752, 'steps': 20, 'batch_loss/train': 1.0766527820378542} +12/21/2021 14:42:12 - INFO - codeparrot_training - Step 21: {'lr': 6e-05, 'samples': 11264, 'steps': 21, 'batch_loss/train': 1.3213243894279003} +12/21/2021 14:42:25 - INFO - codeparrot_training - Step 22: {'lr': 6.285714285714286e-05, 'samples': 11776, 'steps': 22, 'batch_loss/train': 1.001508022658527} +12/21/2021 14:42:35 - INFO - codeparrot_training - Step 23: {'lr': 6.571428571428571e-05, 'samples': 12288, 'steps': 23, 'batch_loss/train': 1.0677247643470764} +12/21/2021 14:42:46 - INFO - codeparrot_training - Step 24: {'lr': 6.857142857142858e-05, 'samples': 12800, 'steps': 24, 'batch_loss/train': 0.9136228444986045} +12/21/2021 14:42:56 - INFO - codeparrot_training - Step 25: {'lr': 7.142857142857142e-05, 'samples': 13312, 'steps': 25, 'batch_loss/train': 1.1633764002472162} +12/21/2021 14:43:09 - INFO - codeparrot_training - Step 26: {'lr': 7.428571428571429e-05, 'samples': 13824, 'steps': 26, 'batch_loss/train': 0.9266835525631905} +12/21/2021 14:43:19 - INFO - codeparrot_training - Step 27: {'lr': 7.714285714285714e-05, 'samples': 14336, 'steps': 27, 'batch_loss/train': 0.9802188752219081} +12/21/2021 14:43:30 - INFO - codeparrot_training - Step 28: {'lr': 8e-05, 'samples': 14848, 'steps': 28, 'batch_loss/train': 1.007140651345253} +12/21/2021 14:43:42 - INFO - codeparrot_training - Step 29: {'lr': 8.285714285714286e-05, 'samples': 15360, 'steps': 29, 'batch_loss/train': 0.8645388996228576} +12/21/2021 14:43:53 - INFO - codeparrot_training - Step 30: {'lr': 8.571428571428571e-05, 'samples': 15872, 'steps': 30, 'batch_loss/train': 0.9906511381268501} +12/21/2021 14:44:04 - INFO - codeparrot_training - Step 31: {'lr': 8.857142857142857e-05, 'samples': 16384, 'steps': 31, 'batch_loss/train': 0.8584465784952044} +12/21/2021 14:44:15 - INFO - codeparrot_training - Step 32: {'lr': 9.142857142857143e-05, 'samples': 16896, 'steps': 32, 'batch_loss/train': 0.9680130295455456} +12/21/2021 14:44:26 - INFO - codeparrot_training - Step 33: {'lr': 9.42857142857143e-05, 'samples': 17408, 'steps': 33, 'batch_loss/train': 1.008946900255978} +12/21/2021 14:44:37 - INFO - codeparrot_training - Step 34: {'lr': 9.714285714285714e-05, 'samples': 17920, 'steps': 34, 'batch_loss/train': 1.020456189289689} +12/21/2021 14:44:47 - INFO - codeparrot_training - Step 35: {'lr': 0.0001, 'samples': 18432, 'steps': 35, 'batch_loss/train': 0.9887907225638628} +12/21/2021 14:44:59 - INFO - codeparrot_training - Step 36: {'lr': 0.00010285714285714286, 'samples': 18944, 'steps': 36, 'batch_loss/train': 1.0221164394170046} +12/21/2021 14:45:10 - INFO - codeparrot_training - Step 37: {'lr': 0.00010571428571428572, 'samples': 19456, 'steps': 37, 'batch_loss/train': 1.0078937038779259} +12/21/2021 14:45:20 - INFO - codeparrot_training - Step 38: {'lr': 0.00010857142857142858, 'samples': 19968, 'steps': 38, 'batch_loss/train': 0.9564523370936513} +12/21/2021 14:45:32 - INFO - codeparrot_training - Step 39: {'lr': 0.00011142857142857143, 'samples': 20480, 
'steps': 39, 'batch_loss/train': 0.9587605223059654} +12/21/2021 14:45:43 - INFO - codeparrot_training - Step 40: {'lr': 0.00011428571428571428, 'samples': 20992, 'steps': 40, 'batch_loss/train': 0.963530576787889} +12/21/2021 14:45:53 - INFO - codeparrot_training - Step 41: {'lr': 0.00011714285714285715, 'samples': 21504, 'steps': 41, 'batch_loss/train': 0.9486037753522396} +12/21/2021 14:46:06 - INFO - codeparrot_training - Step 42: {'lr': 0.00012, 'samples': 22016, 'steps': 42, 'batch_loss/train': 1.0729197282344103} +12/21/2021 14:46:17 - INFO - codeparrot_training - Step 43: {'lr': 0.00012285714285714287, 'samples': 22528, 'steps': 43, 'batch_loss/train': 0.9991202894598246} +12/21/2021 14:46:27 - INFO - codeparrot_training - Step 44: {'lr': 0.00012571428571428572, 'samples': 23040, 'steps': 44, 'batch_loss/train': 0.9529557069763541} +12/21/2021 14:46:38 - INFO - codeparrot_training - Step 45: {'lr': 0.00012857142857142855, 'samples': 23552, 'steps': 45, 'batch_loss/train': 0.9314988972619176} +12/21/2021 14:46:50 - INFO - codeparrot_training - Step 46: {'lr': 0.00013142857142857143, 'samples': 24064, 'steps': 46, 'batch_loss/train': 0.8918952587991953} +12/21/2021 14:47:01 - INFO - codeparrot_training - Step 47: {'lr': 0.00013428571428571428, 'samples': 24576, 'steps': 47, 'batch_loss/train': 0.9235455105081201} +12/21/2021 14:47:11 - INFO - codeparrot_training - Step 48: {'lr': 0.00013714285714285716, 'samples': 25088, 'steps': 48, 'batch_loss/train': 0.9857792295515537} +12/21/2021 14:47:24 - INFO - codeparrot_training - Step 49: {'lr': 0.00014000000000000001, 'samples': 25600, 'steps': 49, 'batch_loss/train': 0.9763009026646614} +12/21/2021 14:47:35 - INFO - codeparrot_training - Step 50: {'lr': 0.00014285714285714284, 'samples': 26112, 'steps': 50, 'batch_loss/train': 0.9829016849398613} +12/21/2021 14:47:45 - INFO - codeparrot_training - Step 51: {'lr': 0.00014571428571428572, 'samples': 26624, 'steps': 51, 'batch_loss/train': 1.001112732104957} +12/21/2021 14:47:57 - INFO - codeparrot_training - Step 52: {'lr': 0.00014857142857142857, 'samples': 27136, 'steps': 52, 'batch_loss/train': 1.321533925831318} +12/21/2021 14:48:08 - INFO - codeparrot_training - Step 53: {'lr': 0.00015142857142857145, 'samples': 27648, 'steps': 53, 'batch_loss/train': 1.3060968089848757} +12/21/2021 14:48:19 - INFO - codeparrot_training - Step 54: {'lr': 0.00015428571428571428, 'samples': 28160, 'steps': 54, 'batch_loss/train': 0.9784271735697985} +12/21/2021 14:48:29 - INFO - codeparrot_training - Step 55: {'lr': 0.00015714285714285713, 'samples': 28672, 'steps': 55, 'batch_loss/train': 0.9450377775356174} +12/21/2021 14:48:41 - INFO - codeparrot_training - Step 56: {'lr': 0.00016, 'samples': 29184, 'steps': 56, 'batch_loss/train': 0.9179345397278666} +12/21/2021 14:48:52 - INFO - codeparrot_training - Step 57: {'lr': 0.00016285714285714287, 'samples': 29696, 'steps': 57, 'batch_loss/train': 0.9568109698593616} +12/21/2021 14:49:02 - INFO - codeparrot_training - Step 58: {'lr': 0.00016571428571428572, 'samples': 30208, 'steps': 58, 'batch_loss/train': 0.9738078229129314} +12/21/2021 14:49:15 - INFO - codeparrot_training - Step 59: {'lr': 0.00016857142857142857, 'samples': 30720, 'steps': 59, 'batch_loss/train': 1.0150575507432222} +12/21/2021 14:49:26 - INFO - codeparrot_training - Step 60: {'lr': 0.00017142857142857143, 'samples': 31232, 'steps': 60, 'batch_loss/train': 0.9956141086295247} +12/21/2021 14:49:36 - INFO - codeparrot_training - Step 61: {'lr': 0.0001742857142857143, 'samples': 31744, 
'steps': 61, 'batch_loss/train': 0.9621291160583496} +12/21/2021 14:49:48 - INFO - codeparrot_training - Step 62: {'lr': 0.00017714285714285713, 'samples': 32256, 'steps': 62, 'batch_loss/train': 0.9962088353931904} +12/21/2021 14:49:59 - INFO - codeparrot_training - Step 63: {'lr': 0.00017999999999999998, 'samples': 32768, 'steps': 63, 'batch_loss/train': 0.9740501055493951} +12/21/2021 14:50:10 - INFO - codeparrot_training - Step 64: {'lr': 0.00018285714285714286, 'samples': 33280, 'steps': 64, 'batch_loss/train': 0.9131401311606169} +12/21/2021 14:50:20 - INFO - codeparrot_training - Step 65: {'lr': 0.00018571428571428572, 'samples': 33792, 'steps': 65, 'batch_loss/train': 1.007351204752922} +12/21/2021 14:50:32 - INFO - codeparrot_training - Step 66: {'lr': 0.0001885714285714286, 'samples': 34304, 'steps': 66, 'batch_loss/train': 1.0498820468783379} +12/21/2021 14:50:43 - INFO - codeparrot_training - Step 67: {'lr': 0.00019142857142857142, 'samples': 34816, 'steps': 67, 'batch_loss/train': 0.982292203232646} +12/21/2021 14:50:53 - INFO - codeparrot_training - Step 68: {'lr': 0.00019428571428571428, 'samples': 35328, 'steps': 68, 'batch_loss/train': 1.0195065662264824} +12/21/2021 14:51:06 - INFO - codeparrot_training - Step 69: {'lr': 0.00019714285714285716, 'samples': 35840, 'steps': 69, 'batch_loss/train': 1.001670149154961} +12/21/2021 14:51:16 - INFO - codeparrot_training - Step 70: {'lr': 0.0002, 'samples': 36352, 'steps': 70, 'batch_loss/train': 1.0421318169683218} +12/21/2021 14:51:27 - INFO - codeparrot_training - Step 71: {'lr': 0.00020285714285714286, 'samples': 36864, 'steps': 71, 'batch_loss/train': 1.0117254294455051} +12/21/2021 14:51:39 - INFO - codeparrot_training - Step 72: {'lr': 0.00020571428571428572, 'samples': 37376, 'steps': 72, 'batch_loss/train': 0.9900305140763521} +12/21/2021 14:51:50 - INFO - codeparrot_training - Step 73: {'lr': 0.00020857142857142857, 'samples': 37888, 'steps': 73, 'batch_loss/train': 0.9446136141195893} +12/21/2021 14:52:00 - INFO - codeparrot_training - Step 74: {'lr': 0.00021142857142857145, 'samples': 38400, 'steps': 74, 'batch_loss/train': 1.0178908160887659} +12/21/2021 14:52:14 - INFO - codeparrot_training - Step 75: {'lr': 0.00021428571428571427, 'samples': 38912, 'steps': 75, 'batch_loss/train': 0.9789360649883747} +12/21/2021 14:52:24 - INFO - codeparrot_training - Step 76: {'lr': 0.00021714285714285715, 'samples': 39424, 'steps': 76, 'batch_loss/train': 1.102635620161891} +12/21/2021 14:52:35 - INFO - codeparrot_training - Step 77: {'lr': 0.00022, 'samples': 39936, 'steps': 77, 'batch_loss/train': 1.0151197109371424} +12/21/2021 14:52:45 - INFO - codeparrot_training - Step 78: {'lr': 0.00022285714285714286, 'samples': 40448, 'steps': 78, 'batch_loss/train': 1.7697611907497048} +12/21/2021 14:52:58 - INFO - codeparrot_training - Step 79: {'lr': 0.00022571428571428571, 'samples': 40960, 'steps': 79, 'batch_loss/train': 0.9738621157594025} +12/21/2021 14:53:08 - INFO - codeparrot_training - Step 80: {'lr': 0.00022857142857142857, 'samples': 41472, 'steps': 80, 'batch_loss/train': 1.020509110763669} +12/21/2021 14:53:19 - INFO - codeparrot_training - Step 81: {'lr': 0.00023142857142857142, 'samples': 41984, 'steps': 81, 'batch_loss/train': 1.0166559685021639} +12/21/2021 14:53:31 - INFO - codeparrot_training - Step 82: {'lr': 0.0002342857142857143, 'samples': 42496, 'steps': 82, 'batch_loss/train': 1.2909208964556456} +12/21/2021 14:53:41 - INFO - codeparrot_training - Step 83: {'lr': 0.00023714285714285715, 'samples': 43008, 
'steps': 83, 'batch_loss/train': 0.9644039301201701} +12/21/2021 14:53:52 - INFO - codeparrot_training - Step 84: {'lr': 0.00024, 'samples': 43520, 'steps': 84, 'batch_loss/train': 1.0707816677168012} +12/21/2021 14:54:04 - INFO - codeparrot_training - Step 85: {'lr': 0.00024285714285714286, 'samples': 44032, 'steps': 85, 'batch_loss/train': 1.1155021954327822} +12/21/2021 14:54:15 - INFO - codeparrot_training - Step 86: {'lr': 0.00024571428571428574, 'samples': 44544, 'steps': 86, 'batch_loss/train': 0.9685124810785055} +12/21/2021 14:54:25 - INFO - codeparrot_training - Step 87: {'lr': 0.00024857142857142857, 'samples': 45056, 'steps': 87, 'batch_loss/train': 1.0195770990103483} +12/21/2021 14:54:36 - INFO - codeparrot_training - Step 88: {'lr': 0.00025142857142857145, 'samples': 45568, 'steps': 88, 'batch_loss/train': 0.9976689526811242} +12/21/2021 14:54:49 - INFO - codeparrot_training - Step 89: {'lr': 0.00025428571428571427, 'samples': 46080, 'steps': 89, 'batch_loss/train': 1.0429263738915324} +12/21/2021 14:55:00 - INFO - codeparrot_training - Step 90: {'lr': 0.0002571428571428571, 'samples': 46592, 'steps': 90, 'batch_loss/train': 0.9543298054486513} +12/21/2021 14:55:10 - INFO - codeparrot_training - Step 91: {'lr': 0.00026000000000000003, 'samples': 47104, 'steps': 91, 'batch_loss/train': 0.9885049089789391} +12/21/2021 14:55:22 - INFO - codeparrot_training - Step 92: {'lr': 0.00026285714285714286, 'samples': 47616, 'steps': 92, 'batch_loss/train': 0.7912878496572375} +12/21/2021 14:55:33 - INFO - codeparrot_training - Step 93: {'lr': 0.00026571428571428574, 'samples': 48128, 'steps': 93, 'batch_loss/train': 0.9988777888938785} +12/21/2021 14:55:43 - INFO - codeparrot_training - Step 94: {'lr': 0.00026857142857142856, 'samples': 48640, 'steps': 94, 'batch_loss/train': 0.9855301873758435} +12/21/2021 14:55:56 - INFO - codeparrot_training - Step 95: {'lr': 0.0002714285714285714, 'samples': 49152, 'steps': 95, 'batch_loss/train': 0.9779925588518381} +12/21/2021 14:56:07 - INFO - codeparrot_training - Step 96: {'lr': 0.0002742857142857143, 'samples': 49664, 'steps': 96, 'batch_loss/train': 1.0134917721152306} +12/21/2021 14:56:17 - INFO - codeparrot_training - Step 97: {'lr': 0.00027714285714285715, 'samples': 50176, 'steps': 97, 'batch_loss/train': 0.9322703145444393} +12/21/2021 14:56:28 - INFO - codeparrot_training - Step 98: {'lr': 0.00028000000000000003, 'samples': 50688, 'steps': 98, 'batch_loss/train': 1.0615130085498095} +12/21/2021 14:56:40 - INFO - codeparrot_training - Step 99: {'lr': 0.00028285714285714286, 'samples': 51200, 'steps': 99, 'batch_loss/train': 0.944296664558351} +12/21/2021 14:56:51 - INFO - codeparrot_training - Step 100: {'lr': 0.0002857142857142857, 'samples': 51712, 'steps': 100, 'batch_loss/train': 0.9070937177166343} +12/21/2021 14:57:01 - INFO - codeparrot_training - Step 101: {'lr': 0.0002885714285714286, 'samples': 52224, 'steps': 101, 'batch_loss/train': 1.016639574430883} +12/21/2021 14:57:13 - INFO - codeparrot_training - Step 102: {'lr': 0.00029142857142857144, 'samples': 52736, 'steps': 102, 'batch_loss/train': 0.9732032003812492} +12/21/2021 14:57:24 - INFO - codeparrot_training - Step 103: {'lr': 0.00029428571428571427, 'samples': 53248, 'steps': 103, 'batch_loss/train': 1.0322233149781823} +12/21/2021 14:57:34 - INFO - codeparrot_training - Step 104: {'lr': 0.00029714285714285715, 'samples': 53760, 'steps': 104, 'batch_loss/train': 0.9859379809349775} +12/21/2021 14:57:47 - INFO - codeparrot_training - Step 105: {'lr': 0.0003, 'samples': 
54272, 'steps': 105, 'batch_loss/train': 0.8461151635274291} +12/21/2021 14:57:58 - INFO - codeparrot_training - Step 106: {'lr': 0.0003028571428571429, 'samples': 54784, 'steps': 106, 'batch_loss/train': 1.1073736632242799} +12/21/2021 14:58:08 - INFO - codeparrot_training - Step 107: {'lr': 0.00030571428571428573, 'samples': 55296, 'steps': 107, 'batch_loss/train': 0.9310879334807396} +12/21/2021 14:58:20 - INFO - codeparrot_training - Step 108: {'lr': 0.00030857142857142856, 'samples': 55808, 'steps': 108, 'batch_loss/train': 1.0085503365844488} +12/21/2021 14:58:31 - INFO - codeparrot_training - Step 109: {'lr': 0.00031142857142857144, 'samples': 56320, 'steps': 109, 'batch_loss/train': 1.0120310503989458} +12/21/2021 14:58:42 - INFO - codeparrot_training - Step 110: {'lr': 0.00031428571428571427, 'samples': 56832, 'steps': 110, 'batch_loss/train': 1.012949876487255} +12/21/2021 14:58:52 - INFO - codeparrot_training - Step 111: {'lr': 0.00031714285714285715, 'samples': 57344, 'steps': 111, 'batch_loss/train': 0.9427720690146089} +12/21/2021 14:59:04 - INFO - codeparrot_training - Step 112: {'lr': 0.00032, 'samples': 57856, 'steps': 112, 'batch_loss/train': 0.9724619053304195} +12/21/2021 14:59:15 - INFO - codeparrot_training - Step 113: {'lr': 0.00032285714285714285, 'samples': 58368, 'steps': 113, 'batch_loss/train': 1.0471261069178581} +12/21/2021 14:59:25 - INFO - codeparrot_training - Step 114: {'lr': 0.00032571428571428573, 'samples': 58880, 'steps': 114, 'batch_loss/train': 0.9464971609413624} +12/21/2021 14:59:37 - INFO - codeparrot_training - Step 115: {'lr': 0.00032857142857142856, 'samples': 59392, 'steps': 115, 'batch_loss/train': 1.0060633458197117} +12/21/2021 14:59:48 - INFO - codeparrot_training - Step 116: {'lr': 0.00033142857142857144, 'samples': 59904, 'steps': 116, 'batch_loss/train': 1.039223862811923} +12/21/2021 14:59:58 - INFO - codeparrot_training - Step 117: {'lr': 0.0003342857142857143, 'samples': 60416, 'steps': 117, 'batch_loss/train': 0.9779045283794403} +12/21/2021 15:00:11 - INFO - codeparrot_training - Step 118: {'lr': 0.00033714285714285714, 'samples': 60928, 'steps': 118, 'batch_loss/train': 0.9705766662955284} +12/21/2021 15:00:22 - INFO - codeparrot_training - Step 119: {'lr': 0.00034, 'samples': 61440, 'steps': 119, 'batch_loss/train': 0.9785442650318146} +12/21/2021 15:00:32 - INFO - codeparrot_training - Step 120: {'lr': 0.00034285714285714285, 'samples': 61952, 'steps': 120, 'batch_loss/train': 0.9353706063702703} +12/21/2021 15:00:43 - INFO - codeparrot_training - Step 121: {'lr': 0.00034571428571428573, 'samples': 62464, 'steps': 121, 'batch_loss/train': 0.9075854606926441} +12/21/2021 15:00:55 - INFO - codeparrot_training - Step 122: {'lr': 0.0003485714285714286, 'samples': 62976, 'steps': 122, 'batch_loss/train': 1.002996614202857} +12/21/2021 15:01:05 - INFO - codeparrot_training - Step 123: {'lr': 0.00035142857142857144, 'samples': 63488, 'steps': 123, 'batch_loss/train': 0.8917915541678667} +12/21/2021 15:01:16 - INFO - codeparrot_training - Step 124: {'lr': 0.00035428571428571426, 'samples': 64000, 'steps': 124, 'batch_loss/train': 1.020295524969697} +12/21/2021 15:01:28 - INFO - codeparrot_training - Step 125: {'lr': 0.00035714285714285714, 'samples': 64512, 'steps': 125, 'batch_loss/train': 1.069016046822071} +12/21/2021 15:01:39 - INFO - codeparrot_training - Step 126: {'lr': 0.00035999999999999997, 'samples': 65024, 'steps': 126, 'batch_loss/train': 0.8671705303713679} +12/21/2021 15:01:49 - INFO - codeparrot_training - Step 127: 
{'lr': 0.0003628571428571429, 'samples': 65536, 'steps': 127, 'batch_loss/train': 1.2122967820614576} +12/21/2021 15:02:03 - INFO - codeparrot_training - Step 128: {'lr': 0.00036571428571428573, 'samples': 66048, 'steps': 128, 'batch_loss/train': 0.9741223966702819} +12/21/2021 15:02:13 - INFO - codeparrot_training - Step 129: {'lr': 0.00036857142857142855, 'samples': 66560, 'steps': 129, 'batch_loss/train': 0.9305755216628313} +12/21/2021 15:02:24 - INFO - codeparrot_training - Step 130: {'lr': 0.00037142857142857143, 'samples': 67072, 'steps': 130, 'batch_loss/train': 1.5241616610437632} +12/21/2021 15:02:35 - INFO - codeparrot_training - Step 131: {'lr': 0.00037428571428571426, 'samples': 67584, 'steps': 131, 'batch_loss/train': 0.9661178383976221} +12/21/2021 15:02:46 - INFO - codeparrot_training - Step 132: {'lr': 0.0003771428571428572, 'samples': 68096, 'steps': 132, 'batch_loss/train': 1.0141914580017328} +12/21/2021 15:02:57 - INFO - codeparrot_training - Step 133: {'lr': 0.00038, 'samples': 68608, 'steps': 133, 'batch_loss/train': 0.9654294233769178} +12/21/2021 15:03:08 - INFO - codeparrot_training - Step 134: {'lr': 0.00038285714285714285, 'samples': 69120, 'steps': 134, 'batch_loss/train': 0.948192348703742} +12/21/2021 15:03:20 - INFO - codeparrot_training - Step 135: {'lr': 0.0003857142857142857, 'samples': 69632, 'steps': 135, 'batch_loss/train': 1.0017092926427722} +12/21/2021 15:03:31 - INFO - codeparrot_training - Step 136: {'lr': 0.00038857142857142855, 'samples': 70144, 'steps': 136, 'batch_loss/train': 0.9743279106914997} +12/21/2021 15:03:42 - INFO - codeparrot_training - Step 137: {'lr': 0.00039142857142857143, 'samples': 70656, 'steps': 137, 'batch_loss/train': 0.8830758472904563} +12/21/2021 15:03:53 - INFO - codeparrot_training - Step 138: {'lr': 0.0003942857142857143, 'samples': 71168, 'steps': 138, 'batch_loss/train': 1.062146995216608} +12/21/2021 15:04:04 - INFO - codeparrot_training - Step 139: {'lr': 0.00039714285714285714, 'samples': 71680, 'steps': 139, 'batch_loss/train': 1.0399387907236814} +12/21/2021 15:04:15 - INFO - codeparrot_training - Step 140: {'lr': 0.0004, 'samples': 72192, 'steps': 140, 'batch_loss/train': 1.0582102639600635} +12/21/2021 15:04:25 - INFO - codeparrot_training - Step 141: {'lr': 0.00040285714285714285, 'samples': 72704, 'steps': 141, 'batch_loss/train': 1.0336521156132221} +12/21/2021 15:04:37 - INFO - codeparrot_training - Step 142: {'lr': 0.0004057142857142857, 'samples': 73216, 'steps': 142, 'batch_loss/train': 0.8318402171134949} +12/21/2021 15:04:48 - INFO - codeparrot_training - Step 143: {'lr': 0.0004085714285714286, 'samples': 73728, 'steps': 143, 'batch_loss/train': 0.8727374253794551} +12/21/2021 15:04:59 - INFO - codeparrot_training - Step 144: {'lr': 0.00041142857142857143, 'samples': 74240, 'steps': 144, 'batch_loss/train': 1.0500088389962912} +12/21/2021 15:05:11 - INFO - codeparrot_training - Step 145: {'lr': 0.0004142857142857143, 'samples': 74752, 'steps': 145, 'batch_loss/train': 1.0643755737692118} +12/21/2021 15:05:22 - INFO - codeparrot_training - Step 146: {'lr': 0.00041714285714285714, 'samples': 75264, 'steps': 146, 'batch_loss/train': 0.9461820563301444} +12/21/2021 15:05:32 - INFO - codeparrot_training - Step 147: {'lr': 0.00042, 'samples': 75776, 'steps': 147, 'batch_loss/train': 1.0224799066781998} +12/21/2021 15:05:44 - INFO - codeparrot_training - Step 148: {'lr': 0.0004228571428571429, 'samples': 76288, 'steps': 148, 'batch_loss/train': 0.8724416689947248} +12/21/2021 15:05:55 - INFO - 
codeparrot_training - Step 149: {'lr': 0.0004257142857142857, 'samples': 76800, 'steps': 149, 'batch_loss/train': 0.9072890188544989} +12/21/2021 15:06:06 - INFO - codeparrot_training - Step 150: {'lr': 0.00042857142857142855, 'samples': 77312, 'steps': 150, 'batch_loss/train': 0.9915821412578225} +12/21/2021 15:06:16 - INFO - codeparrot_training - Step 151: {'lr': 0.00043142857142857143, 'samples': 77824, 'steps': 151, 'batch_loss/train': 1.0291715785861015} +12/21/2021 15:06:29 - INFO - codeparrot_training - Step 152: {'lr': 0.0004342857142857143, 'samples': 78336, 'steps': 152, 'batch_loss/train': 1.022611951455474} +12/21/2021 15:06:39 - INFO - codeparrot_training - Step 153: {'lr': 0.0004371428571428572, 'samples': 78848, 'steps': 153, 'batch_loss/train': 1.0028812251985073} +12/21/2021 15:06:50 - INFO - codeparrot_training - Step 154: {'lr': 0.00044, 'samples': 79360, 'steps': 154, 'batch_loss/train': 1.1939127957448363} +12/21/2021 15:07:02 - INFO - codeparrot_training - Step 155: {'lr': 0.00044285714285714284, 'samples': 79872, 'steps': 155, 'batch_loss/train': 0.9952369630336761} +12/21/2021 15:07:13 - INFO - codeparrot_training - Step 156: {'lr': 0.0004457142857142857, 'samples': 80384, 'steps': 156, 'batch_loss/train': 1.0121058207005262} +12/21/2021 15:07:23 - INFO - codeparrot_training - Step 157: {'lr': 0.0004485714285714286, 'samples': 80896, 'steps': 157, 'batch_loss/train': 0.98527164850384} +12/21/2021 15:07:35 - INFO - codeparrot_training - Step 158: {'lr': 0.00045142857142857143, 'samples': 81408, 'steps': 158, 'batch_loss/train': 0.9504385013133287} +12/21/2021 15:07:46 - INFO - codeparrot_training - Step 159: {'lr': 0.0004542857142857143, 'samples': 81920, 'steps': 159, 'batch_loss/train': 0.9669047053903341} +12/21/2021 15:07:56 - INFO - codeparrot_training - Step 160: {'lr': 0.00045714285714285713, 'samples': 82432, 'steps': 160, 'batch_loss/train': 1.0060318671166897} +12/21/2021 15:08:07 - INFO - codeparrot_training - Step 161: {'lr': 0.00046, 'samples': 82944, 'steps': 161, 'batch_loss/train': 1.0761142205446959} +12/21/2021 15:08:19 - INFO - codeparrot_training - Step 162: {'lr': 0.00046285714285714284, 'samples': 83456, 'steps': 162, 'batch_loss/train': 0.9274779781699181} +12/21/2021 15:08:30 - INFO - codeparrot_training - Step 163: {'lr': 0.0004657142857142857, 'samples': 83968, 'steps': 163, 'batch_loss/train': 1.0799931325018406} +12/21/2021 15:08:40 - INFO - codeparrot_training - Step 164: {'lr': 0.0004685714285714286, 'samples': 84480, 'steps': 164, 'batch_loss/train': 0.8160594068467617} +12/21/2021 15:08:54 - INFO - codeparrot_training - Step 165: {'lr': 0.0004714285714285714, 'samples': 84992, 'steps': 165, 'batch_loss/train': 0.9737982749938965} +12/21/2021 15:09:05 - INFO - codeparrot_training - Step 166: {'lr': 0.0004742857142857143, 'samples': 85504, 'steps': 166, 'batch_loss/train': 0.9546704925596714} +12/21/2021 15:09:15 - INFO - codeparrot_training - Step 167: {'lr': 0.00047714285714285713, 'samples': 86016, 'steps': 167, 'batch_loss/train': 0.9489555452018976} +12/21/2021 15:09:27 - INFO - codeparrot_training - Step 168: {'lr': 0.00048, 'samples': 86528, 'steps': 168, 'batch_loss/train': 0.7765848562121391} +12/21/2021 15:09:38 - INFO - codeparrot_training - Step 169: {'lr': 0.0004828571428571429, 'samples': 87040, 'steps': 169, 'batch_loss/train': 1.0046934774145484} +12/21/2021 15:09:48 - INFO - codeparrot_training - Step 170: {'lr': 0.0004857142857142857, 'samples': 87552, 'steps': 170, 'batch_loss/train': 0.8790218532085419} +12/21/2021 
15:09:59 - INFO - codeparrot_training - Step 171: {'lr': 0.0004885714285714286, 'samples': 88064, 'steps': 171, 'batch_loss/train': 0.9702072646468878} +12/21/2021 15:10:11 - INFO - codeparrot_training - Step 172: {'lr': 0.0004914285714285715, 'samples': 88576, 'steps': 172, 'batch_loss/train': 0.9177612056955695} +12/21/2021 15:10:21 - INFO - codeparrot_training - Step 173: {'lr': 0.0004942857142857143, 'samples': 89088, 'steps': 173, 'batch_loss/train': 1.0101577769964933} +12/21/2021 15:10:32 - INFO - codeparrot_training - Step 174: {'lr': 0.0004971428571428571, 'samples': 89600, 'steps': 174, 'batch_loss/train': 0.9843772668391466} +12/21/2021 15:10:45 - INFO - codeparrot_training - Step 175: {'lr': 0.0005, 'samples': 90112, 'steps': 175, 'batch_loss/train': 1.0309578496962786} +12/21/2021 15:10:55 - INFO - codeparrot_training - Step 176: {'lr': 0.0004999999995030472, 'samples': 90624, 'steps': 176, 'batch_loss/train': 0.9139007637277246} +12/21/2021 15:11:06 - INFO - codeparrot_training - Step 177: {'lr': 0.0004999999980121888, 'samples': 91136, 'steps': 177, 'batch_loss/train': 1.0263297399505973} +12/21/2021 15:11:18 - INFO - codeparrot_training - Step 178: {'lr': 0.0004999999955274248, 'samples': 91648, 'steps': 178, 'batch_loss/train': 0.9592798547819257} +12/21/2021 15:11:29 - INFO - codeparrot_training - Step 179: {'lr': 0.0004999999920487552, 'samples': 92160, 'steps': 179, 'batch_loss/train': 0.8388644908554852} +12/21/2021 15:11:39 - INFO - codeparrot_training - Step 180: {'lr': 0.00049999998757618, 'samples': 92672, 'steps': 180, 'batch_loss/train': 0.9975963737815619} +12/21/2021 15:11:50 - INFO - codeparrot_training - Step 181: {'lr': 0.0004999999821096993, 'samples': 93184, 'steps': 181, 'batch_loss/train': 0.9878550455905497} +12/21/2021 15:12:03 - INFO - codeparrot_training - Step 182: {'lr': 0.0004999999756493131, 'samples': 93696, 'steps': 182, 'batch_loss/train': 1.0588420629501343} +12/21/2021 15:12:13 - INFO - codeparrot_training - Step 183: {'lr': 0.0004999999681950213, 'samples': 94208, 'steps': 183, 'batch_loss/train': 0.996025518514216} +12/21/2021 15:12:24 - INFO - codeparrot_training - Step 184: {'lr': 0.0004999999597468241, 'samples': 94720, 'steps': 184, 'batch_loss/train': 0.8855784628540277} +12/21/2021 15:12:36 - INFO - codeparrot_training - Step 185: {'lr': 0.0004999999503047214, 'samples': 95232, 'steps': 185, 'batch_loss/train': 0.9955014921724796} +12/21/2021 15:12:47 - INFO - codeparrot_training - Step 186: {'lr': 0.0004999999398687133, 'samples': 95744, 'steps': 186, 'batch_loss/train': 0.9876936078071594} +12/21/2021 15:12:57 - INFO - codeparrot_training - Step 187: {'lr': 0.0004999999284387999, 'samples': 96256, 'steps': 187, 'batch_loss/train': 0.9578468287363648} +12/21/2021 15:13:09 - INFO - codeparrot_training - Step 188: {'lr': 0.0004999999160149812, 'samples': 96768, 'steps': 188, 'batch_loss/train': 1.3733998239040375} +12/21/2021 15:13:20 - INFO - codeparrot_training - Step 189: {'lr': 0.0004999999025972571, 'samples': 97280, 'steps': 189, 'batch_loss/train': 0.8602615557610989} +12/21/2021 15:13:30 - INFO - codeparrot_training - Step 190: {'lr': 0.0004999998881856279, 'samples': 97792, 'steps': 190, 'batch_loss/train': 0.9249651916325092} +12/21/2021 15:13:41 - INFO - codeparrot_training - Step 191: {'lr': 0.0004999998727800935, 'samples': 98304, 'steps': 191, 'batch_loss/train': 1.0908228866755962} +12/21/2021 15:13:53 - INFO - codeparrot_training - Step 192: {'lr': 0.000499999856380654, 'samples': 98816, 'steps': 192, 
'batch_loss/train': 0.7210814044810832} +12/21/2021 15:14:04 - INFO - codeparrot_training - Step 193: {'lr': 0.0004999998389873094, 'samples': 99328, 'steps': 193, 'batch_loss/train': 1.003062218427658} +12/21/2021 15:14:15 - INFO - codeparrot_training - Step 194: {'lr': 0.0004999998206000599, 'samples': 99840, 'steps': 194, 'batch_loss/train': 0.9711842276155949} +12/21/2021 15:14:27 - INFO - codeparrot_training - Step 195: {'lr': 0.0004999998012189056, 'samples': 100352, 'steps': 195, 'batch_loss/train': 1.0285911057144403} +12/21/2021 15:14:38 - INFO - codeparrot_training - Step 196: {'lr': 0.0004999997808438464, 'samples': 100864, 'steps': 196, 'batch_loss/train': 0.8522499427199364} +12/21/2021 15:14:49 - INFO - codeparrot_training - Step 197: {'lr': 0.0004999997594748824, 'samples': 101376, 'steps': 197, 'batch_loss/train': 1.0216512009501457} +12/21/2021 15:15:00 - INFO - codeparrot_training - Step 198: {'lr': 0.0004999997371120139, 'samples': 101888, 'steps': 198, 'batch_loss/train': 0.9640177562832832} +12/21/2021 15:15:11 - INFO - codeparrot_training - Step 199: {'lr': 0.0004999997137552407, 'samples': 102400, 'steps': 199, 'batch_loss/train': 1.0707423686981201} +12/21/2021 15:15:22 - INFO - codeparrot_training - Step 200: {'lr': 0.000499999689404563, 'samples': 102912, 'steps': 200, 'batch_loss/train': 1.0855901474133134} +12/21/2021 15:15:32 - INFO - codeparrot_training - Step 201: {'lr': 0.000499999664059981, 'samples': 103424, 'steps': 201, 'batch_loss/train': 0.9749338757246733} +12/21/2021 15:15:44 - INFO - codeparrot_training - Step 202: {'lr': 0.0004999996377214949, 'samples': 103936, 'steps': 202, 'batch_loss/train': 0.9779012911021709} +12/21/2021 15:15:55 - INFO - codeparrot_training - Step 203: {'lr': 0.0004999996103891045, 'samples': 104448, 'steps': 203, 'batch_loss/train': 0.9549416210502386} +12/21/2021 15:16:05 - INFO - codeparrot_training - Step 204: {'lr': 0.00049999958206281, 'samples': 104960, 'steps': 204, 'batch_loss/train': 0.9440921135246754} +12/21/2021 15:16:18 - INFO - codeparrot_training - Step 205: {'lr': 0.0004999995527426116, 'samples': 105472, 'steps': 205, 'batch_loss/train': 0.9560529226437211} +12/21/2021 15:16:29 - INFO - codeparrot_training - Step 206: {'lr': 0.0004999995224285093, 'samples': 105984, 'steps': 206, 'batch_loss/train': 0.9873572085052729} +12/21/2021 15:16:39 - INFO - codeparrot_training - Step 207: {'lr': 0.0004999994911205034, 'samples': 106496, 'steps': 207, 'batch_loss/train': 1.0310314800590277} +12/21/2021 15:16:51 - INFO - codeparrot_training - Step 208: {'lr': 0.0004999994588185939, 'samples': 107008, 'steps': 208, 'batch_loss/train': 0.9547451809048653} +12/21/2021 15:17:02 - INFO - codeparrot_training - Step 209: {'lr': 0.0004999994255227809, 'samples': 107520, 'steps': 209, 'batch_loss/train': 0.9218645989894867} +12/21/2021 15:17:13 - INFO - codeparrot_training - Step 210: {'lr': 0.0004999993912330646, 'samples': 108032, 'steps': 210, 'batch_loss/train': 0.94939138693735} +12/21/2021 15:17:25 - INFO - codeparrot_training - Step 211: {'lr': 0.0004999993559494452, 'samples': 108544, 'steps': 211, 'batch_loss/train': 0.9946675049141049} +12/21/2021 15:17:36 - INFO - codeparrot_training - Step 212: {'lr': 0.0004999993196719226, 'samples': 109056, 'steps': 212, 'batch_loss/train': 0.9801458483561873} +12/21/2021 15:17:47 - INFO - codeparrot_training - Step 213: {'lr': 0.0004999992824004972, 'samples': 109568, 'steps': 213, 'batch_loss/train': 0.9083622833713889} +12/21/2021 15:17:57 - INFO - codeparrot_training - Step 
214: {'lr': 0.0004999992441351691, 'samples': 110080, 'steps': 214, 'batch_loss/train': 0.8982020476832986} +12/21/2021 15:18:09 - INFO - codeparrot_training - Step 215: {'lr': 0.0004999992048759383, 'samples': 110592, 'steps': 215, 'batch_loss/train': 0.9808334819972515} +12/21/2021 15:18:20 - INFO - codeparrot_training - Step 216: {'lr': 0.0004999991646228051, 'samples': 111104, 'steps': 216, 'batch_loss/train': 0.9249397749081254} +12/21/2021 15:18:31 - INFO - codeparrot_training - Step 217: {'lr': 0.0004999991233757696, 'samples': 111616, 'steps': 217, 'batch_loss/train': 0.8354386785067618} +12/21/2021 15:18:43 - INFO - codeparrot_training - Step 218: {'lr': 0.000499999081134832, 'samples': 112128, 'steps': 218, 'batch_loss/train': 0.8332201889716089} +12/21/2021 15:18:53 - INFO - codeparrot_training - Step 219: {'lr': 0.0004999990378999925, 'samples': 112640, 'steps': 219, 'batch_loss/train': 0.9582055304199457} +12/21/2021 15:19:04 - INFO - codeparrot_training - Step 220: {'lr': 0.0004999989936712512, 'samples': 113152, 'steps': 220, 'batch_loss/train': 1.078530428931117} +12/21/2021 15:19:16 - INFO - codeparrot_training - Step 221: {'lr': 0.0004999989484486082, 'samples': 113664, 'steps': 221, 'batch_loss/train': 1.0509033929556608} +12/21/2021 15:19:27 - INFO - codeparrot_training - Step 222: {'lr': 0.0004999989022320638, 'samples': 114176, 'steps': 222, 'batch_loss/train': 1.0024668099358678} +12/21/2021 15:19:37 - INFO - codeparrot_training - Step 223: {'lr': 0.0004999988550216182, 'samples': 114688, 'steps': 223, 'batch_loss/train': 0.9155860748142004} +12/21/2021 15:19:48 - INFO - codeparrot_training - Step 224: {'lr': 0.0004999988068172715, 'samples': 115200, 'steps': 224, 'batch_loss/train': 1.0199661303777248} +12/21/2021 15:20:00 - INFO - codeparrot_training - Step 225: {'lr': 0.000499998757619024, 'samples': 115712, 'steps': 225, 'batch_loss/train': 0.9875146131962538} +12/21/2021 15:20:11 - INFO - codeparrot_training - Step 226: {'lr': 0.0004999987074268758, 'samples': 116224, 'steps': 226, 'batch_loss/train': 0.9970517549663782} +12/21/2021 15:20:21 - INFO - codeparrot_training - Step 227: {'lr': 0.0004999986562408272, 'samples': 116736, 'steps': 227, 'batch_loss/train': 0.9353131148964167} +12/21/2021 15:20:33 - INFO - codeparrot_training - Step 228: {'lr': 0.0004999986040608782, 'samples': 117248, 'steps': 228, 'batch_loss/train': 0.9688568478450179} +12/21/2021 15:20:44 - INFO - codeparrot_training - Step 229: {'lr': 0.0004999985508870293, 'samples': 117760, 'steps': 229, 'batch_loss/train': 0.982265992090106} +12/21/2021 15:20:55 - INFO - codeparrot_training - Step 230: {'lr': 0.0004999984967192806, 'samples': 118272, 'steps': 230, 'batch_loss/train': 0.9376679211854935} +12/21/2021 15:21:07 - INFO - codeparrot_training - Step 231: {'lr': 0.0004999984415576321, 'samples': 118784, 'steps': 231, 'batch_loss/train': 0.955386396497488} +12/21/2021 15:21:17 - INFO - codeparrot_training - Step 232: {'lr': 0.0004999983854020844, 'samples': 119296, 'steps': 232, 'batch_loss/train': 1.0152222216129303} +12/21/2021 15:21:28 - INFO - codeparrot_training - Step 233: {'lr': 0.0004999983282526373, 'samples': 119808, 'steps': 233, 'batch_loss/train': 1.0677617341279984} +12/21/2021 15:21:38 - INFO - codeparrot_training - Step 234: {'lr': 0.0004999982701092912, 'samples': 120320, 'steps': 234, 'batch_loss/train': 0.9254444502294064} +12/21/2021 15:21:51 - INFO - codeparrot_training - Step 235: {'lr': 0.0004999982109720466, 'samples': 120832, 'steps': 235, 'batch_loss/train': 
0.9195733573287725} +12/21/2021 15:22:02 - INFO - codeparrot_training - Step 236: {'lr': 0.0004999981508409034, 'samples': 121344, 'steps': 236, 'batch_loss/train': 1.5990784456953406} +12/21/2021 15:22:12 - INFO - codeparrot_training - Step 237: {'lr': 0.0004999980897158619, 'samples': 121856, 'steps': 237, 'batch_loss/train': 0.964858983643353} +12/21/2021 15:22:25 - INFO - codeparrot_training - Step 238: {'lr': 0.0004999980275969224, 'samples': 122368, 'steps': 238, 'batch_loss/train': 0.9365309840068221} +12/21/2021 15:22:35 - INFO - codeparrot_training - Step 239: {'lr': 0.0004999979644840852, 'samples': 122880, 'steps': 239, 'batch_loss/train': 1.0039056641981006} +12/21/2021 15:22:46 - INFO - codeparrot_training - Step 240: {'lr': 0.0004999979003773504, 'samples': 123392, 'steps': 240, 'batch_loss/train': 0.993403622880578} +12/21/2021 15:22:56 - INFO - codeparrot_training - Step 241: {'lr': 0.0004999978352767185, 'samples': 123904, 'steps': 241, 'batch_loss/train': 0.9287360450252891} +12/21/2021 15:23:09 - INFO - codeparrot_training - Step 242: {'lr': 0.0004999977691821896, 'samples': 124416, 'steps': 242, 'batch_loss/train': 0.8893203837797046} +12/21/2021 15:23:19 - INFO - codeparrot_training - Step 243: {'lr': 0.0004999977020937638, 'samples': 124928, 'steps': 243, 'batch_loss/train': 0.9247511001303792} +12/21/2021 15:23:30 - INFO - codeparrot_training - Step 244: {'lr': 0.0004999976340114416, 'samples': 125440, 'steps': 244, 'batch_loss/train': 0.9693126305937767} +12/21/2021 15:23:42 - INFO - codeparrot_training - Step 245: {'lr': 0.0004999975649352233, 'samples': 125952, 'steps': 245, 'batch_loss/train': 0.8935371916741133} +12/21/2021 15:23:53 - INFO - codeparrot_training - Step 246: {'lr': 0.0004999974948651089, 'samples': 126464, 'steps': 246, 'batch_loss/train': 0.9815532034263015} +12/21/2021 15:24:03 - INFO - codeparrot_training - Step 247: {'lr': 0.000499997423801099, 'samples': 126976, 'steps': 247, 'batch_loss/train': 2.1787368413060904} +12/21/2021 15:24:15 - INFO - codeparrot_training - Step 248: {'lr': 0.0004999973517431937, 'samples': 127488, 'steps': 248, 'batch_loss/train': 0.9655200140550733} +12/21/2021 15:24:26 - INFO - codeparrot_training - Step 249: {'lr': 0.0004999972786913934, 'samples': 128000, 'steps': 249, 'batch_loss/train': 1.0043728100135922} +12/21/2021 15:24:36 - INFO - codeparrot_training - Step 250: {'lr': 0.0004999972046456981, 'samples': 128512, 'steps': 250, 'batch_loss/train': 0.9132165648043156} +12/21/2021 15:24:49 - INFO - codeparrot_training - Step 251: {'lr': 0.0004999971296061085, 'samples': 129024, 'steps': 251, 'batch_loss/train': 1.0024230564013124} +12/21/2021 15:25:00 - INFO - codeparrot_training - Step 252: {'lr': 0.0004999970535726246, 'samples': 129536, 'steps': 252, 'batch_loss/train': 0.9403008110821247} +12/21/2021 15:25:10 - INFO - codeparrot_training - Step 253: {'lr': 0.0004999969765452468, 'samples': 130048, 'steps': 253, 'batch_loss/train': 0.9630314344540238} +12/21/2021 15:25:21 - INFO - codeparrot_training - Step 254: {'lr': 0.0004999968985239755, 'samples': 130560, 'steps': 254, 'batch_loss/train': 1.0133339166641235} +12/21/2021 15:25:33 - INFO - codeparrot_training - Step 255: {'lr': 0.0004999968195088109, 'samples': 131072, 'steps': 255, 'batch_loss/train': 0.9787106551229954} +12/21/2021 15:25:43 - INFO - codeparrot_training - Step 256: {'lr': 0.0004999967394997534, 'samples': 131584, 'steps': 256, 'batch_loss/train': 0.9070568662136793} +12/21/2021 15:25:54 - INFO - codeparrot_training - Step 257: {'lr': 
0.0004999966584968033, 'samples': 132096, 'steps': 257, 'batch_loss/train': 0.8569030165672302} +12/21/2021 15:26:06 - INFO - codeparrot_training - Step 258: {'lr': 0.0004999965764999607, 'samples': 132608, 'steps': 258, 'batch_loss/train': 0.9390520844608545} +12/21/2021 15:26:17 - INFO - codeparrot_training - Step 259: {'lr': 0.0004999964935092262, 'samples': 133120, 'steps': 259, 'batch_loss/train': 0.9352331347763538} +12/21/2021 15:26:27 - INFO - codeparrot_training - Step 260: {'lr': 0.0004999964095245999, 'samples': 133632, 'steps': 260, 'batch_loss/train': 1.0299120405688882} +12/21/2021 15:26:40 - INFO - codeparrot_training - Step 261: {'lr': 0.0004999963245460824, 'samples': 134144, 'steps': 261, 'batch_loss/train': 0.9368714848533273} +12/21/2021 15:26:50 - INFO - codeparrot_training - Step 262: {'lr': 0.0004999962385736739, 'samples': 134656, 'steps': 262, 'batch_loss/train': 0.8667335538193583} +12/21/2021 15:27:01 - INFO - codeparrot_training - Step 263: {'lr': 0.0004999961516073748, 'samples': 135168, 'steps': 263, 'batch_loss/train': 1.0675992406904697} +12/21/2021 15:27:12 - INFO - codeparrot_training - Step 264: {'lr': 0.0004999960636471853, 'samples': 135680, 'steps': 264, 'batch_loss/train': 0.9339342024177313} +12/21/2021 15:27:24 - INFO - codeparrot_training - Step 265: {'lr': 0.000499995974693106, 'samples': 136192, 'steps': 265, 'batch_loss/train': 0.9612384159117937} +12/21/2021 15:27:34 - INFO - codeparrot_training - Step 266: {'lr': 0.0004999958847451369, 'samples': 136704, 'steps': 266, 'batch_loss/train': 1.006809951737523} +12/21/2021 15:27:45 - INFO - codeparrot_training - Step 267: {'lr': 0.0004999957938032787, 'samples': 137216, 'steps': 267, 'batch_loss/train': 0.9956084182485938} +12/21/2021 15:27:57 - INFO - codeparrot_training - Step 268: {'lr': 0.0004999957018675316, 'samples': 137728, 'steps': 268, 'batch_loss/train': 0.9225286403670907} +12/21/2021 15:28:07 - INFO - codeparrot_training - Step 269: {'lr': 0.0004999956089378959, 'samples': 138240, 'steps': 269, 'batch_loss/train': 1.0786495171487331} +12/21/2021 15:28:18 - INFO - codeparrot_training - Step 270: {'lr': 0.0004999955150143721, 'samples': 138752, 'steps': 270, 'batch_loss/train': 0.9791294615715742} +12/21/2021 15:28:30 - INFO - codeparrot_training - Step 271: {'lr': 0.0004999954200969605, 'samples': 139264, 'steps': 271, 'batch_loss/train': 0.9864300889894366} +12/21/2021 15:28:41 - INFO - codeparrot_training - Step 272: {'lr': 0.0004999953241856616, 'samples': 139776, 'steps': 272, 'batch_loss/train': 0.9177341545000672} +12/21/2021 15:28:51 - INFO - codeparrot_training - Step 273: {'lr': 0.0004999952272804756, 'samples': 140288, 'steps': 273, 'batch_loss/train': 0.946137722581625} +12/21/2021 15:29:05 - INFO - codeparrot_training - Step 274: {'lr': 0.000499995129381403, 'samples': 140800, 'steps': 274, 'batch_loss/train': 0.9582210006192327} +12/21/2021 15:29:15 - INFO - codeparrot_training - Step 275: {'lr': 0.0004999950304884442, 'samples': 141312, 'steps': 275, 'batch_loss/train': 0.9708927199244499} +12/21/2021 15:29:26 - INFO - codeparrot_training - Step 276: {'lr': 0.0004999949306015995, 'samples': 141824, 'steps': 276, 'batch_loss/train': 1.87380766030401} +12/21/2021 15:29:36 - INFO - codeparrot_training - Step 277: {'lr': 0.0004999948297208693, 'samples': 142336, 'steps': 277, 'batch_loss/train': 1.1308678593486547} +12/21/2021 15:29:48 - INFO - codeparrot_training - Step 278: {'lr': 0.0004999947278462541, 'samples': 142848, 'steps': 278, 'batch_loss/train': 1.020630268380046} 
+12/21/2021 15:29:59 - INFO - codeparrot_training - Step 279: {'lr': 0.0004999946249777543, 'samples': 143360, 'steps': 279, 'batch_loss/train': 0.9665484940633178} +12/21/2021 15:30:09 - INFO - codeparrot_training - Step 280: {'lr': 0.0004999945211153703, 'samples': 143872, 'steps': 280, 'batch_loss/train': 1.0033400291576982} +12/21/2021 15:30:20 - INFO - codeparrot_training - Step 281: {'lr': 0.0004999944162591024, 'samples': 144384, 'steps': 281, 'batch_loss/train': 0.9401888307183981} +12/21/2021 15:30:33 - INFO - codeparrot_training - Step 282: {'lr': 0.0004999943104089511, 'samples': 144896, 'steps': 282, 'batch_loss/train': 1.0101582873612642} +12/21/2021 15:30:43 - INFO - codeparrot_training - Step 283: {'lr': 0.0004999942035649168, 'samples': 145408, 'steps': 283, 'batch_loss/train': 1.0264825131744146} +12/21/2021 15:30:54 - INFO - codeparrot_training - Step 284: {'lr': 0.0004999940957269999, 'samples': 145920, 'steps': 284, 'batch_loss/train': 0.9054904184304178} +12/21/2021 15:31:06 - INFO - codeparrot_training - Step 285: {'lr': 0.0004999939868952009, 'samples': 146432, 'steps': 285, 'batch_loss/train': 0.8634658250957727} +12/21/2021 15:31:16 - INFO - codeparrot_training - Step 286: {'lr': 0.0004999938770695203, 'samples': 146944, 'steps': 286, 'batch_loss/train': 1.0623836005106568} +12/21/2021 15:31:27 - INFO - codeparrot_training - Step 287: {'lr': 0.0004999937662499585, 'samples': 147456, 'steps': 287, 'batch_loss/train': 0.9658253453671932} +12/21/2021 15:31:39 - INFO - codeparrot_training - Step 288: {'lr': 0.0004999936544365156, 'samples': 147968, 'steps': 288, 'batch_loss/train': 0.9038360472768545} +12/21/2021 15:31:49 - INFO - codeparrot_training - Step 289: {'lr': 0.0004999935416291926, 'samples': 148480, 'steps': 289, 'batch_loss/train': 0.9555488238111138} +12/21/2021 15:32:00 - INFO - codeparrot_training - Step 290: {'lr': 0.0004999934278279894, 'samples': 148992, 'steps': 290, 'batch_loss/train': 0.9170712037011981} +12/21/2021 15:32:13 - INFO - codeparrot_training - Step 291: {'lr': 0.0004999933130329069, 'samples': 149504, 'steps': 291, 'batch_loss/train': 0.8491080598905683} +12/21/2021 15:32:23 - INFO - codeparrot_training - Step 292: {'lr': 0.0004999931972439454, 'samples': 150016, 'steps': 292, 'batch_loss/train': 0.9852352160960436} +12/21/2021 15:32:34 - INFO - codeparrot_training - Step 293: {'lr': 0.0004999930804611052, 'samples': 150528, 'steps': 293, 'batch_loss/train': 0.8676561275497079} +12/21/2021 15:32:45 - INFO - codeparrot_training - Step 294: {'lr': 0.0004999929626843871, 'samples': 151040, 'steps': 294, 'batch_loss/train': 0.957035600207746} +12/21/2021 15:32:57 - INFO - codeparrot_training - Step 295: {'lr': 0.0004999928439137913, 'samples': 151552, 'steps': 295, 'batch_loss/train': 0.8217380112037063} +12/21/2021 15:33:07 - INFO - codeparrot_training - Step 296: {'lr': 0.0004999927241493183, 'samples': 152064, 'steps': 296, 'batch_loss/train': 0.9880850934423506} +12/21/2021 15:33:18 - INFO - codeparrot_training - Step 297: {'lr': 0.0004999926033909686, 'samples': 152576, 'steps': 297, 'batch_loss/train': 0.975646959617734} +12/21/2021 15:33:31 - INFO - codeparrot_training - Step 298: {'lr': 0.0004999924816387428, 'samples': 153088, 'steps': 298, 'batch_loss/train': 0.9518024520948529} +12/21/2021 15:33:41 - INFO - codeparrot_training - Step 299: {'lr': 0.0004999923588926412, 'samples': 153600, 'steps': 299, 'batch_loss/train': 0.9249488320201635} +12/21/2021 15:33:52 - INFO - codeparrot_training - Step 300: {'lr': 0.0004999922351526644, 
'samples': 154112, 'steps': 300, 'batch_loss/train': 1.021987003274262} +12/21/2021 15:34:04 - INFO - codeparrot_training - Step 301: {'lr': 0.0004999921104188129, 'samples': 154624, 'steps': 301, 'batch_loss/train': 0.944774815812707} +12/21/2021 15:34:15 - INFO - codeparrot_training - Step 302: {'lr': 0.0004999919846910871, 'samples': 155136, 'steps': 302, 'batch_loss/train': 1.0029222639277577} +12/21/2021 15:34:25 - INFO - codeparrot_training - Step 303: {'lr': 0.0004999918579694876, 'samples': 155648, 'steps': 303, 'batch_loss/train': 0.8526419843547046} +12/21/2021 15:34:36 - INFO - codeparrot_training - Step 304: {'lr': 0.0004999917302540149, 'samples': 156160, 'steps': 304, 'batch_loss/train': 0.9772113729268312} +12/21/2021 15:34:48 - INFO - codeparrot_training - Step 305: {'lr': 0.0004999916015446694, 'samples': 156672, 'steps': 305, 'batch_loss/train': 0.9436007365584373} +12/21/2021 15:34:58 - INFO - codeparrot_training - Step 306: {'lr': 0.0004999914718414517, 'samples': 157184, 'steps': 306, 'batch_loss/train': 1.0183395985513926} +12/21/2021 15:35:09 - INFO - codeparrot_training - Step 307: {'lr': 0.0004999913411443623, 'samples': 157696, 'steps': 307, 'batch_loss/train': 0.945149693172425} +12/21/2021 15:35:21 - INFO - codeparrot_training - Step 308: {'lr': 0.0004999912094534019, 'samples': 158208, 'steps': 308, 'batch_loss/train': 1.0343824811279774} +12/21/2021 15:35:32 - INFO - codeparrot_training - Step 309: {'lr': 0.0004999910767685705, 'samples': 158720, 'steps': 309, 'batch_loss/train': 0.8817191179841757} +12/21/2021 15:35:42 - INFO - codeparrot_training - Step 310: {'lr': 0.0004999909430898693, 'samples': 159232, 'steps': 310, 'batch_loss/train': 1.0754667315632105} +12/21/2021 15:35:55 - INFO - codeparrot_training - Step 311: {'lr': 0.0004999908084172982, 'samples': 159744, 'steps': 311, 'batch_loss/train': 0.8908740505576134} +12/21/2021 15:36:05 - INFO - codeparrot_training - Step 312: {'lr': 0.0004999906727508583, 'samples': 160256, 'steps': 312, 'batch_loss/train': 1.0570627879351377} +12/21/2021 15:36:16 - INFO - codeparrot_training - Step 313: {'lr': 0.0004999905360905497, 'samples': 160768, 'steps': 313, 'batch_loss/train': 0.9630824876949191} +12/21/2021 15:36:27 - INFO - codeparrot_training - Step 314: {'lr': 0.0004999903984363733, 'samples': 161280, 'steps': 314, 'batch_loss/train': 0.9628634955734015} +12/21/2021 15:36:39 - INFO - codeparrot_training - Step 315: {'lr': 0.0004999902597883294, 'samples': 161792, 'steps': 315, 'batch_loss/train': 1.0564095564186573} +12/21/2021 15:36:49 - INFO - codeparrot_training - Step 316: {'lr': 0.0004999901201464185, 'samples': 162304, 'steps': 316, 'batch_loss/train': 0.9426975306123495} +12/21/2021 15:37:00 - INFO - codeparrot_training - Step 317: {'lr': 0.0004999899795106414, 'samples': 162816, 'steps': 317, 'batch_loss/train': 0.9392783408984542} +12/21/2021 15:37:12 - INFO - codeparrot_training - Step 318: {'lr': 0.0004999898378809985, 'samples': 163328, 'steps': 318, 'batch_loss/train': 0.9344945745542645} +12/21/2021 15:37:22 - INFO - codeparrot_training - Step 319: {'lr': 0.0004999896952574904, 'samples': 163840, 'steps': 319, 'batch_loss/train': 0.9809899972751737} +12/21/2021 15:37:33 - INFO - codeparrot_training - Step 320: {'lr': 0.0004999895516401176, 'samples': 164352, 'steps': 320, 'batch_loss/train': 1.0894929263740778} +12/21/2021 15:37:46 - INFO - codeparrot_training - Step 321: {'lr': 0.0004999894070288808, 'samples': 164864, 'steps': 321, 'batch_loss/train': 0.9599240012466908} +12/21/2021 
15:37:56 - INFO - codeparrot_training - Step 322: {'lr': 0.0004999892614237806, 'samples': 165376, 'steps': 322, 'batch_loss/train': 0.8795394534245133} +12/21/2021 15:38:07 - INFO - codeparrot_training - Step 323: {'lr': 0.0004999891148248174, 'samples': 165888, 'steps': 323, 'batch_loss/train': 0.9449589308351278} +12/21/2021 15:38:17 - INFO - codeparrot_training - Step 324: {'lr': 0.0004999889672319918, 'samples': 166400, 'steps': 324, 'batch_loss/train': 0.9080932587385178} +12/21/2021 15:38:30 - INFO - codeparrot_training - Step 325: {'lr': 0.0004999888186453046, 'samples': 166912, 'steps': 325, 'batch_loss/train': 1.0157170034945011} +12/21/2021 15:38:40 - INFO - codeparrot_training - Step 326: {'lr': 0.0004999886690647561, 'samples': 167424, 'steps': 326, 'batch_loss/train': 0.934957368299365} +12/21/2021 15:38:51 - INFO - codeparrot_training - Step 327: {'lr': 0.000499988518490347, 'samples': 167936, 'steps': 327, 'batch_loss/train': 0.8197665601037443} +12/21/2021 15:39:03 - INFO - codeparrot_training - Step 328: {'lr': 0.0004999883669220782, 'samples': 168448, 'steps': 328, 'batch_loss/train': 0.9284519851207733} +12/21/2021 15:39:14 - INFO - codeparrot_training - Step 329: {'lr': 0.0004999882143599499, 'samples': 168960, 'steps': 329, 'batch_loss/train': 0.9782567238435149} +12/21/2021 15:39:25 - INFO - codeparrot_training - Step 330: {'lr': 0.0004999880608039628, 'samples': 169472, 'steps': 330, 'batch_loss/train': 1.0394312385469675} +12/21/2021 15:39:36 - INFO - codeparrot_training - Step 331: {'lr': 0.0004999879062541177, 'samples': 169984, 'steps': 331, 'batch_loss/train': 0.9363724086433649} +12/21/2021 15:39:47 - INFO - codeparrot_training - Step 332: {'lr': 0.000499987750710415, 'samples': 170496, 'steps': 332, 'batch_loss/train': 0.9781676828861237} +12/21/2021 15:39:58 - INFO - codeparrot_training - Step 333: {'lr': 0.0004999875941728554, 'samples': 171008, 'steps': 333, 'batch_loss/train': 0.9209995167329907} +12/21/2021 15:40:08 - INFO - codeparrot_training - Step 334: {'lr': 0.0004999874366414395, 'samples': 171520, 'steps': 334, 'batch_loss/train': 1.0243413960561156} +12/21/2021 15:40:20 - INFO - codeparrot_training - Step 335: {'lr': 0.0004999872781161679, 'samples': 172032, 'steps': 335, 'batch_loss/train': 1.0678168572485447} +12/21/2021 15:40:31 - INFO - codeparrot_training - Step 336: {'lr': 0.0004999871185970413, 'samples': 172544, 'steps': 336, 'batch_loss/train': 0.9590579112991691} +12/21/2021 15:40:41 - INFO - codeparrot_training - Step 337: {'lr': 0.0004999869580840603, 'samples': 173056, 'steps': 337, 'batch_loss/train': 0.9584408421069384} +12/21/2021 15:40:55 - INFO - codeparrot_training - Step 338: {'lr': 0.0004999867965772256, 'samples': 173568, 'steps': 338, 'batch_loss/train': 1.0288824839517474} +12/21/2021 15:41:05 - INFO - codeparrot_training - Step 339: {'lr': 0.0004999866340765378, 'samples': 174080, 'steps': 339, 'batch_loss/train': 1.0880566388368607} +12/21/2021 15:41:16 - INFO - codeparrot_training - Step 340: {'lr': 0.0004999864705819975, 'samples': 174592, 'steps': 340, 'batch_loss/train': 1.0811644373461604} +12/21/2021 15:41:28 - INFO - codeparrot_training - Step 341: {'lr': 0.0004999863060936054, 'samples': 175104, 'steps': 341, 'batch_loss/train': 1.671540541574359} +12/21/2021 15:41:38 - INFO - codeparrot_training - Step 342: {'lr': 0.0004999861406113622, 'samples': 175616, 'steps': 342, 'batch_loss/train': 0.9352710619568825} +12/21/2021 15:41:49 - INFO - codeparrot_training - Step 343: {'lr': 0.0004999859741352683, 'samples': 
176128, 'steps': 343, 'batch_loss/train': 1.0104450918734074} +12/21/2021 15:41:59 - INFO - codeparrot_training - Step 344: {'lr': 0.0004999858066653247, 'samples': 176640, 'steps': 344, 'batch_loss/train': 1.02015068102628} +12/21/2021 15:42:12 - INFO - codeparrot_training - Step 345: {'lr': 0.0004999856382015319, 'samples': 177152, 'steps': 345, 'batch_loss/train': 0.9521654164418578} +12/21/2021 15:42:23 - INFO - codeparrot_training - Step 346: {'lr': 0.0004999854687438906, 'samples': 177664, 'steps': 346, 'batch_loss/train': 0.9196355575695634} +12/21/2021 15:42:33 - INFO - codeparrot_training - Step 347: {'lr': 0.0004999852982924015, 'samples': 178176, 'steps': 347, 'batch_loss/train': 0.9492135313339531} +12/21/2021 15:42:45 - INFO - codeparrot_training - Step 348: {'lr': 0.0004999851268470652, 'samples': 178688, 'steps': 348, 'batch_loss/train': 1.071784246712923} +12/21/2021 15:42:56 - INFO - codeparrot_training - Step 349: {'lr': 0.0004999849544078823, 'samples': 179200, 'steps': 349, 'batch_loss/train': 1.1324431374669075} +12/21/2021 15:43:07 - INFO - codeparrot_training - Step 350: {'lr': 0.0004999847809748539, 'samples': 179712, 'steps': 350, 'batch_loss/train': 0.7901751389726996} +12/21/2021 15:43:19 - INFO - codeparrot_training - Step 351: {'lr': 0.0004999846065479802, 'samples': 180224, 'steps': 351, 'batch_loss/train': 0.9234656607732177} +12/21/2021 15:43:29 - INFO - codeparrot_training - Step 352: {'lr': 0.0004999844311272621, 'samples': 180736, 'steps': 352, 'batch_loss/train': 0.9884148836135864} +12/21/2021 15:43:40 - INFO - codeparrot_training - Step 353: {'lr': 0.0004999842547127003, 'samples': 181248, 'steps': 353, 'batch_loss/train': 1.0030706701800227} +12/21/2021 15:43:50 - INFO - codeparrot_training - Step 354: {'lr': 0.0004999840773042956, 'samples': 181760, 'steps': 354, 'batch_loss/train': 0.930622486397624} +12/21/2021 15:44:02 - INFO - codeparrot_training - Step 355: {'lr': 0.0004999838989020486, 'samples': 182272, 'steps': 355, 'batch_loss/train': 0.9351348020136356} +12/21/2021 15:44:13 - INFO - codeparrot_training - Step 356: {'lr': 0.0004999837195059599, 'samples': 182784, 'steps': 356, 'batch_loss/train': 1.0733826979994774} +12/21/2021 15:44:24 - INFO - codeparrot_training - Step 357: {'lr': 0.0004999835391160303, 'samples': 183296, 'steps': 357, 'batch_loss/train': 1.010494620539248} +12/21/2021 15:44:36 - INFO - codeparrot_training - Step 358: {'lr': 0.0004999833577322607, 'samples': 183808, 'steps': 358, 'batch_loss/train': 0.9469421803951263} +12/21/2021 15:44:47 - INFO - codeparrot_training - Step 359: {'lr': 0.0004999831753546514, 'samples': 184320, 'steps': 359, 'batch_loss/train': 0.9487142357975245} +12/21/2021 15:44:58 - INFO - codeparrot_training - Step 360: {'lr': 0.0004999829919832036, 'samples': 184832, 'steps': 360, 'batch_loss/train': 0.9500178750604391} +12/21/2021 15:45:10 - INFO - codeparrot_training - Step 361: {'lr': 0.0004999828076179178, 'samples': 185344, 'steps': 361, 'batch_loss/train': 0.9616537699475884} +12/21/2021 15:45:20 - INFO - codeparrot_training - Step 362: {'lr': 0.0004999826222587947, 'samples': 185856, 'steps': 362, 'batch_loss/train': 0.9094876050949097} +12/21/2021 15:45:31 - INFO - codeparrot_training - Step 363: {'lr': 0.000499982435905835, 'samples': 186368, 'steps': 363, 'batch_loss/train': 1.0322210416197777} +12/21/2021 15:45:43 - INFO - codeparrot_training - Step 364: {'lr': 0.0004999822485590396, 'samples': 186880, 'steps': 364, 'batch_loss/train': 1.1324112638831139} +12/21/2021 15:45:53 - INFO - 
codeparrot_training - Step 365: {'lr': 0.0004999820602184092, 'samples': 187392, 'steps': 365, 'batch_loss/train': 0.9534476650878787} +12/21/2021 15:46:04 - INFO - codeparrot_training - Step 366: {'lr': 0.0004999818708839445, 'samples': 187904, 'steps': 366, 'batch_loss/train': 1.0070430897176266} +12/21/2021 15:46:15 - INFO - codeparrot_training - Step 367: {'lr': 0.0004999816805556463, 'samples': 188416, 'steps': 367, 'batch_loss/train': 0.8387978570535779} +12/21/2021 15:46:27 - INFO - codeparrot_training - Step 368: {'lr': 0.0004999814892335153, 'samples': 188928, 'steps': 368, 'batch_loss/train': 0.9353106217458844} +12/21/2021 15:46:38 - INFO - codeparrot_training - Step 369: {'lr': 0.0004999812969175522, 'samples': 189440, 'steps': 369, 'batch_loss/train': 0.9284348818473518} +12/21/2021 15:46:49 - INFO - codeparrot_training - Step 370: {'lr': 0.0004999811036077579, 'samples': 189952, 'steps': 370, 'batch_loss/train': 0.9561633160337806} +12/21/2021 15:47:01 - INFO - codeparrot_training - Step 371: {'lr': 0.0004999809093041332, 'samples': 190464, 'steps': 371, 'batch_loss/train': 0.9692120300605893} +12/21/2021 15:47:11 - INFO - codeparrot_training - Step 372: {'lr': 0.0004999807140066788, 'samples': 190976, 'steps': 372, 'batch_loss/train': 1.0437386203557253} +12/21/2021 15:47:22 - INFO - codeparrot_training - Step 373: {'lr': 0.0004999805177153954, 'samples': 191488, 'steps': 373, 'batch_loss/train': 1.045035165734589} +12/21/2021 15:47:34 - INFO - codeparrot_training - Step 374: {'lr': 0.0004999803204302839, 'samples': 192000, 'steps': 374, 'batch_loss/train': 0.8727764482609928} +12/21/2021 15:47:44 - INFO - codeparrot_training - Step 375: {'lr': 0.0004999801221513449, 'samples': 192512, 'steps': 375, 'batch_loss/train': 0.8536123684607446} +12/21/2021 15:47:55 - INFO - codeparrot_training - Step 376: {'lr': 0.0004999799228785796, 'samples': 193024, 'steps': 376, 'batch_loss/train': 0.9042056417092681} +12/21/2021 15:48:05 - INFO - codeparrot_training - Step 377: {'lr': 0.0004999797226119882, 'samples': 193536, 'steps': 377, 'batch_loss/train': 0.9008130421862006} +12/21/2021 15:48:18 - INFO - codeparrot_training - Step 378: {'lr': 0.0004999795213515719, 'samples': 194048, 'steps': 378, 'batch_loss/train': 0.9194910684600472} +12/21/2021 15:48:29 - INFO - codeparrot_training - Step 379: {'lr': 0.0004999793190973316, 'samples': 194560, 'steps': 379, 'batch_loss/train': 0.9500436671078205} +12/21/2021 15:48:39 - INFO - codeparrot_training - Step 380: {'lr': 0.0004999791158492678, 'samples': 195072, 'steps': 380, 'batch_loss/train': 0.9273672616109252} +12/21/2021 15:48:51 - INFO - codeparrot_training - Step 381: {'lr': 0.0004999789116073814, 'samples': 195584, 'steps': 381, 'batch_loss/train': 1.0207349183037877} +12/21/2021 15:49:02 - INFO - codeparrot_training - Step 382: {'lr': 0.0004999787063716732, 'samples': 196096, 'steps': 382, 'batch_loss/train': 0.9625328816473484} +12/21/2021 15:49:13 - INFO - codeparrot_training - Step 383: {'lr': 0.000499978500142144, 'samples': 196608, 'steps': 383, 'batch_loss/train': 0.7469302010722458} +12/21/2021 15:49:25 - INFO - codeparrot_training - Step 384: {'lr': 0.0004999782929187948, 'samples': 197120, 'steps': 384, 'batch_loss/train': 0.9214018918573856} +12/21/2021 15:49:36 - INFO - codeparrot_training - Step 385: {'lr': 0.0004999780847016263, 'samples': 197632, 'steps': 385, 'batch_loss/train': 1.0566344875842333} +12/21/2021 15:49:46 - INFO - codeparrot_training - Step 386: {'lr': 0.0004999778754906394, 'samples': 198144, 'steps': 
386, 'batch_loss/train': 0.9904032200574875} +12/21/2021 15:49:58 - INFO - codeparrot_training - Step 387: {'lr': 0.0004999776652858346, 'samples': 198656, 'steps': 387, 'batch_loss/train': 1.0329768899828196} +12/21/2021 15:50:09 - INFO - codeparrot_training - Step 388: {'lr': 0.0004999774540872132, 'samples': 199168, 'steps': 388, 'batch_loss/train': 0.9588639475405216} +12/21/2021 15:50:20 - INFO - codeparrot_training - Step 389: {'lr': 0.0004999772418947758, 'samples': 199680, 'steps': 389, 'batch_loss/train': 0.9446934647858143} +12/21/2021 15:50:30 - INFO - codeparrot_training - Step 390: {'lr': 0.0004999770287085232, 'samples': 200192, 'steps': 390, 'batch_loss/train': 0.9255288196727633} +12/21/2021 15:50:42 - INFO - codeparrot_training - Step 391: {'lr': 0.0004999768145284564, 'samples': 200704, 'steps': 391, 'batch_loss/train': 0.9534507757052779} +12/21/2021 15:50:53 - INFO - codeparrot_training - Step 392: {'lr': 0.000499976599354576, 'samples': 201216, 'steps': 392, 'batch_loss/train': 1.0267576836049557} +12/21/2021 15:51:03 - INFO - codeparrot_training - Step 393: {'lr': 0.0004999763831868832, 'samples': 201728, 'steps': 393, 'batch_loss/train': 0.9819985963404179} +12/21/2021 15:51:16 - INFO - codeparrot_training - Step 394: {'lr': 0.0004999761660253786, 'samples': 202240, 'steps': 394, 'batch_loss/train': 0.9275947390124202} +12/21/2021 15:51:26 - INFO - codeparrot_training - Step 395: {'lr': 0.0004999759478700632, 'samples': 202752, 'steps': 395, 'batch_loss/train': 0.9359030965715647} +12/21/2021 15:51:37 - INFO - codeparrot_training - Step 396: {'lr': 0.0004999757287209379, 'samples': 203264, 'steps': 396, 'batch_loss/train': 0.9917447231709957} +12/21/2021 15:51:50 - INFO - codeparrot_training - Step 397: {'lr': 0.0004999755085780033, 'samples': 203776, 'steps': 397, 'batch_loss/train': 0.8854732243344188} +12/21/2021 15:52:00 - INFO - codeparrot_training - Step 398: {'lr': 0.0004999752874412606, 'samples': 204288, 'steps': 398, 'batch_loss/train': 0.9453186276368797} +12/21/2021 15:52:11 - INFO - codeparrot_training - Step 399: {'lr': 0.0004999750653107106, 'samples': 204800, 'steps': 399, 'batch_loss/train': 0.9650594405829906} +12/21/2021 15:52:21 - INFO - codeparrot_training - Step 400: {'lr': 0.000499974842186354, 'samples': 205312, 'steps': 400, 'batch_loss/train': 0.9882339080795646} +12/21/2021 15:52:33 - INFO - codeparrot_training - Step 401: {'lr': 0.0004999746180681918, 'samples': 205824, 'steps': 401, 'batch_loss/train': 0.8767112297937274} +12/21/2021 15:52:44 - INFO - codeparrot_training - Step 402: {'lr': 0.000499974392956225, 'samples': 206336, 'steps': 402, 'batch_loss/train': 0.9569975165650249} +12/21/2021 15:52:54 - INFO - codeparrot_training - Step 403: {'lr': 0.0004999741668504544, 'samples': 206848, 'steps': 403, 'batch_loss/train': 0.7826286354102194} +12/21/2021 15:53:06 - INFO - codeparrot_training - Step 404: {'lr': 0.0004999739397508808, 'samples': 207360, 'steps': 404, 'batch_loss/train': 0.9592352164909244} +12/21/2021 15:53:17 - INFO - codeparrot_training - Step 405: {'lr': 0.0004999737116575053, 'samples': 207872, 'steps': 405, 'batch_loss/train': 1.034396498464048} +12/21/2021 15:53:28 - INFO - codeparrot_training - Step 406: {'lr': 0.0004999734825703287, 'samples': 208384, 'steps': 406, 'batch_loss/train': 1.0085913049988449} +12/21/2021 15:53:40 - INFO - codeparrot_training - Step 407: {'lr': 0.0004999732524893518, 'samples': 208896, 'steps': 407, 'batch_loss/train': 0.961815488524735} +12/21/2021 15:53:51 - INFO - codeparrot_training 
- Step 408: {'lr': 0.0004999730214145758, 'samples': 209408, 'steps': 408, 'batch_loss/train': 0.9491942655295134} +12/21/2021 15:54:01 - INFO - codeparrot_training - Step 409: {'lr': 0.0004999727893460013, 'samples': 209920, 'steps': 409, 'batch_loss/train': 0.973499272018671} +12/21/2021 15:54:12 - INFO - codeparrot_training - Step 410: {'lr': 0.0004999725562836295, 'samples': 210432, 'steps': 410, 'batch_loss/train': 0.9724812339991331} +12/21/2021 15:54:24 - INFO - codeparrot_training - Step 411: {'lr': 0.0004999723222274612, 'samples': 210944, 'steps': 411, 'batch_loss/train': 0.8766064941883087} +12/21/2021 15:54:35 - INFO - codeparrot_training - Step 412: {'lr': 0.0004999720871774972, 'samples': 211456, 'steps': 412, 'batch_loss/train': 0.7345240158028901} +12/21/2021 15:54:46 - INFO - codeparrot_training - Step 413: {'lr': 0.0004999718511337387, 'samples': 211968, 'steps': 413, 'batch_loss/train': 0.9546373225748539} +12/21/2021 15:54:58 - INFO - codeparrot_training - Step 414: {'lr': 0.0004999716140961864, 'samples': 212480, 'steps': 414, 'batch_loss/train': 1.066631537862122} +12/21/2021 15:55:09 - INFO - codeparrot_training - Step 415: {'lr': 0.0004999713760648415, 'samples': 212992, 'steps': 415, 'batch_loss/train': 0.929376820102334} +12/21/2021 15:55:19 - INFO - codeparrot_training - Step 416: {'lr': 0.0004999711370397047, 'samples': 213504, 'steps': 416, 'batch_loss/train': 0.9020446855574846} +12/21/2021 15:55:31 - INFO - codeparrot_training - Step 417: {'lr': 0.0004999708970207771, 'samples': 214016, 'steps': 417, 'batch_loss/train': 0.9906241353601217} +12/21/2021 15:55:42 - INFO - codeparrot_training - Step 418: {'lr': 0.0004999706560080595, 'samples': 214528, 'steps': 418, 'batch_loss/train': 0.9769453471526504} +12/21/2021 15:55:53 - INFO - codeparrot_training - Step 419: {'lr': 0.000499970414001553, 'samples': 215040, 'steps': 419, 'batch_loss/train': 0.9733418663963675} +12/21/2021 15:56:04 - INFO - codeparrot_training - Step 420: {'lr': 0.0004999701710012585, 'samples': 215552, 'steps': 420, 'batch_loss/train': 0.9755980726331472} +12/21/2021 15:56:15 - INFO - codeparrot_training - Step 421: {'lr': 0.000499969927007177, 'samples': 216064, 'steps': 421, 'batch_loss/train': 0.8809562092646956} +12/21/2021 15:56:26 - INFO - codeparrot_training - Step 422: {'lr': 0.0004999696820193095, 'samples': 216576, 'steps': 422, 'batch_loss/train': 0.9990965034812689} +12/21/2021 15:56:36 - INFO - codeparrot_training - Step 423: {'lr': 0.0004999694360376569, 'samples': 217088, 'steps': 423, 'batch_loss/train': 0.986396006308496} +12/21/2021 15:56:49 - INFO - codeparrot_training - Step 424: {'lr': 0.0004999691890622202, 'samples': 217600, 'steps': 424, 'batch_loss/train': 1.0693664755672216} +12/21/2021 15:56:59 - INFO - codeparrot_training - Step 425: {'lr': 0.0004999689410930003, 'samples': 218112, 'steps': 425, 'batch_loss/train': 0.8571261004544795} +12/21/2021 15:57:10 - INFO - codeparrot_training - Step 426: {'lr': 0.0004999686921299984, 'samples': 218624, 'steps': 426, 'batch_loss/train': 0.8347470150329173} +12/21/2021 15:57:22 - INFO - codeparrot_training - Step 427: {'lr': 0.0004999684421732153, 'samples': 219136, 'steps': 427, 'batch_loss/train': 0.8844667943194509} +12/21/2021 15:57:33 - INFO - codeparrot_training - Step 428: {'lr': 0.0004999681912226521, 'samples': 219648, 'steps': 428, 'batch_loss/train': 0.9169071968644857} +12/21/2021 15:57:43 - INFO - codeparrot_training - Step 429: {'lr': 0.0004999679392783098, 'samples': 220160, 'steps': 429, 'batch_loss/train': 
1.0903414757922292} +12/21/2021 15:57:55 - INFO - codeparrot_training - Step 430: {'lr': 0.0004999676863401893, 'samples': 220672, 'steps': 430, 'batch_loss/train': 0.9996788166463375} +12/21/2021 15:58:06 - INFO - codeparrot_training - Step 431: {'lr': 0.0004999674324082916, 'samples': 221184, 'steps': 431, 'batch_loss/train': 0.9633755311369896} +12/21/2021 15:58:16 - INFO - codeparrot_training - Step 432: {'lr': 0.0004999671774826179, 'samples': 221696, 'steps': 432, 'batch_loss/train': 0.8803270729258657} +12/21/2021 15:58:27 - INFO - codeparrot_training - Step 433: {'lr': 0.000499966921563169, 'samples': 222208, 'steps': 433, 'batch_loss/train': 1.026193905621767} +12/21/2021 15:58:41 - INFO - codeparrot_training - Step 434: {'lr': 0.000499966664649946, 'samples': 222720, 'steps': 434, 'batch_loss/train': 0.9324929639697075} +12/21/2021 15:58:51 - INFO - codeparrot_training - Step 435: {'lr': 0.0004999664067429499, 'samples': 223232, 'steps': 435, 'batch_loss/train': 1.0224649542942643} +12/21/2021 15:59:02 - INFO - codeparrot_training - Step 436: {'lr': 0.0004999661478421819, 'samples': 223744, 'steps': 436, 'batch_loss/train': 0.8969925502315164} +12/21/2021 15:59:14 - INFO - codeparrot_training - Step 437: {'lr': 0.0004999658879476427, 'samples': 224256, 'steps': 437, 'batch_loss/train': 0.9263608362525702} +12/21/2021 15:59:24 - INFO - codeparrot_training - Step 438: {'lr': 0.0004999656270593336, 'samples': 224768, 'steps': 438, 'batch_loss/train': 0.9655403601936996} +12/21/2021 15:59:35 - INFO - codeparrot_training - Step 439: {'lr': 0.0004999653651772555, 'samples': 225280, 'steps': 439, 'batch_loss/train': 0.9194264309480786} +12/21/2021 15:59:47 - INFO - codeparrot_training - Step 440: {'lr': 0.0004999651023014095, 'samples': 225792, 'steps': 440, 'batch_loss/train': 0.9931161850690842} +12/21/2021 15:59:57 - INFO - codeparrot_training - Step 441: {'lr': 0.0004999648384317967, 'samples': 226304, 'steps': 441, 'batch_loss/train': 0.9761939644813538} +12/21/2021 16:00:08 - INFO - codeparrot_training - Step 442: {'lr': 0.0004999645735684181, 'samples': 226816, 'steps': 442, 'batch_loss/train': 0.9765765778720379} +12/21/2021 16:00:19 - INFO - codeparrot_training - Step 443: {'lr': 0.0004999643077112747, 'samples': 227328, 'steps': 443, 'batch_loss/train': 0.9984427690505981} +12/21/2021 16:00:32 - INFO - codeparrot_training - Step 444: {'lr': 0.0004999640408603676, 'samples': 227840, 'steps': 444, 'batch_loss/train': 0.8500730954110622} +12/21/2021 16:00:42 - INFO - codeparrot_training - Step 445: {'lr': 0.0004999637730156979, 'samples': 228352, 'steps': 445, 'batch_loss/train': 0.8858804395422339} +12/21/2021 16:00:53 - INFO - codeparrot_training - Step 446: {'lr': 0.0004999635041772665, 'samples': 228864, 'steps': 446, 'batch_loss/train': 0.8956717094406486} +12/21/2021 16:01:05 - INFO - codeparrot_training - Step 447: {'lr': 0.0004999632343450747, 'samples': 229376, 'steps': 447, 'batch_loss/train': 1.0046755797229707} +12/21/2021 16:01:16 - INFO - codeparrot_training - Step 448: {'lr': 0.0004999629635191234, 'samples': 229888, 'steps': 448, 'batch_loss/train': 0.7434851163998246} +12/21/2021 16:01:26 - INFO - codeparrot_training - Step 449: {'lr': 0.0004999626916994137, 'samples': 230400, 'steps': 449, 'batch_loss/train': 0.9022434931248426} +12/21/2021 16:01:38 - INFO - codeparrot_training - Step 450: {'lr': 0.0004999624188859468, 'samples': 230912, 'steps': 450, 'batch_loss/train': 0.9132275562733412} +12/21/2021 16:01:49 - INFO - codeparrot_training - Step 451: {'lr': 
0.0004999621450787237, 'samples': 231424, 'steps': 451, 'batch_loss/train': 0.8767648488283157} +12/21/2021 16:02:00 - INFO - codeparrot_training - Step 452: {'lr': 0.0004999618702777455, 'samples': 231936, 'steps': 452, 'batch_loss/train': 0.8904498647898436} +12/21/2021 16:02:12 - INFO - codeparrot_training - Step 453: {'lr': 0.0004999615944830132, 'samples': 232448, 'steps': 453, 'batch_loss/train': 0.9733011778444052} +12/21/2021 16:02:23 - INFO - codeparrot_training - Step 454: {'lr': 0.0004999613176945282, 'samples': 232960, 'steps': 454, 'batch_loss/train': 0.8590325242839754} +12/21/2021 16:02:34 - INFO - codeparrot_training - Step 455: {'lr': 0.0004999610399122912, 'samples': 233472, 'steps': 455, 'batch_loss/train': 0.840726169757545} +12/21/2021 16:02:44 - INFO - codeparrot_training - Step 456: {'lr': 0.0004999607611363035, 'samples': 233984, 'steps': 456, 'batch_loss/train': 0.9300893796607852} +12/21/2021 16:02:56 - INFO - codeparrot_training - Step 457: {'lr': 0.0004999604813665662, 'samples': 234496, 'steps': 457, 'batch_loss/train': 0.9475970091298223} +12/21/2021 16:03:07 - INFO - codeparrot_training - Step 458: {'lr': 0.0004999602006030804, 'samples': 235008, 'steps': 458, 'batch_loss/train': 1.0036976113915443} +12/21/2021 16:03:17 - INFO - codeparrot_training - Step 459: {'lr': 0.0004999599188458472, 'samples': 235520, 'steps': 459, 'batch_loss/train': 0.9122916385531425} +12/21/2021 16:03:30 - INFO - codeparrot_training - Step 460: {'lr': 0.0004999596360948679, 'samples': 236032, 'steps': 460, 'batch_loss/train': 0.9647639291360974} +12/21/2021 16:03:41 - INFO - codeparrot_training - Step 461: {'lr': 0.0004999593523501433, 'samples': 236544, 'steps': 461, 'batch_loss/train': 0.9536460340023041} +12/21/2021 16:03:51 - INFO - codeparrot_training - Step 462: {'lr': 0.0004999590676116747, 'samples': 237056, 'steps': 462, 'batch_loss/train': 0.8009715909138322} +12/21/2021 16:04:03 - INFO - codeparrot_training - Step 463: {'lr': 0.0004999587818794633, 'samples': 237568, 'steps': 463, 'batch_loss/train': 0.9024169323965907} +12/21/2021 16:04:14 - INFO - codeparrot_training - Step 464: {'lr': 0.0004999584951535102, 'samples': 238080, 'steps': 464, 'batch_loss/train': 0.8396811774000525} +12/21/2021 16:04:24 - INFO - codeparrot_training - Step 465: {'lr': 0.0004999582074338163, 'samples': 238592, 'steps': 465, 'batch_loss/train': 0.928785752505064} +12/21/2021 16:04:37 - INFO - codeparrot_training - Step 466: {'lr': 0.0004999579187203832, 'samples': 239104, 'steps': 466, 'batch_loss/train': 0.9804435735568404} +12/21/2021 16:04:47 - INFO - codeparrot_training - Step 467: {'lr': 0.0004999576290132117, 'samples': 239616, 'steps': 467, 'batch_loss/train': 0.9512661406770349} +12/21/2021 16:04:58 - INFO - codeparrot_training - Step 468: {'lr': 0.0004999573383123031, 'samples': 240128, 'steps': 468, 'batch_loss/train': 0.9538242537528276} +12/21/2021 16:05:08 - INFO - codeparrot_training - Step 469: {'lr': 0.0004999570466176584, 'samples': 240640, 'steps': 469, 'batch_loss/train': 0.9624655842781067} +12/21/2021 16:05:21 - INFO - codeparrot_training - Step 470: {'lr': 0.0004999567539292789, 'samples': 241152, 'steps': 470, 'batch_loss/train': 0.9739622380584478} +12/21/2021 16:05:32 - INFO - codeparrot_training - Step 471: {'lr': 0.0004999564602471657, 'samples': 241664, 'steps': 471, 'batch_loss/train': 0.9446419794112444} +12/21/2021 16:05:42 - INFO - codeparrot_training - Step 472: {'lr': 0.00049995616557132, 'samples': 242176, 'steps': 472, 'batch_loss/train': 
0.9030982083640993} +12/21/2021 16:05:54 - INFO - codeparrot_training - Step 473: {'lr': 0.000499955869901743, 'samples': 242688, 'steps': 473, 'batch_loss/train': 0.8914791769348085} +12/21/2021 16:06:05 - INFO - codeparrot_training - Step 474: {'lr': 0.0004999555732384358, 'samples': 243200, 'steps': 474, 'batch_loss/train': 0.9041023524478078} +12/21/2021 16:06:16 - INFO - codeparrot_training - Step 475: {'lr': 0.0004999552755813997, 'samples': 243712, 'steps': 475, 'batch_loss/train': 0.9548308560624719} +12/21/2021 16:06:28 - INFO - codeparrot_training - Step 476: {'lr': 0.0004999549769306357, 'samples': 244224, 'steps': 476, 'batch_loss/train': 0.9872032226994634} +12/21/2021 16:06:38 - INFO - codeparrot_training - Step 477: {'lr': 0.0004999546772861452, 'samples': 244736, 'steps': 477, 'batch_loss/train': 0.9012071953620762} +12/21/2021 16:06:49 - INFO - codeparrot_training - Step 478: {'lr': 0.0004999543766479291, 'samples': 245248, 'steps': 478, 'batch_loss/train': 0.9515836574137211} +12/21/2021 16:06:59 - INFO - codeparrot_training - Step 479: {'lr': 0.000499954075015989, 'samples': 245760, 'steps': 479, 'batch_loss/train': 0.780715158674866} +12/21/2021 16:07:11 - INFO - codeparrot_training - Step 480: {'lr': 0.0004999537723903258, 'samples': 246272, 'steps': 480, 'batch_loss/train': 0.8905156045220792} +12/21/2021 16:07:22 - INFO - codeparrot_training - Step 481: {'lr': 0.0004999534687709407, 'samples': 246784, 'steps': 481, 'batch_loss/train': 0.8681614296510816} +12/21/2021 16:07:32 - INFO - codeparrot_training - Step 482: {'lr': 0.0004999531641578351, 'samples': 247296, 'steps': 482, 'batch_loss/train': 0.929211019538343} +12/21/2021 16:07:46 - INFO - codeparrot_training - Step 483: {'lr': 0.00049995285855101, 'samples': 247808, 'steps': 483, 'batch_loss/train': 0.9394317781552672} +12/21/2021 16:07:56 - INFO - codeparrot_training - Step 484: {'lr': 0.0004999525519504668, 'samples': 248320, 'steps': 484, 'batch_loss/train': 0.955608868971467} +12/21/2021 16:08:07 - INFO - codeparrot_training - Step 485: {'lr': 0.0004999522443562066, 'samples': 248832, 'steps': 485, 'batch_loss/train': 0.8707458833232522} +12/21/2021 16:08:17 - INFO - codeparrot_training - Step 486: {'lr': 0.0004999519357682306, 'samples': 249344, 'steps': 486, 'batch_loss/train': 1.1621080995246302} +12/21/2021 16:08:29 - INFO - codeparrot_training - Step 487: {'lr': 0.0004999516261865401, 'samples': 249856, 'steps': 487, 'batch_loss/train': 0.8691662596538663} +12/21/2021 16:08:40 - INFO - codeparrot_training - Step 488: {'lr': 0.0004999513156111364, 'samples': 250368, 'steps': 488, 'batch_loss/train': 0.9347028806805611} +12/21/2021 16:08:51 - INFO - codeparrot_training - Step 489: {'lr': 0.0004999510040420206, 'samples': 250880, 'steps': 489, 'batch_loss/train': 0.9338290086016059} +12/21/2021 16:09:03 - INFO - codeparrot_training - Step 490: {'lr': 0.000499950691479194, 'samples': 251392, 'steps': 490, 'batch_loss/train': 1.0669192932546139} +12/21/2021 16:09:14 - INFO - codeparrot_training - Step 491: {'lr': 0.0004999503779226578, 'samples': 251904, 'steps': 491, 'batch_loss/train': 1.0738368043676019} +12/21/2021 16:09:25 - INFO - codeparrot_training - Step 492: {'lr': 0.0004999500633724132, 'samples': 252416, 'steps': 492, 'batch_loss/train': 0.9119481407105923} +12/21/2021 16:09:37 - INFO - codeparrot_training - Step 493: {'lr': 0.0004999497478284617, 'samples': 252928, 'steps': 493, 'batch_loss/train': 0.7464928608387709} +12/21/2021 16:09:47 - INFO - codeparrot_training - Step 494: {'lr': 
0.0004999494312908043, 'samples': 253440, 'steps': 494, 'batch_loss/train': 0.7216577949002385} +12/21/2021 16:09:58 - INFO - codeparrot_training - Step 495: {'lr': 0.0004999491137594423, 'samples': 253952, 'steps': 495, 'batch_loss/train': 0.9382506683468819} +12/21/2021 16:10:10 - INFO - codeparrot_training - Step 496: {'lr': 0.000499948795234377, 'samples': 254464, 'steps': 496, 'batch_loss/train': 1.0057146502658725} +12/21/2021 16:10:21 - INFO - codeparrot_training - Step 497: {'lr': 0.0004999484757156097, 'samples': 254976, 'steps': 497, 'batch_loss/train': 0.9702515061944723} +12/21/2021 16:10:31 - INFO - codeparrot_training - Step 498: {'lr': 0.0004999481552031415, 'samples': 255488, 'steps': 498, 'batch_loss/train': 0.8413978372700512} +12/21/2021 16:10:42 - INFO - codeparrot_training - Step 499: {'lr': 0.000499947833696974, 'samples': 256000, 'steps': 499, 'batch_loss/train': 1.068810211494565} +12/21/2021 16:10:54 - INFO - codeparrot_training - Step 500: {'lr': 0.0004999475111971082, 'samples': 256512, 'steps': 500, 'batch_loss/train': 0.9169547948986292} +12/21/2021 16:11:05 - INFO - codeparrot_training - Step 501: {'lr': 0.0004999471877035456, 'samples': 257024, 'steps': 501, 'batch_loss/train': 0.9324342245236039} +12/21/2021 16:11:15 - INFO - codeparrot_training - Step 502: {'lr': 0.0004999468632162872, 'samples': 257536, 'steps': 502, 'batch_loss/train': 0.9523064000532031} +12/21/2021 16:11:27 - INFO - codeparrot_training - Step 503: {'lr': 0.0004999465377353345, 'samples': 258048, 'steps': 503, 'batch_loss/train': 0.9037008536979556} +12/21/2021 16:11:38 - INFO - codeparrot_training - Step 504: {'lr': 0.0004999462112606888, 'samples': 258560, 'steps': 504, 'batch_loss/train': 0.8526471545919776} +12/21/2021 16:11:48 - INFO - codeparrot_training - Step 505: {'lr': 0.0004999458837923513, 'samples': 259072, 'steps': 505, 'batch_loss/train': 0.9257241990417242} +12/21/2021 16:12:01 - INFO - codeparrot_training - Step 506: {'lr': 0.0004999455553303233, 'samples': 259584, 'steps': 506, 'batch_loss/train': 0.907796804793179} +12/21/2021 16:12:11 - INFO - codeparrot_training - Step 507: {'lr': 0.0004999452258746061, 'samples': 260096, 'steps': 507, 'batch_loss/train': 0.9255656283348799} +12/21/2021 16:12:22 - INFO - codeparrot_training - Step 508: {'lr': 0.0004999448954252012, 'samples': 260608, 'steps': 508, 'batch_loss/train': 0.8958309143781662} +12/21/2021 16:12:34 - INFO - codeparrot_training - Step 509: {'lr': 0.0004999445639821097, 'samples': 261120, 'steps': 509, 'batch_loss/train': 0.8369890097528696} +12/21/2021 16:12:45 - INFO - codeparrot_training - Step 510: {'lr': 0.0004999442315453329, 'samples': 261632, 'steps': 510, 'batch_loss/train': 0.9495102781802416} +12/21/2021 16:12:55 - INFO - codeparrot_training - Step 511: {'lr': 0.0004999438981148724, 'samples': 262144, 'steps': 511, 'batch_loss/train': 0.9727047346532345} +12/21/2021 16:13:06 - INFO - codeparrot_training - Step 512: {'lr': 0.0004999435636907293, 'samples': 262656, 'steps': 512, 'batch_loss/train': 0.9792565805837512} +12/21/2021 16:13:19 - INFO - codeparrot_training - Step 513: {'lr': 0.0004999432282729048, 'samples': 263168, 'steps': 513, 'batch_loss/train': 0.7626077516470104} +12/21/2021 16:13:29 - INFO - codeparrot_training - Step 514: {'lr': 0.0004999428918614006, 'samples': 263680, 'steps': 514, 'batch_loss/train': 0.8711663307622075} +12/21/2021 16:13:40 - INFO - codeparrot_training - Step 515: {'lr': 0.0004999425544562178, 'samples': 264192, 'steps': 515, 'batch_loss/train': 
0.9464005529880524} +12/21/2021 16:13:52 - INFO - codeparrot_training - Step 516: {'lr': 0.0004999422160573575, 'samples': 264704, 'steps': 516, 'batch_loss/train': 0.8990733688697219} +12/21/2021 16:14:03 - INFO - codeparrot_training - Step 517: {'lr': 0.0004999418766648216, 'samples': 265216, 'steps': 517, 'batch_loss/train': 0.968270679935813} +12/21/2021 16:14:13 - INFO - codeparrot_training - Step 518: {'lr': 0.0004999415362786112, 'samples': 265728, 'steps': 518, 'batch_loss/train': 1.0139030292630196} +12/21/2021 16:14:26 - INFO - codeparrot_training - Step 519: {'lr': 0.0004999411948987275, 'samples': 266240, 'steps': 519, 'batch_loss/train': 0.8389423368498683} +12/21/2021 16:14:37 - INFO - codeparrot_training - Step 520: {'lr': 0.000499940852525172, 'samples': 266752, 'steps': 520, 'batch_loss/train': 0.9152657091617584} +12/21/2021 16:14:47 - INFO - codeparrot_training - Step 521: {'lr': 0.0004999405091579461, 'samples': 267264, 'steps': 521, 'batch_loss/train': 0.718719408614561} +12/21/2021 16:14:58 - INFO - codeparrot_training - Step 522: {'lr': 0.000499940164797051, 'samples': 267776, 'steps': 522, 'batch_loss/train': 0.8578516519628465} +12/21/2021 16:15:10 - INFO - codeparrot_training - Step 523: {'lr': 0.0004999398194424884, 'samples': 268288, 'steps': 523, 'batch_loss/train': 0.9500483646988869} +12/21/2021 16:15:20 - INFO - codeparrot_training - Step 524: {'lr': 0.0004999394730942592, 'samples': 268800, 'steps': 524, 'batch_loss/train': 0.880490711890161} +12/21/2021 16:15:31 - INFO - codeparrot_training - Step 525: {'lr': 0.0004999391257523653, 'samples': 269312, 'steps': 525, 'batch_loss/train': 0.9496993944048882} +12/21/2021 16:15:43 - INFO - codeparrot_training - Step 526: {'lr': 0.0004999387774168076, 'samples': 269824, 'steps': 526, 'batch_loss/train': 0.9183698203414679} +12/21/2021 16:15:53 - INFO - codeparrot_training - Step 527: {'lr': 0.0004999384280875877, 'samples': 270336, 'steps': 527, 'batch_loss/train': 0.8676091292873025} +12/21/2021 16:16:04 - INFO - codeparrot_training - Step 528: {'lr': 0.000499938077764707, 'samples': 270848, 'steps': 528, 'batch_loss/train': 0.8968275524675846} +12/21/2021 16:16:17 - INFO - codeparrot_training - Step 529: {'lr': 0.0004999377264481669, 'samples': 271360, 'steps': 529, 'batch_loss/train': 0.8513631895184517} +12/21/2021 16:16:27 - INFO - codeparrot_training - Step 530: {'lr': 0.0004999373741379688, 'samples': 271872, 'steps': 530, 'batch_loss/train': 0.8707464477047324} +12/21/2021 16:16:38 - INFO - codeparrot_training - Step 531: {'lr': 0.000499937020834114, 'samples': 272384, 'steps': 531, 'batch_loss/train': 0.9676802353933454} +12/21/2021 16:16:50 - INFO - codeparrot_training - Step 532: {'lr': 0.0004999366665366041, 'samples': 272896, 'steps': 532, 'batch_loss/train': 1.0208979127928615} +12/21/2021 16:17:01 - INFO - codeparrot_training - Step 533: {'lr': 0.0004999363112454402, 'samples': 273408, 'steps': 533, 'batch_loss/train': 0.7659446746110916} +12/21/2021 16:17:11 - INFO - codeparrot_training - Step 534: {'lr': 0.0004999359549606241, 'samples': 273920, 'steps': 534, 'batch_loss/train': 0.9105313969776034} +12/21/2021 16:17:22 - INFO - codeparrot_training - Step 535: {'lr': 0.0004999355976821569, 'samples': 274432, 'steps': 535, 'batch_loss/train': 0.9344321079552174} +12/21/2021 16:17:34 - INFO - codeparrot_training - Step 536: {'lr': 0.0004999352394100401, 'samples': 274944, 'steps': 536, 'batch_loss/train': 0.8789194077253342} +12/21/2021 16:17:44 - INFO - codeparrot_training - Step 537: {'lr': 
0.0004999348801442753, 'samples': 275456, 'steps': 537, 'batch_loss/train': 0.7410235283896327} +12/21/2021 16:17:55 - INFO - codeparrot_training - Step 538: {'lr': 0.0004999345198848637, 'samples': 275968, 'steps': 538, 'batch_loss/train': 0.8595439791679382} +12/21/2021 16:18:08 - INFO - codeparrot_training - Step 539: {'lr': 0.0004999341586318067, 'samples': 276480, 'steps': 539, 'batch_loss/train': 0.8188903618138283} +12/21/2021 16:18:18 - INFO - codeparrot_training - Step 540: {'lr': 0.0004999337963851061, 'samples': 276992, 'steps': 540, 'batch_loss/train': 0.9862461071461439} +12/21/2021 16:18:29 - INFO - codeparrot_training - Step 541: {'lr': 0.000499933433144763, 'samples': 277504, 'steps': 541, 'batch_loss/train': 0.8946229419670999} +12/21/2021 16:18:41 - INFO - codeparrot_training - Step 542: {'lr': 0.0004999330689107789, 'samples': 278016, 'steps': 542, 'batch_loss/train': 0.9362280331552029} +12/21/2021 16:18:52 - INFO - codeparrot_training - Step 543: {'lr': 0.0004999327036831554, 'samples': 278528, 'steps': 543, 'batch_loss/train': 0.9131284700706601} +12/21/2021 16:19:02 - INFO - codeparrot_training - Step 544: {'lr': 0.0004999323374618937, 'samples': 279040, 'steps': 544, 'batch_loss/train': 0.9678662391379476} +12/21/2021 16:19:14 - INFO - codeparrot_training - Step 545: {'lr': 0.0004999319702469954, 'samples': 279552, 'steps': 545, 'batch_loss/train': 0.9269123962149024} +12/21/2021 16:19:25 - INFO - codeparrot_training - Step 546: {'lr': 0.0004999316020384621, 'samples': 280064, 'steps': 546, 'batch_loss/train': 0.9190409397706389} +12/21/2021 16:19:35 - INFO - codeparrot_training - Step 547: {'lr': 0.000499931232836295, 'samples': 280576, 'steps': 547, 'batch_loss/train': 0.8725959439761937} +12/21/2021 16:19:46 - INFO - codeparrot_training - Step 548: {'lr': 0.0004999308626404957, 'samples': 281088, 'steps': 548, 'batch_loss/train': 0.7994477478787303} +12/21/2021 16:19:58 - INFO - codeparrot_training - Step 549: {'lr': 0.0004999304914510657, 'samples': 281600, 'steps': 549, 'batch_loss/train': 0.9431182509288192} +12/21/2021 16:20:09 - INFO - codeparrot_training - Step 550: {'lr': 0.0004999301192680064, 'samples': 282112, 'steps': 550, 'batch_loss/train': 0.8486450358759612} +12/21/2021 16:20:20 - INFO - codeparrot_training - Step 551: {'lr': 0.0004999297460913193, 'samples': 282624, 'steps': 551, 'batch_loss/train': 0.9616946149617434} +12/21/2021 16:20:32 - INFO - codeparrot_training - Step 552: {'lr': 0.000499929371921006, 'samples': 283136, 'steps': 552, 'batch_loss/train': 0.9738977933302522} +12/21/2021 16:20:43 - INFO - codeparrot_training - Step 553: {'lr': 0.0004999289967570678, 'samples': 283648, 'steps': 553, 'batch_loss/train': 1.0289804823696613} +12/21/2021 16:20:53 - INFO - codeparrot_training - Step 554: {'lr': 0.0004999286205995063, 'samples': 284160, 'steps': 554, 'batch_loss/train': 0.796769333537668} +12/21/2021 16:21:04 - INFO - codeparrot_training - Step 555: {'lr': 0.000499928243448323, 'samples': 284672, 'steps': 555, 'batch_loss/train': 0.8846205677837133} +12/21/2021 16:21:16 - INFO - codeparrot_training - Step 556: {'lr': 0.0004999278653035193, 'samples': 285184, 'steps': 556, 'batch_loss/train': 0.8715219246223569} +12/21/2021 16:21:27 - INFO - codeparrot_training - Step 557: {'lr': 0.0004999274861650967, 'samples': 285696, 'steps': 557, 'batch_loss/train': 0.8064535167068243} +12/21/2021 16:21:37 - INFO - codeparrot_training - Step 558: {'lr': 0.0004999271060330571, 'samples': 286208, 'steps': 558, 'batch_loss/train': 
0.8831348391249776} +12/21/2021 16:21:50 - INFO - codeparrot_training - Step 559: {'lr': 0.0004999267249074014, 'samples': 286720, 'steps': 559, 'batch_loss/train': 0.7371269129216671} +12/21/2021 16:22:01 - INFO - codeparrot_training - Step 560: {'lr': 0.0004999263427881316, 'samples': 287232, 'steps': 560, 'batch_loss/train': 1.0548465140163898} +12/21/2021 16:22:12 - INFO - codeparrot_training - Step 561: {'lr': 0.0004999259596752488, 'samples': 287744, 'steps': 561, 'batch_loss/train': 1.193105038255453} +12/21/2021 16:22:24 - INFO - codeparrot_training - Step 562: {'lr': 0.000499925575568755, 'samples': 288256, 'steps': 562, 'batch_loss/train': 0.9636553274467587} +12/21/2021 16:22:34 - INFO - codeparrot_training - Step 563: {'lr': 0.0004999251904686513, 'samples': 288768, 'steps': 563, 'batch_loss/train': 0.7096619489602745} +12/21/2021 16:22:45 - INFO - codeparrot_training - Step 564: {'lr': 0.0004999248043749395, 'samples': 289280, 'steps': 564, 'batch_loss/train': 1.0251792203634977} +12/21/2021 16:22:55 - INFO - codeparrot_training - Step 565: {'lr': 0.000499924417287621, 'samples': 289792, 'steps': 565, 'batch_loss/train': 1.1077187359333038} +12/21/2021 16:23:07 - INFO - codeparrot_training - Step 566: {'lr': 0.0004999240292066974, 'samples': 290304, 'steps': 566, 'batch_loss/train': 0.880513193551451} +12/21/2021 16:23:18 - INFO - codeparrot_training - Step 567: {'lr': 0.0004999236401321702, 'samples': 290816, 'steps': 567, 'batch_loss/train': 0.9123738692142069} +12/21/2021 16:23:29 - INFO - codeparrot_training - Step 568: {'lr': 0.0004999232500640411, 'samples': 291328, 'steps': 568, 'batch_loss/train': 0.9084219019860029} +12/21/2021 16:23:41 - INFO - codeparrot_training - Step 569: {'lr': 0.0004999228590023114, 'samples': 291840, 'steps': 569, 'batch_loss/train': 0.8991077225655317} +12/21/2021 16:23:52 - INFO - codeparrot_training - Step 570: {'lr': 0.0004999224669469829, 'samples': 292352, 'steps': 570, 'batch_loss/train': 0.9874973967671394} +12/21/2021 16:24:03 - INFO - codeparrot_training - Step 571: {'lr': 0.0004999220738980569, 'samples': 292864, 'steps': 571, 'batch_loss/train': 0.9690563306212425} +12/21/2021 16:24:15 - INFO - codeparrot_training - Step 572: {'lr': 0.0004999216798555351, 'samples': 293376, 'steps': 572, 'batch_loss/train': 0.8383851731196046} +12/21/2021 16:24:25 - INFO - codeparrot_training - Step 573: {'lr': 0.0004999212848194192, 'samples': 293888, 'steps': 573, 'batch_loss/train': 0.970521092414856} +12/21/2021 16:24:36 - INFO - codeparrot_training - Step 574: {'lr': 0.0004999208887897105, 'samples': 294400, 'steps': 574, 'batch_loss/train': 0.9525731345638633} +12/21/2021 16:24:48 - INFO - codeparrot_training - Step 575: {'lr': 0.0004999204917664108, 'samples': 294912, 'steps': 575, 'batch_loss/train': 0.8828167701140046} +12/21/2021 16:24:59 - INFO - codeparrot_training - Step 576: {'lr': 0.0004999200937495215, 'samples': 295424, 'steps': 576, 'batch_loss/train': 0.968753369525075} +12/21/2021 16:25:09 - INFO - codeparrot_training - Step 577: {'lr': 0.0004999196947390444, 'samples': 295936, 'steps': 577, 'batch_loss/train': 1.0379687119275331} +12/21/2021 16:25:20 - INFO - codeparrot_training - Step 578: {'lr': 0.0004999192947349809, 'samples': 296448, 'steps': 578, 'batch_loss/train': 0.9050431232899427} +12/21/2021 16:25:32 - INFO - codeparrot_training - Step 579: {'lr': 0.0004999188937373326, 'samples': 296960, 'steps': 579, 'batch_loss/train': 0.9736380837857723} +12/21/2021 16:25:42 - INFO - codeparrot_training - Step 580: {'lr': 
0.0004999184917461012, 'samples': 297472, 'steps': 580, 'batch_loss/train': 0.8154596420936286} +12/21/2021 16:25:53 - INFO - codeparrot_training - Step 581: {'lr': 0.0004999180887612883, 'samples': 297984, 'steps': 581, 'batch_loss/train': 0.9418940469622612} +12/21/2021 16:26:06 - INFO - codeparrot_training - Step 582: {'lr': 0.0004999176847828953, 'samples': 298496, 'steps': 582, 'batch_loss/train': 0.9892596285790205} +12/21/2021 16:26:16 - INFO - codeparrot_training - Step 583: {'lr': 0.0004999172798109241, 'samples': 299008, 'steps': 583, 'batch_loss/train': 0.8385149957612157} +12/21/2021 16:26:27 - INFO - codeparrot_training - Step 584: {'lr': 0.0004999168738453761, 'samples': 299520, 'steps': 584, 'batch_loss/train': 0.8724001785740256} +12/21/2021 16:26:39 - INFO - codeparrot_training - Step 585: {'lr': 0.000499916466886253, 'samples': 300032, 'steps': 585, 'batch_loss/train': 0.9139909027144313} +12/21/2021 16:26:49 - INFO - codeparrot_training - Step 586: {'lr': 0.0004999160589335563, 'samples': 300544, 'steps': 586, 'batch_loss/train': 0.9542697379365563} +12/21/2021 16:27:00 - INFO - codeparrot_training - Step 587: {'lr': 0.0004999156499872878, 'samples': 301056, 'steps': 587, 'batch_loss/train': 0.9293676074594259} +12/21/2021 16:27:10 - INFO - codeparrot_training - Step 588: {'lr': 0.000499915240047449, 'samples': 301568, 'steps': 588, 'batch_loss/train': 0.8713159929029644} +12/21/2021 16:27:23 - INFO - codeparrot_training - Step 589: {'lr': 0.0004999148291140416, 'samples': 302080, 'steps': 589, 'batch_loss/train': 0.9792732698842883} +12/21/2021 16:27:34 - INFO - codeparrot_training - Step 590: {'lr': 0.0004999144171870672, 'samples': 302592, 'steps': 590, 'batch_loss/train': 0.799872491043061} +12/21/2021 16:27:44 - INFO - codeparrot_training - Step 591: {'lr': 0.0004999140042665274, 'samples': 303104, 'steps': 591, 'batch_loss/train': 0.9258495345711708} +12/21/2021 16:27:56 - INFO - codeparrot_training - Step 592: {'lr': 0.0004999135903524239, 'samples': 303616, 'steps': 592, 'batch_loss/train': 0.9421892203390598} +12/21/2021 16:28:07 - INFO - codeparrot_training - Step 593: {'lr': 0.0004999131754447584, 'samples': 304128, 'steps': 593, 'batch_loss/train': 0.869837237522006} +12/21/2021 16:28:18 - INFO - codeparrot_training - Step 594: {'lr': 0.0004999127595435324, 'samples': 304640, 'steps': 594, 'batch_loss/train': 0.97826305963099} +12/21/2021 16:28:30 - INFO - codeparrot_training - Step 595: {'lr': 0.0004999123426487476, 'samples': 305152, 'steps': 595, 'batch_loss/train': 0.9203468547202647} +12/21/2021 16:28:40 - INFO - codeparrot_training - Step 596: {'lr': 0.0004999119247604059, 'samples': 305664, 'steps': 596, 'batch_loss/train': 0.8747538402676582} +12/21/2021 16:28:51 - INFO - codeparrot_training - Step 597: {'lr': 0.0004999115058785085, 'samples': 306176, 'steps': 597, 'batch_loss/train': 1.036929794587195} +12/21/2021 16:29:03 - INFO - codeparrot_training - Step 598: {'lr': 0.0004999110860030573, 'samples': 306688, 'steps': 598, 'batch_loss/train': 0.9580811411142349} +12/21/2021 16:29:14 - INFO - codeparrot_training - Step 599: {'lr': 0.0004999106651340542, 'samples': 307200, 'steps': 599, 'batch_loss/train': 0.9031574176624417} +12/21/2021 16:29:25 - INFO - codeparrot_training - Step 600: {'lr': 0.0004999102432715005, 'samples': 307712, 'steps': 600, 'batch_loss/train': 0.9655727837234735} +12/21/2021 16:29:35 - INFO - codeparrot_training - Step 601: {'lr': 0.0004999098204153981, 'samples': 308224, 'steps': 601, 'batch_loss/train': 0.8549080542288721} 
+12/21/2021 16:29:47 - INFO - codeparrot_training - Step 602: {'lr': 0.0004999093965657487, 'samples': 308736, 'steps': 602, 'batch_loss/train': 1.1503985403105617} +12/21/2021 16:29:58 - INFO - codeparrot_training - Step 603: {'lr': 0.0004999089717225536, 'samples': 309248, 'steps': 603, 'batch_loss/train': 1.0146469259634614} +12/21/2021 16:30:09 - INFO - codeparrot_training - Step 604: {'lr': 0.000499908545885815, 'samples': 309760, 'steps': 604, 'batch_loss/train': 0.9436314627528191} +12/21/2021 16:30:22 - INFO - codeparrot_training - Step 605: {'lr': 0.0004999081190555345, 'samples': 310272, 'steps': 605, 'batch_loss/train': 0.7581614810042083} +12/21/2021 16:30:32 - INFO - codeparrot_training - Step 606: {'lr': 0.0004999076912317135, 'samples': 310784, 'steps': 606, 'batch_loss/train': 0.9093598071485758} +12/21/2021 16:30:43 - INFO - codeparrot_training - Step 607: {'lr': 0.0004999072624143539, 'samples': 311296, 'steps': 607, 'batch_loss/train': 0.7014100013766438} +12/21/2021 16:30:55 - INFO - codeparrot_training - Step 608: {'lr': 0.0004999068326034575, 'samples': 311808, 'steps': 608, 'batch_loss/train': 0.8505488268565387} +12/21/2021 16:31:05 - INFO - codeparrot_training - Step 609: {'lr': 0.0004999064017990258, 'samples': 312320, 'steps': 609, 'batch_loss/train': 1.0037020947784185} +12/21/2021 16:31:16 - INFO - codeparrot_training - Step 610: {'lr': 0.0004999059700010606, 'samples': 312832, 'steps': 610, 'batch_loss/train': 0.9056342476978898} +12/21/2021 16:31:26 - INFO - codeparrot_training - Step 611: {'lr': 0.0004999055372095636, 'samples': 313344, 'steps': 611, 'batch_loss/train': 0.8517302870750427} +12/21/2021 16:31:38 - INFO - codeparrot_training - Step 612: {'lr': 0.0004999051034245367, 'samples': 313856, 'steps': 612, 'batch_loss/train': 0.9454834964126348} +12/21/2021 16:31:49 - INFO - codeparrot_training - Step 613: {'lr': 0.0004999046686459813, 'samples': 314368, 'steps': 613, 'batch_loss/train': 0.9660000558942556} +12/21/2021 16:32:00 - INFO - codeparrot_training - Step 614: {'lr': 0.0004999042328738993, 'samples': 314880, 'steps': 614, 'batch_loss/train': 0.9627989996224642} +12/21/2021 16:32:12 - INFO - codeparrot_training - Step 615: {'lr': 0.0004999037961082926, 'samples': 315392, 'steps': 615, 'batch_loss/train': 0.7407698503229767} +12/21/2021 16:32:22 - INFO - codeparrot_training - Step 616: {'lr': 0.0004999033583491626, 'samples': 315904, 'steps': 616, 'batch_loss/train': 0.9574173782020807} +12/21/2021 16:32:33 - INFO - codeparrot_training - Step 617: {'lr': 0.0004999029195965112, 'samples': 316416, 'steps': 617, 'batch_loss/train': 0.953805978409946} +12/21/2021 16:32:46 - INFO - codeparrot_training - Step 618: {'lr': 0.0004999024798503403, 'samples': 316928, 'steps': 618, 'batch_loss/train': 0.9668229855597019} +12/21/2021 16:32:56 - INFO - codeparrot_training - Step 619: {'lr': 0.0004999020391106514, 'samples': 317440, 'steps': 619, 'batch_loss/train': 0.9141043045092374} +12/21/2021 16:33:07 - INFO - codeparrot_training - Step 620: {'lr': 0.0004999015973774463, 'samples': 317952, 'steps': 620, 'batch_loss/train': 0.8565683905035257} +12/21/2021 16:33:17 - INFO - codeparrot_training - Step 621: {'lr': 0.000499901154650727, 'samples': 318464, 'steps': 621, 'batch_loss/train': 0.9203378558158875} +12/21/2021 16:33:30 - INFO - codeparrot_training - Step 622: {'lr': 0.0004999007109304949, 'samples': 318976, 'steps': 622, 'batch_loss/train': 0.8640285274013877} +12/21/2021 16:33:40 - INFO - codeparrot_training - Step 623: {'lr': 0.000499900266216752, 
'samples': 319488, 'steps': 623, 'batch_loss/train': 0.7352262572385371} +12/21/2021 16:33:51 - INFO - codeparrot_training - Step 624: {'lr': 0.0004998998205094999, 'samples': 320000, 'steps': 624, 'batch_loss/train': 1.1667565982788801} +12/21/2021 16:34:03 - INFO - codeparrot_training - Step 625: {'lr': 0.0004998993738087406, 'samples': 320512, 'steps': 625, 'batch_loss/train': 0.8875412936322391} +12/21/2021 16:34:13 - INFO - codeparrot_training - Step 626: {'lr': 0.0004998989261144757, 'samples': 321024, 'steps': 626, 'batch_loss/train': 0.8262747582048178} +12/21/2021 16:34:24 - INFO - codeparrot_training - Step 627: {'lr': 0.000499898477426707, 'samples': 321536, 'steps': 627, 'batch_loss/train': 0.8741793856024742} +12/21/2021 16:34:37 - INFO - codeparrot_training - Step 628: {'lr': 0.0004998980277454364, 'samples': 322048, 'steps': 628, 'batch_loss/train': 0.8630499104037881} +12/21/2021 16:34:47 - INFO - codeparrot_training - Step 629: {'lr': 0.0004998975770706655, 'samples': 322560, 'steps': 629, 'batch_loss/train': 0.927836736664176} +12/21/2021 16:34:58 - INFO - codeparrot_training - Step 630: {'lr': 0.0004998971254023962, 'samples': 323072, 'steps': 630, 'batch_loss/train': 0.8090596257243305} +12/21/2021 16:35:10 - INFO - codeparrot_training - Step 631: {'lr': 0.0004998966727406303, 'samples': 323584, 'steps': 631, 'batch_loss/train': 0.9767442140728235} +12/21/2021 16:35:21 - INFO - codeparrot_training - Step 632: {'lr': 0.0004998962190853696, 'samples': 324096, 'steps': 632, 'batch_loss/train': 0.9817150719463825} +12/21/2021 16:35:32 - INFO - codeparrot_training - Step 633: {'lr': 0.0004998957644366159, 'samples': 324608, 'steps': 633, 'batch_loss/train': 0.8378247471991926} +12/21/2021 16:35:42 - INFO - codeparrot_training - Step 634: {'lr': 0.0004998953087943709, 'samples': 325120, 'steps': 634, 'batch_loss/train': 0.9648652654141188} +12/21/2021 16:35:55 - INFO - codeparrot_training - Step 635: {'lr': 0.0004998948521586367, 'samples': 325632, 'steps': 635, 'batch_loss/train': 0.945153484120965} +12/21/2021 16:36:05 - INFO - codeparrot_training - Step 636: {'lr': 0.0004998943945294147, 'samples': 326144, 'steps': 636, 'batch_loss/train': 0.8803400984033942} +12/21/2021 16:36:16 - INFO - codeparrot_training - Step 637: {'lr': 0.0004998939359067072, 'samples': 326656, 'steps': 637, 'batch_loss/train': 1.0595063250511885} +12/21/2021 16:36:28 - INFO - codeparrot_training - Step 638: {'lr': 0.0004998934762905157, 'samples': 327168, 'steps': 638, 'batch_loss/train': 0.8174590254202485} +12/21/2021 16:36:39 - INFO - codeparrot_training - Step 639: {'lr': 0.0004998930156808419, 'samples': 327680, 'steps': 639, 'batch_loss/train': 0.876562980003655} +12/21/2021 16:36:49 - INFO - codeparrot_training - Step 640: {'lr': 0.000499892554077688, 'samples': 328192, 'steps': 640, 'batch_loss/train': 1.021360730752349} +12/21/2021 16:37:01 - INFO - codeparrot_training - Step 641: {'lr': 0.0004998920914810556, 'samples': 328704, 'steps': 641, 'batch_loss/train': 0.9346235450357199} +12/21/2021 16:37:12 - INFO - codeparrot_training - Step 642: {'lr': 0.0004998916278909466, 'samples': 329216, 'steps': 642, 'batch_loss/train': 0.9330318197607994} +12/21/2021 16:37:22 - INFO - codeparrot_training - Step 643: {'lr': 0.0004998911633073629, 'samples': 329728, 'steps': 643, 'batch_loss/train': 0.9473899034783244} +12/21/2021 16:37:35 - INFO - codeparrot_training - Step 644: {'lr': 0.0004998906977303062, 'samples': 330240, 'steps': 644, 'batch_loss/train': 0.7694513439200819} +12/21/2021 16:37:45 - 
INFO - codeparrot_training - Step 645: {'lr': 0.0004998902311597785, 'samples': 330752, 'steps': 645, 'batch_loss/train': 0.5681499605998397} +12/21/2021 16:37:56 - INFO - codeparrot_training - Step 646: {'lr': 0.0004998897635957815, 'samples': 331264, 'steps': 646, 'batch_loss/train': 0.9241367857903242} +12/21/2021 16:38:06 - INFO - codeparrot_training - Step 647: {'lr': 0.0004998892950383172, 'samples': 331776, 'steps': 647, 'batch_loss/train': 1.0140241999179125} +12/21/2021 16:38:19 - INFO - codeparrot_training - Step 648: {'lr': 0.0004998888254873875, 'samples': 332288, 'steps': 648, 'batch_loss/train': 0.8737476663663983} +12/21/2021 16:38:30 - INFO - codeparrot_training - Step 649: {'lr': 0.0004998883549429942, 'samples': 332800, 'steps': 649, 'batch_loss/train': 0.9440994933247566} +12/21/2021 16:38:40 - INFO - codeparrot_training - Step 650: {'lr': 0.000499887883405139, 'samples': 333312, 'steps': 650, 'batch_loss/train': 0.9624589420855045} +12/21/2021 16:38:52 - INFO - codeparrot_training - Step 651: {'lr': 0.0004998874108738241, 'samples': 333824, 'steps': 651, 'batch_loss/train': 1.0394103089347482} +12/21/2021 16:39:03 - INFO - codeparrot_training - Step 652: {'lr': 0.0004998869373490511, 'samples': 334336, 'steps': 652, 'batch_loss/train': 0.8162920204922557} +12/21/2021 16:39:13 - INFO - codeparrot_training - Step 653: {'lr': 0.000499886462830822, 'samples': 334848, 'steps': 653, 'batch_loss/train': 0.8337281758431345} +12/21/2021 16:39:26 - INFO - codeparrot_training - Step 654: {'lr': 0.0004998859873191387, 'samples': 335360, 'steps': 654, 'batch_loss/train': 0.8260816955007613} +12/21/2021 16:39:36 - INFO - codeparrot_training - Step 655: {'lr': 0.0004998855108140031, 'samples': 335872, 'steps': 655, 'batch_loss/train': 0.8891378277912736} +12/21/2021 16:39:47 - INFO - codeparrot_training - Step 656: {'lr': 0.000499885033315417, 'samples': 336384, 'steps': 656, 'batch_loss/train': 0.9109824746847153} +12/21/2021 16:40:00 - INFO - codeparrot_training - Step 657: {'lr': 0.0004998845548233825, 'samples': 336896, 'steps': 657, 'batch_loss/train': 0.9319252073764801} +12/21/2021 16:40:10 - INFO - codeparrot_training - Step 658: {'lr': 0.0004998840753379011, 'samples': 337408, 'steps': 658, 'batch_loss/train': 0.8713351637125015} +12/21/2021 16:40:21 - INFO - codeparrot_training - Step 659: {'lr': 0.0004998835948589751, 'samples': 337920, 'steps': 659, 'batch_loss/train': 0.9683993626385927} +12/21/2021 16:40:31 - INFO - codeparrot_training - Step 660: {'lr': 0.0004998831133866064, 'samples': 338432, 'steps': 660, 'batch_loss/train': 0.865633636713028} +12/21/2021 16:40:43 - INFO - codeparrot_training - Step 661: {'lr': 0.0004998826309207966, 'samples': 338944, 'steps': 661, 'batch_loss/train': 0.9237379627302289} +12/21/2021 16:40:54 - INFO - codeparrot_training - Step 662: {'lr': 0.0004998821474615481, 'samples': 339456, 'steps': 662, 'batch_loss/train': 0.863347053527832} +12/21/2021 16:41:05 - INFO - codeparrot_training - Step 663: {'lr': 0.0004998816630088622, 'samples': 339968, 'steps': 663, 'batch_loss/train': 0.9137359028682113} +12/21/2021 16:41:17 - INFO - codeparrot_training - Step 664: {'lr': 0.0004998811775627413, 'samples': 340480, 'steps': 664, 'batch_loss/train': 0.8101784214377403} +12/21/2021 16:41:28 - INFO - codeparrot_training - Step 665: {'lr': 0.0004998806911231873, 'samples': 340992, 'steps': 665, 'batch_loss/train': 0.8837747406214476} +12/21/2021 16:41:38 - INFO - codeparrot_training - Step 666: {'lr': 0.0004998802036902018, 'samples': 341504, 
'steps': 666, 'batch_loss/train': 0.9349890789017081} +12/21/2021 16:41:50 - INFO - codeparrot_training - Step 667: {'lr': 0.0004998797152637872, 'samples': 342016, 'steps': 667, 'batch_loss/train': 1.0027073482051492} +12/21/2021 16:42:01 - INFO - codeparrot_training - Step 668: {'lr': 0.000499879225843945, 'samples': 342528, 'steps': 668, 'batch_loss/train': 0.9386633480899036} +12/21/2021 16:42:12 - INFO - codeparrot_training - Step 669: {'lr': 0.0004998787354306774, 'samples': 343040, 'steps': 669, 'batch_loss/train': 0.8936235718429089} +12/21/2021 16:42:22 - INFO - codeparrot_training - Step 670: {'lr': 0.0004998782440239863, 'samples': 343552, 'steps': 670, 'batch_loss/train': 0.9008663929998875} +12/21/2021 16:42:35 - INFO - codeparrot_training - Step 671: {'lr': 0.0004998777516238737, 'samples': 344064, 'steps': 671, 'batch_loss/train': 0.9066405333578587} +12/21/2021 16:42:45 - INFO - codeparrot_training - Step 672: {'lr': 0.0004998772582303414, 'samples': 344576, 'steps': 672, 'batch_loss/train': 0.8753687320277095} +12/21/2021 16:42:56 - INFO - codeparrot_training - Step 673: {'lr': 0.0004998767638433917, 'samples': 345088, 'steps': 673, 'batch_loss/train': 0.8501032739877701} +12/21/2021 16:43:09 - INFO - codeparrot_training - Step 674: {'lr': 0.0004998762684630261, 'samples': 345600, 'steps': 674, 'batch_loss/train': 0.9156850469298661} +12/21/2021 16:43:19 - INFO - codeparrot_training - Step 675: {'lr': 0.0004998757720892469, 'samples': 346112, 'steps': 675, 'batch_loss/train': 0.8992579788900912} +12/21/2021 16:43:30 - INFO - codeparrot_training - Step 676: {'lr': 0.000499875274722056, 'samples': 346624, 'steps': 676, 'batch_loss/train': 0.8720930330455303} +12/21/2021 16:43:42 - INFO - codeparrot_training - Step 677: {'lr': 0.0004998747763614554, 'samples': 347136, 'steps': 677, 'batch_loss/train': 0.9037777651101351} +12/21/2021 16:43:52 - INFO - codeparrot_training - Step 678: {'lr': 0.0004998742770074469, 'samples': 347648, 'steps': 678, 'batch_loss/train': 0.9344880515709519} +12/21/2021 16:44:03 - INFO - codeparrot_training - Step 679: {'lr': 0.0004998737766600327, 'samples': 348160, 'steps': 679, 'batch_loss/train': 0.8562336768954992} +12/21/2021 16:44:15 - INFO - codeparrot_training - Step 680: {'lr': 0.0004998732753192148, 'samples': 348672, 'steps': 680, 'batch_loss/train': 0.8585494672879577} +12/21/2021 16:44:26 - INFO - codeparrot_training - Step 681: {'lr': 0.0004998727729849949, 'samples': 349184, 'steps': 681, 'batch_loss/train': 0.9700227463617921} +12/21/2021 16:44:36 - INFO - codeparrot_training - Step 682: {'lr': 0.0004998722696573753, 'samples': 349696, 'steps': 682, 'batch_loss/train': 0.9024811657145619} +12/21/2021 16:44:47 - INFO - codeparrot_training - Step 683: {'lr': 0.000499871765336358, 'samples': 350208, 'steps': 683, 'batch_loss/train': 0.9197928688954562} +12/21/2021 16:44:59 - INFO - codeparrot_training - Step 684: {'lr': 0.0004998712600219449, 'samples': 350720, 'steps': 684, 'batch_loss/train': 0.8372736421879381} +12/21/2021 16:45:10 - INFO - codeparrot_training - Step 685: {'lr': 0.0004998707537141378, 'samples': 351232, 'steps': 685, 'batch_loss/train': 0.7522589818108827} +12/21/2021 16:45:20 - INFO - codeparrot_training - Step 686: {'lr': 0.0004998702464129391, 'samples': 351744, 'steps': 686, 'batch_loss/train': 0.852756405249238} +12/21/2021 16:45:33 - INFO - codeparrot_training - Step 687: {'lr': 0.0004998697381183506, 'samples': 352256, 'steps': 687, 'batch_loss/train': 0.9529910506680608} +12/21/2021 16:45:43 - INFO - 
codeparrot_training - Step 688: {'lr': 0.0004998692288303745, 'samples': 352768, 'steps': 688, 'batch_loss/train': 0.9314002264291048} +12/21/2021 16:45:54 - INFO - codeparrot_training - Step 689: {'lr': 0.0004998687185490125, 'samples': 353280, 'steps': 689, 'batch_loss/train': 0.8984973533079028} +12/21/2021 16:46:06 - INFO - codeparrot_training - Step 690: {'lr': 0.000499868207274267, 'samples': 353792, 'steps': 690, 'batch_loss/train': 0.9233706695958972} +12/21/2021 16:46:17 - INFO - codeparrot_training - Step 691: {'lr': 0.0004998676950061397, 'samples': 354304, 'steps': 691, 'batch_loss/train': 0.8453902406617999} +12/21/2021 16:46:27 - INFO - codeparrot_training - Step 692: {'lr': 0.0004998671817446329, 'samples': 354816, 'steps': 692, 'batch_loss/train': 0.8967659650370479} +12/21/2021 16:46:38 - INFO - codeparrot_training - Step 693: {'lr': 0.0004998666674897484, 'samples': 355328, 'steps': 693, 'batch_loss/train': 0.9203964900225401} +12/21/2021 16:46:50 - INFO - codeparrot_training - Step 694: {'lr': 0.0004998661522414885, 'samples': 355840, 'steps': 694, 'batch_loss/train': 0.9685180312953889} +12/21/2021 16:47:00 - INFO - codeparrot_training - Step 695: {'lr': 0.0004998656359998552, 'samples': 356352, 'steps': 695, 'batch_loss/train': 0.782053689006716} +12/21/2021 16:47:11 - INFO - codeparrot_training - Step 696: {'lr': 0.0004998651187648503, 'samples': 356864, 'steps': 696, 'batch_loss/train': 0.9232744155451655} +12/21/2021 16:47:24 - INFO - codeparrot_training - Step 697: {'lr': 0.000499864600536476, 'samples': 357376, 'steps': 697, 'batch_loss/train': 0.9550192523747683} +12/21/2021 16:47:35 - INFO - codeparrot_training - Step 698: {'lr': 0.0004998640813147345, 'samples': 357888, 'steps': 698, 'batch_loss/train': 0.9095878661610186} +12/21/2021 16:47:45 - INFO - codeparrot_training - Step 699: {'lr': 0.0004998635610996278, 'samples': 358400, 'steps': 699, 'batch_loss/train': 0.9093206040561199} +12/21/2021 16:47:57 - INFO - codeparrot_training - Step 700: {'lr': 0.0004998630398911579, 'samples': 358912, 'steps': 700, 'batch_loss/train': 0.7932508641388267} +12/21/2021 16:48:08 - INFO - codeparrot_training - Step 701: {'lr': 0.0004998625176893269, 'samples': 359424, 'steps': 701, 'batch_loss/train': 0.8625519368797541} +12/21/2021 16:48:18 - INFO - codeparrot_training - Step 702: {'lr': 0.0004998619944941369, 'samples': 359936, 'steps': 702, 'batch_loss/train': 0.9058898026123643} +12/21/2021 16:48:29 - INFO - codeparrot_training - Step 703: {'lr': 0.0004998614703055898, 'samples': 360448, 'steps': 703, 'batch_loss/train': 0.7993348529562354} +12/21/2021 16:48:42 - INFO - codeparrot_training - Step 704: {'lr': 0.0004998609451236879, 'samples': 360960, 'steps': 704, 'batch_loss/train': 0.9983088783919811} +12/21/2021 16:48:52 - INFO - codeparrot_training - Step 705: {'lr': 0.0004998604189484333, 'samples': 361472, 'steps': 705, 'batch_loss/train': 1.0443515917286277} +12/21/2021 16:49:03 - INFO - codeparrot_training - Step 706: {'lr': 0.000499859891779828, 'samples': 361984, 'steps': 706, 'batch_loss/train': 0.9746590838767588} +12/21/2021 16:49:15 - INFO - codeparrot_training - Step 707: {'lr': 0.000499859363617874, 'samples': 362496, 'steps': 707, 'batch_loss/train': 0.9542826134711504} +12/21/2021 16:49:25 - INFO - codeparrot_training - Step 708: {'lr': 0.0004998588344625736, 'samples': 363008, 'steps': 708, 'batch_loss/train': 0.8941388195380569} +12/21/2021 16:49:36 - INFO - codeparrot_training - Step 709: {'lr': 0.0004998583043139289, 'samples': 363520, 'steps': 709, 
'batch_loss/train': 0.9298544200137258} +12/21/2021 16:49:48 - INFO - codeparrot_training - Step 710: {'lr': 0.0004998577731719419, 'samples': 364032, 'steps': 710, 'batch_loss/train': 0.9288242040202022} +12/21/2021 16:49:59 - INFO - codeparrot_training - Step 711: {'lr': 0.0004998572410366146, 'samples': 364544, 'steps': 711, 'batch_loss/train': 0.9556368067860603} +12/21/2021 16:50:09 - INFO - codeparrot_training - Step 712: {'lr': 0.0004998567079079494, 'samples': 365056, 'steps': 712, 'batch_loss/train': 0.9007089296355844} +12/21/2021 16:50:22 - INFO - codeparrot_training - Step 713: {'lr': 0.0004998561737859482, 'samples': 365568, 'steps': 713, 'batch_loss/train': 1.0700763603672385} +12/21/2021 16:50:33 - INFO - codeparrot_training - Step 714: {'lr': 0.0004998556386706133, 'samples': 366080, 'steps': 714, 'batch_loss/train': 0.9096572594717145} +12/21/2021 16:50:43 - INFO - codeparrot_training - Step 715: {'lr': 0.0004998551025619466, 'samples': 366592, 'steps': 715, 'batch_loss/train': 0.9411030448973179} +12/21/2021 16:50:54 - INFO - codeparrot_training - Step 716: {'lr': 0.0004998545654599503, 'samples': 367104, 'steps': 716, 'batch_loss/train': 0.9920748313888907} +12/21/2021 16:51:06 - INFO - codeparrot_training - Step 717: {'lr': 0.0004998540273646267, 'samples': 367616, 'steps': 717, 'batch_loss/train': 1.0416659973561764} +12/21/2021 16:51:17 - INFO - codeparrot_training - Step 718: {'lr': 0.0004998534882759778, 'samples': 368128, 'steps': 718, 'batch_loss/train': 0.7783045200631022} +12/21/2021 16:51:28 - INFO - codeparrot_training - Step 719: {'lr': 0.0004998529481940058, 'samples': 368640, 'steps': 719, 'batch_loss/train': 0.9713968969881535} +12/21/2021 16:51:40 - INFO - codeparrot_training - Step 720: {'lr': 0.0004998524071187127, 'samples': 369152, 'steps': 720, 'batch_loss/train': 0.9542250568047166} +12/21/2021 16:51:50 - INFO - codeparrot_training - Step 721: {'lr': 0.0004998518650501009, 'samples': 369664, 'steps': 721, 'batch_loss/train': 0.918422156944871} +12/21/2021 16:52:01 - INFO - codeparrot_training - Step 722: {'lr': 0.0004998513219881724, 'samples': 370176, 'steps': 722, 'batch_loss/train': 0.8979240972548723} +12/21/2021 16:52:13 - INFO - codeparrot_training - Step 723: {'lr': 0.0004998507779329294, 'samples': 370688, 'steps': 723, 'batch_loss/train': 1.0303898230195045} +12/21/2021 16:52:23 - INFO - codeparrot_training - Step 724: {'lr': 0.000499850232884374, 'samples': 371200, 'steps': 724, 'batch_loss/train': 0.9922699043527246} +12/21/2021 16:52:34 - INFO - codeparrot_training - Step 725: {'lr': 0.0004998496868425085, 'samples': 371712, 'steps': 725, 'batch_loss/train': 0.9277333309873939} +12/21/2021 16:52:47 - INFO - codeparrot_training - Step 726: {'lr': 0.0004998491398073348, 'samples': 372224, 'steps': 726, 'batch_loss/train': 0.9918480785563588} +12/21/2021 16:52:57 - INFO - codeparrot_training - Step 727: {'lr': 0.0004998485917788554, 'samples': 372736, 'steps': 727, 'batch_loss/train': 0.8040990834124386} +12/21/2021 16:53:08 - INFO - codeparrot_training - Step 728: {'lr': 0.0004998480427570723, 'samples': 373248, 'steps': 728, 'batch_loss/train': 0.8798563666641712} +12/21/2021 16:53:18 - INFO - codeparrot_training - Step 729: {'lr': 0.0004998474927419878, 'samples': 373760, 'steps': 729, 'batch_loss/train': 0.9049311252310872} +12/21/2021 16:53:30 - INFO - codeparrot_training - Step 730: {'lr': 0.0004998469417336039, 'samples': 374272, 'steps': 730, 'batch_loss/train': 0.8904352216050029} +12/21/2021 16:53:41 - INFO - codeparrot_training - 
Step 731: {'lr': 0.000499846389731923, 'samples': 374784, 'steps': 731, 'batch_loss/train': 0.9411286730319262} +12/21/2021 16:53:51 - INFO - codeparrot_training - Step 732: {'lr': 0.000499845836736947, 'samples': 375296, 'steps': 732, 'batch_loss/train': 0.9338088165968657} +12/21/2021 16:54:04 - INFO - codeparrot_training - Step 733: {'lr': 0.0004998452827486785, 'samples': 375808, 'steps': 733, 'batch_loss/train': 0.9239707002416253} +12/21/2021 16:54:15 - INFO - codeparrot_training - Step 734: {'lr': 0.0004998447277671194, 'samples': 376320, 'steps': 734, 'batch_loss/train': 0.9076833575963974} +12/21/2021 16:54:25 - INFO - codeparrot_training - Step 735: {'lr': 0.0004998441717922721, 'samples': 376832, 'steps': 735, 'batch_loss/train': 0.9181579956784844} +12/21/2021 16:54:37 - INFO - codeparrot_training - Step 736: {'lr': 0.0004998436148241386, 'samples': 377344, 'steps': 736, 'batch_loss/train': 0.9060037350282073} +12/21/2021 16:54:48 - INFO - codeparrot_training - Step 737: {'lr': 0.0004998430568627213, 'samples': 377856, 'steps': 737, 'batch_loss/train': 1.014872153289616} +12/21/2021 16:54:58 - INFO - codeparrot_training - Step 738: {'lr': 0.0004998424979080222, 'samples': 378368, 'steps': 738, 'batch_loss/train': 0.9603051524609327} +12/21/2021 16:55:10 - INFO - codeparrot_training - Step 739: {'lr': 0.0004998419379600438, 'samples': 378880, 'steps': 739, 'batch_loss/train': 0.8300898065790534} +12/21/2021 16:55:21 - INFO - codeparrot_training - Step 740: {'lr': 0.0004998413770187882, 'samples': 379392, 'steps': 740, 'batch_loss/train': 0.942758864723146} +12/21/2021 16:55:32 - INFO - codeparrot_training - Step 741: {'lr': 0.0004998408150842575, 'samples': 379904, 'steps': 741, 'batch_loss/train': 0.9354134553577751} +12/21/2021 16:55:42 - INFO - codeparrot_training - Step 742: {'lr': 0.0004998402521564541, 'samples': 380416, 'steps': 742, 'batch_loss/train': 0.8622790761291981} +12/21/2021 16:55:55 - INFO - codeparrot_training - Step 743: {'lr': 0.0004998396882353803, 'samples': 380928, 'steps': 743, 'batch_loss/train': 0.8875884525477886} +12/21/2021 16:56:06 - INFO - codeparrot_training - Step 744: {'lr': 0.0004998391233210381, 'samples': 381440, 'steps': 744, 'batch_loss/train': 0.8584947464987636} +12/21/2021 16:56:17 - INFO - codeparrot_training - Step 745: {'lr': 0.00049983855741343, 'samples': 381952, 'steps': 745, 'batch_loss/train': 0.8631401276215911} +12/21/2021 16:56:29 - INFO - codeparrot_training - Step 746: {'lr': 0.0004998379905125581, 'samples': 382464, 'steps': 746, 'batch_loss/train': 0.978673106059432} +12/21/2021 16:56:39 - INFO - codeparrot_training - Step 747: {'lr': 0.0004998374226184246, 'samples': 382976, 'steps': 747, 'batch_loss/train': 1.0031325398012996} +12/21/2021 16:56:50 - INFO - codeparrot_training - Step 748: {'lr': 0.000499836853731032, 'samples': 383488, 'steps': 748, 'batch_loss/train': 0.9518019109964371} +12/21/2021 16:57:02 - INFO - codeparrot_training - Step 749: {'lr': 0.0004998362838503822, 'samples': 384000, 'steps': 749, 'batch_loss/train': 0.926420415751636} +12/21/2021 16:57:13 - INFO - codeparrot_training - Step 750: {'lr': 0.0004998357129764778, 'samples': 384512, 'steps': 750, 'batch_loss/train': 0.9860595315694809} +12/21/2021 16:57:23 - INFO - codeparrot_training - Step 751: {'lr': 0.0004998351411093209, 'samples': 385024, 'steps': 751, 'batch_loss/train': 0.8680475722067058} +12/21/2021 16:57:34 - INFO - codeparrot_training - Step 752: {'lr': 0.0004998345682489139, 'samples': 385536, 'steps': 752, 'batch_loss/train': 
0.930605205707252} +12/21/2021 16:57:47 - INFO - codeparrot_training - Step 753: {'lr': 0.0004998339943952588, 'samples': 386048, 'steps': 753, 'batch_loss/train': 0.8897510543465614} +12/21/2021 16:57:57 - INFO - codeparrot_training - Step 754: {'lr': 0.0004998334195483583, 'samples': 386560, 'steps': 754, 'batch_loss/train': 0.9025165792554617} +12/21/2021 16:58:08 - INFO - codeparrot_training - Step 755: {'lr': 0.0004998328437082143, 'samples': 387072, 'steps': 755, 'batch_loss/train': 0.9224835326895118} +12/21/2021 16:58:20 - INFO - codeparrot_training - Step 756: {'lr': 0.0004998322668748293, 'samples': 387584, 'steps': 756, 'batch_loss/train': 0.8922624634578824} +12/21/2021 16:58:30 - INFO - codeparrot_training - Step 757: {'lr': 0.0004998316890482055, 'samples': 388096, 'steps': 757, 'batch_loss/train': 0.9402753226459026} +12/21/2021 16:58:41 - INFO - codeparrot_training - Step 758: {'lr': 0.0004998311102283453, 'samples': 388608, 'steps': 758, 'batch_loss/train': 0.9860587837174535} +12/21/2021 16:58:53 - INFO - codeparrot_training - Step 759: {'lr': 0.0004998305304152508, 'samples': 389120, 'steps': 759, 'batch_loss/train': 0.965982579626143} +12/21/2021 16:59:04 - INFO - codeparrot_training - Step 760: {'lr': 0.0004998299496089246, 'samples': 389632, 'steps': 760, 'batch_loss/train': 0.8267340925522149} +12/21/2021 16:59:14 - INFO - codeparrot_training - Step 761: {'lr': 0.0004998293678093688, 'samples': 390144, 'steps': 761, 'batch_loss/train': 0.9654532987624407} +12/21/2021 16:59:27 - INFO - codeparrot_training - Step 762: {'lr': 0.0004998287850165858, 'samples': 390656, 'steps': 762, 'batch_loss/train': 0.9248868841677904} +12/21/2021 16:59:38 - INFO - codeparrot_training - Step 763: {'lr': 0.0004998282012305778, 'samples': 391168, 'steps': 763, 'batch_loss/train': 0.7699675001204014} +12/21/2021 16:59:49 - INFO - codeparrot_training - Step 764: {'lr': 0.0004998276164513473, 'samples': 391680, 'steps': 764, 'batch_loss/train': 0.998703227378428} +12/21/2021 16:59:59 - INFO - codeparrot_training - Step 765: {'lr': 0.0004998270306788965, 'samples': 392192, 'steps': 765, 'batch_loss/train': 1.2129826387390494} +12/21/2021 17:00:11 - INFO - codeparrot_training - Step 766: {'lr': 0.0004998264439132278, 'samples': 392704, 'steps': 766, 'batch_loss/train': 1.055231023579836} +12/21/2021 17:00:22 - INFO - codeparrot_training - Step 767: {'lr': 0.0004998258561543433, 'samples': 393216, 'steps': 767, 'batch_loss/train': 0.9193539707921445} +12/21/2021 17:00:32 - INFO - codeparrot_training - Step 768: {'lr': 0.0004998252674022457, 'samples': 393728, 'steps': 768, 'batch_loss/train': 0.9410045798867941} +12/21/2021 17:00:44 - INFO - codeparrot_training - Step 769: {'lr': 0.0004998246776569371, 'samples': 394240, 'steps': 769, 'batch_loss/train': 0.8575128288939595} +12/21/2021 17:00:55 - INFO - codeparrot_training - Step 770: {'lr': 0.00049982408691842, 'samples': 394752, 'steps': 770, 'batch_loss/train': 0.8406329331919551} +12/21/2021 17:01:05 - INFO - codeparrot_training - Step 771: {'lr': 0.0004998234951866966, 'samples': 395264, 'steps': 771, 'batch_loss/train': 0.8926150659099221} +12/21/2021 17:01:16 - INFO - codeparrot_training - Step 772: {'lr': 0.0004998229024617693, 'samples': 395776, 'steps': 772, 'batch_loss/train': 0.8969995081424713} +12/21/2021 17:01:29 - INFO - codeparrot_training - Step 773: {'lr': 0.0004998223087436404, 'samples': 396288, 'steps': 773, 'batch_loss/train': 0.7905642939731479} +12/21/2021 17:01:39 - INFO - codeparrot_training - Step 774: {'lr': 
0.0004998217140323125, 'samples': 396800, 'steps': 774, 'batch_loss/train': 0.9869577521458268} +12/21/2021 17:01:50 - INFO - codeparrot_training - Step 775: {'lr': 0.0004998211183277877, 'samples': 397312, 'steps': 775, 'batch_loss/train': 0.7224799026735127} +12/21/2021 17:02:02 - INFO - codeparrot_training - Step 776: {'lr': 0.0004998205216300685, 'samples': 397824, 'steps': 776, 'batch_loss/train': 0.956611798144877} +12/21/2021 17:02:13 - INFO - codeparrot_training - Step 777: {'lr': 0.000499819923939157, 'samples': 398336, 'steps': 777, 'batch_loss/train': 0.6940632057376206} +12/21/2021 17:02:23 - INFO - codeparrot_training - Step 778: {'lr': 0.0004998193252550562, 'samples': 398848, 'steps': 778, 'batch_loss/train': 0.9514288548380136} +12/21/2021 17:02:36 - INFO - codeparrot_training - Step 779: {'lr': 0.0004998187255777678, 'samples': 399360, 'steps': 779, 'batch_loss/train': 0.8291643848642707} +12/21/2021 17:02:46 - INFO - codeparrot_training - Step 780: {'lr': 0.0004998181249072946, 'samples': 399872, 'steps': 780, 'batch_loss/train': 0.9225154826417565} +12/21/2021 17:02:57 - INFO - codeparrot_training - Step 781: {'lr': 0.0004998175232436388, 'samples': 400384, 'steps': 781, 'batch_loss/train': 0.7809789045713842} +12/21/2021 17:03:10 - INFO - codeparrot_training - Step 782: {'lr': 0.0004998169205868029, 'samples': 400896, 'steps': 782, 'batch_loss/train': 0.9135488187894225} +12/21/2021 17:03:20 - INFO - codeparrot_training - Step 783: {'lr': 0.0004998163169367894, 'samples': 401408, 'steps': 783, 'batch_loss/train': 0.9129585232585669} +12/21/2021 17:03:31 - INFO - codeparrot_training - Step 784: {'lr': 0.0004998157122936002, 'samples': 401920, 'steps': 784, 'batch_loss/train': 0.8906822032295167} +12/21/2021 17:03:41 - INFO - codeparrot_training - Step 785: {'lr': 0.0004998151066572383, 'samples': 402432, 'steps': 785, 'batch_loss/train': 0.9094138201326132} +12/21/2021 17:03:53 - INFO - codeparrot_training - Step 786: {'lr': 0.0004998145000277058, 'samples': 402944, 'steps': 786, 'batch_loss/train': 1.1266619339585304} +12/21/2021 17:04:04 - INFO - codeparrot_training - Step 787: {'lr': 0.0004998138924050051, 'samples': 403456, 'steps': 787, 'batch_loss/train': 0.9350923728197813} +12/21/2021 17:04:15 - INFO - codeparrot_training - Step 788: {'lr': 0.0004998132837891389, 'samples': 403968, 'steps': 788, 'batch_loss/train': 0.8893251535482705} +12/21/2021 17:04:27 - INFO - codeparrot_training - Step 789: {'lr': 0.0004998126741801092, 'samples': 404480, 'steps': 789, 'batch_loss/train': 0.9448728957213461} +12/21/2021 17:04:38 - INFO - codeparrot_training - Step 790: {'lr': 0.0004998120635779187, 'samples': 404992, 'steps': 790, 'batch_loss/train': 0.9978616088628769} +12/21/2021 17:04:48 - INFO - codeparrot_training - Step 791: {'lr': 0.0004998114519825698, 'samples': 405504, 'steps': 791, 'batch_loss/train': 0.8847496602684259} +12/21/2021 17:05:00 - INFO - codeparrot_training - Step 792: {'lr': 0.0004998108393940649, 'samples': 406016, 'steps': 792, 'batch_loss/train': 0.9300745222717524} +12/21/2021 17:05:11 - INFO - codeparrot_training - Step 793: {'lr': 0.0004998102258124065, 'samples': 406528, 'steps': 793, 'batch_loss/train': 0.9128897180780768} +12/21/2021 17:05:22 - INFO - codeparrot_training - Step 794: {'lr': 0.0004998096112375968, 'samples': 407040, 'steps': 794, 'batch_loss/train': 1.0378374606370926} +12/21/2021 17:05:34 - INFO - codeparrot_training - Step 795: {'lr': 0.0004998089956696384, 'samples': 407552, 'steps': 795, 'batch_loss/train': 
0.9944586558267474} +12/21/2021 17:05:44 - INFO - codeparrot_training - Step 796: {'lr': 0.000499808379108534, 'samples': 408064, 'steps': 796, 'batch_loss/train': 0.9192905947566032} +12/21/2021 17:05:55 - INFO - codeparrot_training - Step 797: {'lr': 0.0004998077615542856, 'samples': 408576, 'steps': 797, 'batch_loss/train': 0.9454828109592199} +12/21/2021 17:06:05 - INFO - codeparrot_training - Step 798: {'lr': 0.0004998071430068959, 'samples': 409088, 'steps': 798, 'batch_loss/train': 0.8841877561062574} +12/21/2021 17:06:17 - INFO - codeparrot_training - Step 799: {'lr': 0.0004998065234663672, 'samples': 409600, 'steps': 799, 'batch_loss/train': 0.8881682259961963} +12/21/2021 17:06:28 - INFO - codeparrot_training - Step 800: {'lr': 0.0004998059029327023, 'samples': 410112, 'steps': 800, 'batch_loss/train': 0.9492154698818922} +12/21/2021 17:06:38 - INFO - codeparrot_training - Step 801: {'lr': 0.0004998052814059033, 'samples': 410624, 'steps': 801, 'batch_loss/train': 0.9140373067930341} +12/21/2021 17:06:51 - INFO - codeparrot_training - Step 802: {'lr': 0.000499804658885973, 'samples': 411136, 'steps': 802, 'batch_loss/train': 0.9074042178690434} +12/21/2021 17:07:02 - INFO - codeparrot_training - Step 803: {'lr': 0.0004998040353729135, 'samples': 411648, 'steps': 803, 'batch_loss/train': 0.889681194908917} +12/21/2021 17:07:13 - INFO - codeparrot_training - Step 804: {'lr': 0.0004998034108667276, 'samples': 412160, 'steps': 804, 'batch_loss/train': 0.9234187286347151} +12/21/2021 17:07:25 - INFO - codeparrot_training - Step 805: {'lr': 0.0004998027853674176, 'samples': 412672, 'steps': 805, 'batch_loss/train': 0.9126674877479672} +12/21/2021 17:07:35 - INFO - codeparrot_training - Step 806: {'lr': 0.0004998021588749861, 'samples': 413184, 'steps': 806, 'batch_loss/train': 1.1862736027687788} +12/21/2021 17:07:46 - INFO - codeparrot_training - Step 807: {'lr': 0.0004998015313894355, 'samples': 413696, 'steps': 807, 'batch_loss/train': 1.037706938572228} +12/21/2021 17:07:56 - INFO - codeparrot_training - Step 808: {'lr': 0.0004998009029107684, 'samples': 414208, 'steps': 808, 'batch_loss/train': 0.9012275338172913} +12/21/2021 17:08:08 - INFO - codeparrot_training - Step 809: {'lr': 0.0004998002734389872, 'samples': 414720, 'steps': 809, 'batch_loss/train': 0.8174186530523002} +12/21/2021 17:08:19 - INFO - codeparrot_training - Step 810: {'lr': 0.0004997996429740943, 'samples': 415232, 'steps': 810, 'batch_loss/train': 0.9302441347390413} +12/21/2021 17:08:30 - INFO - codeparrot_training - Step 811: {'lr': 0.0004997990115160925, 'samples': 415744, 'steps': 811, 'batch_loss/train': 0.9354924131184816} +12/21/2021 17:08:42 - INFO - codeparrot_training - Step 812: {'lr': 0.0004997983790649841, 'samples': 416256, 'steps': 812, 'batch_loss/train': 0.882003903388977} +12/21/2021 17:08:53 - INFO - codeparrot_training - Step 813: {'lr': 0.0004997977456207717, 'samples': 416768, 'steps': 813, 'batch_loss/train': 0.9220817005261779} +12/21/2021 17:09:04 - INFO - codeparrot_training - Step 814: {'lr': 0.0004997971111834577, 'samples': 417280, 'steps': 814, 'batch_loss/train': 0.9384055119007826} +12/21/2021 17:09:16 - INFO - codeparrot_training - Step 815: {'lr': 0.0004997964757530448, 'samples': 417792, 'steps': 815, 'batch_loss/train': 1.0483489474281669} +12/21/2021 17:09:26 - INFO - codeparrot_training - Step 816: {'lr': 0.0004997958393295354, 'samples': 418304, 'steps': 816, 'batch_loss/train': 1.0413115546107292} +12/21/2021 17:09:37 - INFO - codeparrot_training - Step 817: {'lr': 
0.000499795201912932, 'samples': 418816, 'steps': 817, 'batch_loss/train': 0.8990647792816162} +12/21/2021 17:09:47 - INFO - codeparrot_training - Step 818: {'lr': 0.0004997945635032373, 'samples': 419328, 'steps': 818, 'batch_loss/train': 0.8802832514047623} +12/21/2021 17:10:00 - INFO - codeparrot_training - Step 819: {'lr': 0.0004997939241004536, 'samples': 419840, 'steps': 819, 'batch_loss/train': 0.9417443200945854} +12/21/2021 17:10:11 - INFO - codeparrot_training - Step 820: {'lr': 0.0004997932837045837, 'samples': 420352, 'steps': 820, 'batch_loss/train': 0.8611459489911795} +12/21/2021 17:10:21 - INFO - codeparrot_training - Step 821: {'lr': 0.00049979264231563, 'samples': 420864, 'steps': 821, 'batch_loss/train': 0.9343234058469534} +12/21/2021 17:10:33 - INFO - codeparrot_training - Step 822: {'lr': 0.000499791999933595, 'samples': 421376, 'steps': 822, 'batch_loss/train': 1.006358064711094} +12/21/2021 17:10:44 - INFO - codeparrot_training - Step 823: {'lr': 0.0004997913565584814, 'samples': 421888, 'steps': 823, 'batch_loss/train': 1.0345821371302009} +12/21/2021 17:10:54 - INFO - codeparrot_training - Step 824: {'lr': 0.0004997907121902917, 'samples': 422400, 'steps': 824, 'batch_loss/train': 0.8690118379890919} +12/21/2021 17:11:07 - INFO - codeparrot_training - Step 825: {'lr': 0.0004997900668290284, 'samples': 422912, 'steps': 825, 'batch_loss/train': 0.6611096791457385} +12/21/2021 17:11:17 - INFO - codeparrot_training - Step 826: {'lr': 0.0004997894204746941, 'samples': 423424, 'steps': 826, 'batch_loss/train': 0.8851624899543822} +12/21/2021 17:11:28 - INFO - codeparrot_training - Step 827: {'lr': 0.0004997887731272914, 'samples': 423936, 'steps': 827, 'batch_loss/train': 0.9128889078274369} +12/21/2021 17:11:40 - INFO - codeparrot_training - Step 828: {'lr': 0.0004997881247868228, 'samples': 424448, 'steps': 828, 'batch_loss/train': 0.7799935028888285} +12/21/2021 17:11:51 - INFO - codeparrot_training - Step 829: {'lr': 0.000499787475453291, 'samples': 424960, 'steps': 829, 'batch_loss/train': 0.9238556064665318} +12/21/2021 17:12:02 - INFO - codeparrot_training - Step 830: {'lr': 0.0004997868251266985, 'samples': 425472, 'steps': 830, 'batch_loss/train': 0.9082990828901529} +12/21/2021 17:12:12 - INFO - codeparrot_training - Step 831: {'lr': 0.000499786173807048, 'samples': 425984, 'steps': 831, 'batch_loss/train': 0.822741243056953} +12/21/2021 17:12:24 - INFO - codeparrot_training - Step 832: {'lr': 0.0004997855214943418, 'samples': 426496, 'steps': 832, 'batch_loss/train': 0.8597711115144193} +12/21/2021 17:12:35 - INFO - codeparrot_training - Step 833: {'lr': 0.0004997848681885828, 'samples': 427008, 'steps': 833, 'batch_loss/train': 0.9635952524840832} +12/21/2021 17:12:45 - INFO - codeparrot_training - Step 834: {'lr': 0.0004997842138897735, 'samples': 427520, 'steps': 834, 'batch_loss/train': 0.9419303433969617} +12/21/2021 17:12:57 - INFO - codeparrot_training - Step 835: {'lr': 0.0004997835585979164, 'samples': 428032, 'steps': 835, 'batch_loss/train': 0.9472830363083631} +12/21/2021 17:13:08 - INFO - codeparrot_training - Step 836: {'lr': 0.0004997829023130142, 'samples': 428544, 'steps': 836, 'batch_loss/train': 0.7724728945177048} +12/21/2021 17:13:19 - INFO - codeparrot_training - Step 837: {'lr': 0.0004997822450350695, 'samples': 429056, 'steps': 837, 'batch_loss/train': 0.8159578728955239} +12/21/2021 17:13:31 - INFO - codeparrot_training - Step 838: {'lr': 0.0004997815867640849, 'samples': 429568, 'steps': 838, 'batch_loss/train': 0.7640949334017932} 
+12/21/2021 17:13:41 - INFO - codeparrot_training - Step 839: {'lr': 0.0004997809275000631, 'samples': 430080, 'steps': 839, 'batch_loss/train': 0.9598758053034544} +12/21/2021 17:13:52 - INFO - codeparrot_training - Step 840: {'lr': 0.0004997802672430065, 'samples': 430592, 'steps': 840, 'batch_loss/train': 0.9455451741814613} +12/21/2021 17:14:05 - INFO - codeparrot_training - Step 841: {'lr': 0.000499779605992918, 'samples': 431104, 'steps': 841, 'batch_loss/train': 0.942421811632812} +12/21/2021 17:14:15 - INFO - codeparrot_training - Step 842: {'lr': 0.0004997789437498001, 'samples': 431616, 'steps': 842, 'batch_loss/train': 0.9513060636818409} +12/21/2021 17:14:26 - INFO - codeparrot_training - Step 843: {'lr': 0.0004997782805136554, 'samples': 432128, 'steps': 843, 'batch_loss/train': 1.0295728594064713} +12/21/2021 17:14:36 - INFO - codeparrot_training - Step 844: {'lr': 0.0004997776162844865, 'samples': 432640, 'steps': 844, 'batch_loss/train': 0.9750628145411611} +12/21/2021 17:14:49 - INFO - codeparrot_training - Step 845: {'lr': 0.0004997769510622961, 'samples': 433152, 'steps': 845, 'batch_loss/train': 0.7850676458328962} +12/21/2021 17:14:59 - INFO - codeparrot_training - Step 846: {'lr': 0.0004997762848470871, 'samples': 433664, 'steps': 846, 'batch_loss/train': 0.8023728863336146} +12/21/2021 17:15:10 - INFO - codeparrot_training - Step 847: {'lr': 0.0004997756176388615, 'samples': 434176, 'steps': 847, 'batch_loss/train': 0.984395582228899} +12/21/2021 17:15:22 - INFO - codeparrot_training - Step 848: {'lr': 0.0004997749494376227, 'samples': 434688, 'steps': 848, 'batch_loss/train': 0.9751801751554012} +12/21/2021 17:15:33 - INFO - codeparrot_training - Step 849: {'lr': 0.0004997742802433729, 'samples': 435200, 'steps': 849, 'batch_loss/train': 1.072255440056324} +12/21/2021 17:15:43 - INFO - codeparrot_training - Step 850: {'lr': 0.0004997736100561149, 'samples': 435712, 'steps': 850, 'batch_loss/train': 0.7036756607703865} +12/21/2021 17:15:56 - INFO - codeparrot_training - Step 851: {'lr': 0.0004997729388758513, 'samples': 436224, 'steps': 851, 'batch_loss/train': 0.7454155432060361} +12/21/2021 17:16:06 - INFO - codeparrot_training - Step 852: {'lr': 0.0004997722667025848, 'samples': 436736, 'steps': 852, 'batch_loss/train': 0.9254516665823758} +12/21/2021 17:16:17 - INFO - codeparrot_training - Step 853: {'lr': 0.0004997715935363181, 'samples': 437248, 'steps': 853, 'batch_loss/train': 1.0428108582273126} +12/21/2021 17:16:28 - INFO - codeparrot_training - Step 854: {'lr': 0.0004997709193770538, 'samples': 437760, 'steps': 854, 'batch_loss/train': 0.8779808301478624} +12/21/2021 17:16:40 - INFO - codeparrot_training - Step 855: {'lr': 0.0004997702442247947, 'samples': 438272, 'steps': 855, 'batch_loss/train': 0.9166898503899574} +12/21/2021 17:16:50 - INFO - codeparrot_training - Step 856: {'lr': 0.0004997695680795434, 'samples': 438784, 'steps': 856, 'batch_loss/train': 0.930611576884985} +12/21/2021 17:17:01 - INFO - codeparrot_training - Step 857: {'lr': 0.0004997688909413027, 'samples': 439296, 'steps': 857, 'batch_loss/train': 0.9523218180984259} +12/21/2021 17:17:13 - INFO - codeparrot_training - Step 858: {'lr': 0.000499768212810075, 'samples': 439808, 'steps': 858, 'batch_loss/train': 0.9139770325273275} +12/21/2021 17:17:24 - INFO - codeparrot_training - Step 859: {'lr': 0.0004997675336858633, 'samples': 440320, 'steps': 859, 'batch_loss/train': 0.8533319418784231} +12/21/2021 17:17:35 - INFO - codeparrot_training - Step 860: {'lr': 0.0004997668535686702, 
'samples': 440832, 'steps': 860, 'batch_loss/train': 0.9894550524186343} +12/21/2021 17:17:48 - INFO - codeparrot_training - Step 861: {'lr': 0.0004997661724584984, 'samples': 441344, 'steps': 861, 'batch_loss/train': 1.8830384816974401} +12/21/2021 17:17:59 - INFO - codeparrot_training - Step 862: {'lr': 0.0004997654903553506, 'samples': 441856, 'steps': 862, 'batch_loss/train': 0.901781047694385} +12/21/2021 17:18:09 - INFO - codeparrot_training - Step 863: {'lr': 0.0004997648072592295, 'samples': 442368, 'steps': 863, 'batch_loss/train': 1.52697267010808} +12/21/2021 17:18:20 - INFO - codeparrot_training - Step 864: {'lr': 0.0004997641231701379, 'samples': 442880, 'steps': 864, 'batch_loss/train': 0.9294169563800097} +12/21/2021 17:18:32 - INFO - codeparrot_training - Step 865: {'lr': 0.0004997634380880784, 'samples': 443392, 'steps': 865, 'batch_loss/train': 1.5136930644512177} +12/21/2021 17:18:43 - INFO - codeparrot_training - Step 866: {'lr': 0.0004997627520130538, 'samples': 443904, 'steps': 866, 'batch_loss/train': 1.069660285487771} +12/21/2021 17:18:53 - INFO - codeparrot_training - Step 867: {'lr': 0.0004997620649450667, 'samples': 444416, 'steps': 867, 'batch_loss/train': 0.8720626179128885} +12/21/2021 17:19:06 - INFO - codeparrot_training - Step 868: {'lr': 0.0004997613768841201, 'samples': 444928, 'steps': 868, 'batch_loss/train': 0.884731070138514} +12/21/2021 17:19:16 - INFO - codeparrot_training - Step 869: {'lr': 0.0004997606878302164, 'samples': 445440, 'steps': 869, 'batch_loss/train': 0.9463777532801032} +12/21/2021 17:19:27 - INFO - codeparrot_training - Step 870: {'lr': 0.0004997599977833587, 'samples': 445952, 'steps': 870, 'batch_loss/train': 1.0486275823786855} +12/21/2021 17:19:39 - INFO - codeparrot_training - Step 871: {'lr': 0.0004997593067435494, 'samples': 446464, 'steps': 871, 'batch_loss/train': 0.9842685889452696} +12/21/2021 17:19:50 - INFO - codeparrot_training - Step 872: {'lr': 0.0004997586147107914, 'samples': 446976, 'steps': 872, 'batch_loss/train': 0.8689216449856758} +12/21/2021 17:20:01 - INFO - codeparrot_training - Step 873: {'lr': 0.0004997579216850875, 'samples': 447488, 'steps': 873, 'batch_loss/train': 0.9537707678973675} +12/21/2021 17:20:11 - INFO - codeparrot_training - Step 874: {'lr': 0.0004997572276664405, 'samples': 448000, 'steps': 874, 'batch_loss/train': 1.264841839671135} +12/21/2021 17:20:23 - INFO - codeparrot_training - Step 875: {'lr': 0.000499756532654853, 'samples': 448512, 'steps': 875, 'batch_loss/train': 0.9504139134660363} +12/21/2021 17:20:34 - INFO - codeparrot_training - Step 876: {'lr': 0.0004997558366503277, 'samples': 449024, 'steps': 876, 'batch_loss/train': 0.9323732294142246} +12/21/2021 17:20:45 - INFO - codeparrot_training - Step 877: {'lr': 0.0004997551396528677, 'samples': 449536, 'steps': 877, 'batch_loss/train': 1.0832994310185313} +12/21/2021 17:20:57 - INFO - codeparrot_training - Step 878: {'lr': 0.0004997544416624755, 'samples': 450048, 'steps': 878, 'batch_loss/train': 0.9073361391201615} +12/21/2021 17:21:08 - INFO - codeparrot_training - Step 879: {'lr': 0.0004997537426791539, 'samples': 450560, 'steps': 879, 'batch_loss/train': 0.8538142582401633} +12/21/2021 17:21:18 - INFO - codeparrot_training - Step 880: {'lr': 0.0004997530427029058, 'samples': 451072, 'steps': 880, 'batch_loss/train': 0.9367448491975665} +12/21/2021 17:21:30 - INFO - codeparrot_training - Step 881: {'lr': 0.0004997523417337339, 'samples': 451584, 'steps': 881, 'batch_loss/train': 0.9488799460232258} +12/21/2021 17:21:41 - 
INFO - codeparrot_training - Step 882: {'lr': 0.0004997516397716408, 'samples': 452096, 'steps': 882, 'batch_loss/train': 0.8153837828431278} +12/21/2021 17:21:52 - INFO - codeparrot_training - Step 883: {'lr': 0.0004997509368166297, 'samples': 452608, 'steps': 883, 'batch_loss/train': 0.903922101482749} +12/21/2021 17:22:02 - INFO - codeparrot_training - Step 884: {'lr': 0.0004997502328687031, 'samples': 453120, 'steps': 884, 'batch_loss/train': 1.0400319676846266} +12/21/2021 17:22:14 - INFO - codeparrot_training - Step 885: {'lr': 0.0004997495279278639, 'samples': 453632, 'steps': 885, 'batch_loss/train': 0.9313750742003322} +12/21/2021 17:22:25 - INFO - codeparrot_training - Step 886: {'lr': 0.0004997488219941149, 'samples': 454144, 'steps': 886, 'batch_loss/train': 0.8759532701224089} +12/21/2021 17:22:35 - INFO - codeparrot_training - Step 887: {'lr': 0.0004997481150674589, 'samples': 454656, 'steps': 887, 'batch_loss/train': 0.952106449753046} +12/21/2021 17:22:49 - INFO - codeparrot_training - Step 888: {'lr': 0.0004997474071478986, 'samples': 455168, 'steps': 888, 'batch_loss/train': 0.908934413921088} +12/21/2021 17:23:00 - INFO - codeparrot_training - Step 889: {'lr': 0.0004997466982354369, 'samples': 455680, 'steps': 889, 'batch_loss/train': 0.9488861933350563} +12/21/2021 17:23:10 - INFO - codeparrot_training - Step 890: {'lr': 0.0004997459883300766, 'samples': 456192, 'steps': 890, 'batch_loss/train': 1.2181328516453505} +12/21/2021 17:23:23 - INFO - codeparrot_training - Step 891: {'lr': 0.0004997452774318208, 'samples': 456704, 'steps': 891, 'batch_loss/train': 0.9329738602973521} +12/21/2021 17:23:33 - INFO - codeparrot_training - Step 892: {'lr': 0.0004997445655406718, 'samples': 457216, 'steps': 892, 'batch_loss/train': 0.9559244178235531} +12/21/2021 17:23:44 - INFO - codeparrot_training - Step 893: {'lr': 0.0004997438526566329, 'samples': 457728, 'steps': 893, 'batch_loss/train': 0.9444565968587995} +12/21/2021 17:23:56 - INFO - codeparrot_training - Step 894: {'lr': 0.0004997431387797067, 'samples': 458240, 'steps': 894, 'batch_loss/train': 0.9750812072306871} +12/21/2021 17:24:06 - INFO - codeparrot_training - Step 895: {'lr': 0.000499742423909896, 'samples': 458752, 'steps': 895, 'batch_loss/train': 0.9508182452991605} +12/21/2021 17:24:17 - INFO - codeparrot_training - Step 896: {'lr': 0.0004997417080472037, 'samples': 459264, 'steps': 896, 'batch_loss/train': 0.9594501657411456} +12/21/2021 17:24:27 - INFO - codeparrot_training - Step 897: {'lr': 0.0004997409911916327, 'samples': 459776, 'steps': 897, 'batch_loss/train': 0.9807299934327602} +12/21/2021 17:24:40 - INFO - codeparrot_training - Step 898: {'lr': 0.0004997402733431859, 'samples': 460288, 'steps': 898, 'batch_loss/train': 0.9721807986497879} +12/21/2021 17:24:51 - INFO - codeparrot_training - Step 899: {'lr': 0.000499739554501866, 'samples': 460800, 'steps': 899, 'batch_loss/train': 0.920583606697619} +12/21/2021 17:25:01 - INFO - codeparrot_training - Step 900: {'lr': 0.000499738834667676, 'samples': 461312, 'steps': 900, 'batch_loss/train': 0.9912521066144109} +12/21/2021 17:25:13 - INFO - codeparrot_training - Step 901: {'lr': 0.0004997381138406186, 'samples': 461824, 'steps': 901, 'batch_loss/train': 0.9627675246447325} +12/21/2021 17:25:24 - INFO - codeparrot_training - Step 902: {'lr': 0.0004997373920206968, 'samples': 462336, 'steps': 902, 'batch_loss/train': 0.9098580265417695} +12/21/2021 17:25:35 - INFO - codeparrot_training - Step 903: {'lr': 0.0004997366692079133, 'samples': 462848, 'steps': 
903, 'batch_loss/train': 0.903676787391305} +12/21/2021 17:25:47 - INFO - codeparrot_training - Step 904: {'lr': 0.0004997359454022714, 'samples': 463360, 'steps': 904, 'batch_loss/train': 0.9083635397255421} +12/21/2021 17:25:58 - INFO - codeparrot_training - Step 905: {'lr': 0.0004997352206037734, 'samples': 463872, 'steps': 905, 'batch_loss/train': 0.853295112028718} +12/21/2021 17:26:09 - INFO - codeparrot_training - Step 906: {'lr': 0.0004997344948124226, 'samples': 464384, 'steps': 906, 'batch_loss/train': 1.2307321727275848} +12/21/2021 17:26:19 - INFO - codeparrot_training - Step 907: {'lr': 0.0004997337680282217, 'samples': 464896, 'steps': 907, 'batch_loss/train': 0.9160974379628897} +12/21/2021 17:26:31 - INFO - codeparrot_training - Step 908: {'lr': 0.0004997330402511736, 'samples': 465408, 'steps': 908, 'batch_loss/train': 1.1366801457479596} +12/21/2021 17:26:42 - INFO - codeparrot_training - Step 909: {'lr': 0.0004997323114812813, 'samples': 465920, 'steps': 909, 'batch_loss/train': 0.9090405153110623} +12/21/2021 17:26:52 - INFO - codeparrot_training - Step 910: {'lr': 0.0004997315817185476, 'samples': 466432, 'steps': 910, 'batch_loss/train': 1.0287680439651012} +12/21/2021 17:27:04 - INFO - codeparrot_training - Step 911: {'lr': 0.0004997308509629754, 'samples': 466944, 'steps': 911, 'batch_loss/train': 0.9808224998414516} +12/21/2021 17:27:15 - INFO - codeparrot_training - Step 912: {'lr': 0.0004997301192145677, 'samples': 467456, 'steps': 912, 'batch_loss/train': 0.8896083422005177} +12/21/2021 17:27:26 - INFO - codeparrot_training - Step 913: {'lr': 0.0004997293864733273, 'samples': 467968, 'steps': 913, 'batch_loss/train': 0.8836987032555044} +12/21/2021 17:27:38 - INFO - codeparrot_training - Step 914: {'lr': 0.000499728652739257, 'samples': 468480, 'steps': 914, 'batch_loss/train': 1.1301283570937812} +12/21/2021 17:27:49 - INFO - codeparrot_training - Step 915: {'lr': 0.0004997279180123601, 'samples': 468992, 'steps': 915, 'batch_loss/train': 0.8975952491164207} +12/21/2021 17:27:59 - INFO - codeparrot_training - Step 916: {'lr': 0.0004997271822926391, 'samples': 469504, 'steps': 916, 'batch_loss/train': 0.8759130109101534} +12/21/2021 17:28:10 - INFO - codeparrot_training - Step 917: {'lr': 0.0004997264455800974, 'samples': 470016, 'steps': 917, 'batch_loss/train': 0.9435654133558273} +12/21/2021 17:28:22 - INFO - codeparrot_training - Step 918: {'lr': 0.0004997257078747374, 'samples': 470528, 'steps': 918, 'batch_loss/train': 0.978057200089097} +12/21/2021 17:28:33 - INFO - codeparrot_training - Step 919: {'lr': 0.0004997249691765622, 'samples': 471040, 'steps': 919, 'batch_loss/train': 0.8530536172911525} +12/21/2021 17:28:43 - INFO - codeparrot_training - Step 920: {'lr': 0.0004997242294855751, 'samples': 471552, 'steps': 920, 'batch_loss/train': 0.8414009558036923} +12/21/2021 17:28:56 - INFO - codeparrot_training - Step 921: {'lr': 0.0004997234888017786, 'samples': 472064, 'steps': 921, 'batch_loss/train': 0.9589731092564762} +12/21/2021 17:29:07 - INFO - codeparrot_training - Step 922: {'lr': 0.0004997227471251758, 'samples': 472576, 'steps': 922, 'batch_loss/train': 0.8600756211671978} +12/21/2021 17:29:17 - INFO - codeparrot_training - Step 923: {'lr': 0.0004997220044557696, 'samples': 473088, 'steps': 923, 'batch_loss/train': 0.8780028335750103} +12/21/2021 17:29:29 - INFO - codeparrot_training - Step 924: {'lr': 0.0004997212607935631, 'samples': 473600, 'steps': 924, 'batch_loss/train': 0.8099083518609405} +12/21/2021 17:29:40 - INFO - 
codeparrot_training - Step 925: {'lr': 0.0004997205161385591, 'samples': 474112, 'steps': 925, 'batch_loss/train': 0.8519788309931755} +12/21/2021 17:29:50 - INFO - codeparrot_training - Step 926: {'lr': 0.0004997197704907607, 'samples': 474624, 'steps': 926, 'batch_loss/train': 0.8584920326247811} +12/21/2021 17:30:01 - INFO - codeparrot_training - Step 927: {'lr': 0.0004997190238501708, 'samples': 475136, 'steps': 927, 'batch_loss/train': 0.8563522147014737} +12/21/2021 17:30:13 - INFO - codeparrot_training - Step 928: {'lr': 0.0004997182762167922, 'samples': 475648, 'steps': 928, 'batch_loss/train': 0.9796495949849486} +12/21/2021 17:30:24 - INFO - codeparrot_training - Step 929: {'lr': 0.0004997175275906281, 'samples': 476160, 'steps': 929, 'batch_loss/train': 0.9414922697469592} +12/21/2021 17:30:34 - INFO - codeparrot_training - Step 930: {'lr': 0.0004997167779716814, 'samples': 476672, 'steps': 930, 'batch_loss/train': 0.9104800587520003} +12/21/2021 17:30:46 - INFO - codeparrot_training - Step 931: {'lr': 0.0004997160273599551, 'samples': 477184, 'steps': 931, 'batch_loss/train': 0.9091139989905059} +12/21/2021 17:30:57 - INFO - codeparrot_training - Step 932: {'lr': 0.0004997152757554522, 'samples': 477696, 'steps': 932, 'batch_loss/train': 0.9514064388349652} +12/21/2021 17:31:07 - INFO - codeparrot_training - Step 933: {'lr': 0.0004997145231581756, 'samples': 478208, 'steps': 933, 'batch_loss/train': 0.922889971639961} +12/21/2021 17:31:20 - INFO - codeparrot_training - Step 934: {'lr': 0.0004997137695681283, 'samples': 478720, 'steps': 934, 'batch_loss/train': 0.9761518789455295} +12/21/2021 17:31:31 - INFO - codeparrot_training - Step 935: {'lr': 0.0004997130149853134, 'samples': 479232, 'steps': 935, 'batch_loss/train': 0.7854829244315624} +12/21/2021 17:31:41 - INFO - codeparrot_training - Step 936: {'lr': 0.0004997122594097338, 'samples': 479744, 'steps': 936, 'batch_loss/train': 1.0057788996491581} +12/21/2021 17:31:52 - INFO - codeparrot_training - Step 937: {'lr': 0.0004997115028413926, 'samples': 480256, 'steps': 937, 'batch_loss/train': 1.1213493403047323} +12/21/2021 17:32:04 - INFO - codeparrot_training - Step 938: {'lr': 0.0004997107452802927, 'samples': 480768, 'steps': 938, 'batch_loss/train': 0.7906994260847569} +12/21/2021 17:32:15 - INFO - codeparrot_training - Step 939: {'lr': 0.0004997099867264372, 'samples': 481280, 'steps': 939, 'batch_loss/train': 0.959058933891356} +12/21/2021 17:32:25 - INFO - codeparrot_training - Step 940: {'lr': 0.0004997092271798291, 'samples': 481792, 'steps': 940, 'batch_loss/train': 0.8127758912742138} +12/21/2021 17:32:37 - INFO - codeparrot_training - Step 941: {'lr': 0.0004997084666404713, 'samples': 482304, 'steps': 941, 'batch_loss/train': 0.9607830392196774} +12/21/2021 17:32:48 - INFO - codeparrot_training - Step 942: {'lr': 0.0004997077051083669, 'samples': 482816, 'steps': 942, 'batch_loss/train': 0.9080006247386336} +12/21/2021 17:32:58 - INFO - codeparrot_training - Step 943: {'lr': 0.0004997069425835191, 'samples': 483328, 'steps': 943, 'batch_loss/train': 0.9566507814452052} +12/21/2021 17:33:11 - INFO - codeparrot_training - Step 944: {'lr': 0.0004997061790659307, 'samples': 483840, 'steps': 944, 'batch_loss/train': 1.0351978112012148} +12/21/2021 17:33:22 - INFO - codeparrot_training - Step 945: {'lr': 0.0004997054145556048, 'samples': 484352, 'steps': 945, 'batch_loss/train': 0.9912108490243554} +12/21/2021 17:33:32 - INFO - codeparrot_training - Step 946: {'lr': 0.0004997046490525446, 'samples': 484864, 'steps': 
946, 'batch_loss/train': 0.9437259715050459} +12/21/2021 17:33:45 - INFO - codeparrot_training - Step 947: {'lr': 0.0004997038825567528, 'samples': 485376, 'steps': 947, 'batch_loss/train': 0.9389331797137856} +12/21/2021 17:33:55 - INFO - codeparrot_training - Step 948: {'lr': 0.0004997031150682328, 'samples': 485888, 'steps': 948, 'batch_loss/train': 0.7905911942943931} +12/21/2021 17:34:06 - INFO - codeparrot_training - Step 949: {'lr': 0.0004997023465869874, 'samples': 486400, 'steps': 949, 'batch_loss/train': 0.9152175458148122} +12/21/2021 17:34:16 - INFO - codeparrot_training - Step 950: {'lr': 0.0004997015771130198, 'samples': 486912, 'steps': 950, 'batch_loss/train': 1.284138262271881} +12/21/2021 17:34:29 - INFO - codeparrot_training - Step 951: {'lr': 0.000499700806646333, 'samples': 487424, 'steps': 951, 'batch_loss/train': 0.8678724225610495} +12/21/2021 17:34:40 - INFO - codeparrot_training - Step 952: {'lr': 0.00049970003518693, 'samples': 487936, 'steps': 952, 'batch_loss/train': 0.8930850867182016} +12/21/2021 17:34:50 - INFO - codeparrot_training - Step 953: {'lr': 0.0004996992627348141, 'samples': 488448, 'steps': 953, 'batch_loss/train': 0.9073094232007861} +12/21/2021 17:35:02 - INFO - codeparrot_training - Step 954: {'lr': 0.0004996984892899882, 'samples': 488960, 'steps': 954, 'batch_loss/train': 1.0472514182329178} +12/21/2021 17:35:13 - INFO - codeparrot_training - Step 955: {'lr': 0.0004996977148524553, 'samples': 489472, 'steps': 955, 'batch_loss/train': 0.9209820367395878} +12/21/2021 17:35:23 - INFO - codeparrot_training - Step 956: {'lr': 0.0004996969394222186, 'samples': 489984, 'steps': 956, 'batch_loss/train': 0.8026992455124855} +12/21/2021 17:35:35 - INFO - codeparrot_training - Step 957: {'lr': 0.0004996961629992811, 'samples': 490496, 'steps': 957, 'batch_loss/train': 0.9692586734890938} +12/21/2021 17:35:46 - INFO - codeparrot_training - Step 958: {'lr': 0.000499695385583646, 'samples': 491008, 'steps': 958, 'batch_loss/train': 0.9362279893830419} +12/21/2021 17:35:57 - INFO - codeparrot_training - Step 959: {'lr': 0.0004996946071753164, 'samples': 491520, 'steps': 959, 'batch_loss/train': 0.9494533007964492} +12/21/2021 17:36:07 - INFO - codeparrot_training - Step 960: {'lr': 0.0004996938277742951, 'samples': 492032, 'steps': 960, 'batch_loss/train': 0.8683849526569247} +12/21/2021 17:36:20 - INFO - codeparrot_training - Step 961: {'lr': 0.0004996930473805856, 'samples': 492544, 'steps': 961, 'batch_loss/train': 1.2063287291675806} +12/21/2021 17:36:30 - INFO - codeparrot_training - Step 962: {'lr': 0.0004996922659941908, 'samples': 493056, 'steps': 962, 'batch_loss/train': 0.8595601376146078} +12/21/2021 17:36:41 - INFO - codeparrot_training - Step 963: {'lr': 0.0004996914836151138, 'samples': 493568, 'steps': 963, 'batch_loss/train': 0.9041547570377588} +12/21/2021 17:36:53 - INFO - codeparrot_training - Step 964: {'lr': 0.0004996907002433578, 'samples': 494080, 'steps': 964, 'batch_loss/train': 0.8873012363910675} +12/21/2021 17:37:04 - INFO - codeparrot_training - Step 965: {'lr': 0.0004996899158789258, 'samples': 494592, 'steps': 965, 'batch_loss/train': 0.8391993469558656} +12/21/2021 17:37:14 - INFO - codeparrot_training - Step 966: {'lr': 0.000499689130521821, 'samples': 495104, 'steps': 966, 'batch_loss/train': 0.9376294063404202} +12/21/2021 17:37:26 - INFO - codeparrot_training - Step 967: {'lr': 0.0004996883441720464, 'samples': 495616, 'steps': 967, 'batch_loss/train': 1.32773732021451} +12/21/2021 17:37:37 - INFO - codeparrot_training - 
Step 968: {'lr': 0.0004996875568296053, 'samples': 496128, 'steps': 968, 'batch_loss/train': 0.9905015528202057} +12/21/2021 17:37:47 - INFO - codeparrot_training - Step 969: {'lr': 0.0004996867684945008, 'samples': 496640, 'steps': 969, 'batch_loss/train': 0.8725618738681078} +12/21/2021 17:37:58 - INFO - codeparrot_training - Step 970: {'lr': 0.0004996859791667359, 'samples': 497152, 'steps': 970, 'batch_loss/train': 1.0036595575511456} +12/21/2021 17:38:10 - INFO - codeparrot_training - Step 971: {'lr': 0.0004996851888463138, 'samples': 497664, 'steps': 971, 'batch_loss/train': 0.8835762604139745} +12/21/2021 17:38:21 - INFO - codeparrot_training - Step 972: {'lr': 0.0004996843975332378, 'samples': 498176, 'steps': 972, 'batch_loss/train': 1.055220203474164} +12/21/2021 17:38:31 - INFO - codeparrot_training - Step 973: {'lr': 0.0004996836052275109, 'samples': 498688, 'steps': 973, 'batch_loss/train': 0.9634850195143372} +12/21/2021 17:38:44 - INFO - codeparrot_training - Step 974: {'lr': 0.0004996828119291361, 'samples': 499200, 'steps': 974, 'batch_loss/train': 0.8381894044578075} +12/21/2021 17:38:55 - INFO - codeparrot_training - Step 975: {'lr': 0.0004996820176381169, 'samples': 499712, 'steps': 975, 'batch_loss/train': 0.9537183176726103} +12/21/2021 17:39:06 - INFO - codeparrot_training - Step 976: {'lr': 0.0004996812223544562, 'samples': 500224, 'steps': 976, 'batch_loss/train': 0.934873765334487} +12/21/2021 17:39:18 - INFO - codeparrot_training - Step 977: {'lr': 0.0004996804260781572, 'samples': 500736, 'steps': 977, 'batch_loss/train': 0.8153394656255841} +12/21/2021 17:39:28 - INFO - codeparrot_training - Step 978: {'lr': 0.0004996796288092232, 'samples': 501248, 'steps': 978, 'batch_loss/train': 0.9296532776206732} +12/21/2021 17:39:39 - INFO - codeparrot_training - Step 979: {'lr': 0.0004996788305476572, 'samples': 501760, 'steps': 979, 'batch_loss/train': 0.9139515142887831} +12/21/2021 17:39:51 - INFO - codeparrot_training - Step 980: {'lr': 0.0004996780312934623, 'samples': 502272, 'steps': 980, 'batch_loss/train': 0.9188459692522883} +12/21/2021 17:40:02 - INFO - codeparrot_training - Step 981: {'lr': 0.0004996772310466421, 'samples': 502784, 'steps': 981, 'batch_loss/train': 0.8332904735580087} +12/21/2021 17:40:13 - INFO - codeparrot_training - Step 982: {'lr': 0.0004996764298071993, 'samples': 503296, 'steps': 982, 'batch_loss/train': 0.9159372635185719} +12/21/2021 17:40:23 - INFO - codeparrot_training - Step 983: {'lr': 0.0004996756275751374, 'samples': 503808, 'steps': 983, 'batch_loss/train': 0.8658452806994319} +12/21/2021 17:40:35 - INFO - codeparrot_training - Step 984: {'lr': 0.0004996748243504594, 'samples': 504320, 'steps': 984, 'batch_loss/train': 1.006726504303515} +12/21/2021 17:40:46 - INFO - codeparrot_training - Step 985: {'lr': 0.0004996740201331687, 'samples': 504832, 'steps': 985, 'batch_loss/train': 0.9890728034079075} +12/21/2021 17:40:56 - INFO - codeparrot_training - Step 986: {'lr': 0.0004996732149232683, 'samples': 505344, 'steps': 986, 'batch_loss/train': 0.8526571863330901} +12/21/2021 17:41:08 - INFO - codeparrot_training - Step 987: {'lr': 0.0004996724087207614, 'samples': 505856, 'steps': 987, 'batch_loss/train': 0.9746766141615808} +12/21/2021 17:41:19 - INFO - codeparrot_training - Step 988: {'lr': 0.0004996716015256514, 'samples': 506368, 'steps': 988, 'batch_loss/train': 0.8539458839222789} +12/21/2021 17:41:30 - INFO - codeparrot_training - Step 989: {'lr': 0.0004996707933379413, 'samples': 506880, 'steps': 989, 'batch_loss/train': 
0.9388692257925868} +12/21/2021 17:41:42 - INFO - codeparrot_training - Step 990: {'lr': 0.0004996699841576343, 'samples': 507392, 'steps': 990, 'batch_loss/train': 0.803133706562221} +12/21/2021 17:41:53 - INFO - codeparrot_training - Step 991: {'lr': 0.0004996691739847338, 'samples': 507904, 'steps': 991, 'batch_loss/train': 1.0021716505289078} +12/21/2021 17:42:03 - INFO - codeparrot_training - Step 992: {'lr': 0.000499668362819243, 'samples': 508416, 'steps': 992, 'batch_loss/train': 1.1028096228837967} +12/21/2021 17:42:14 - INFO - codeparrot_training - Step 993: {'lr': 0.0004996675506611651, 'samples': 508928, 'steps': 993, 'batch_loss/train': 0.9497033399529755} +12/21/2021 17:42:26 - INFO - codeparrot_training - Step 994: {'lr': 0.0004996667375105031, 'samples': 509440, 'steps': 994, 'batch_loss/train': 0.9420621516183019} +12/21/2021 17:42:37 - INFO - codeparrot_training - Step 995: {'lr': 0.0004996659233672606, 'samples': 509952, 'steps': 995, 'batch_loss/train': 0.9178862543776631} +12/21/2021 17:42:47 - INFO - codeparrot_training - Step 996: {'lr': 0.0004996651082314407, 'samples': 510464, 'steps': 996, 'batch_loss/train': 0.8658295627683401} +12/21/2021 17:42:59 - INFO - codeparrot_training - Step 997: {'lr': 0.0004996642921030464, 'samples': 510976, 'steps': 997, 'batch_loss/train': 0.8900093277916312} +12/21/2021 17:43:10 - INFO - codeparrot_training - Step 998: {'lr': 0.0004996634749820812, 'samples': 511488, 'steps': 998, 'batch_loss/train': 0.8054971592500806} +12/21/2021 17:43:20 - INFO - codeparrot_training - Step 999: {'lr': 0.0004996626568685484, 'samples': 512000, 'steps': 999, 'batch_loss/train': 0.9153449330478907} +12/21/2021 17:43:33 - INFO - codeparrot_training - Step 1000: {'lr': 0.000499661837762451, 'samples': 512512, 'steps': 1000, 'batch_loss/train': 0.9770119711756706} +12/21/2021 17:43:44 - INFO - codeparrot_training - Step 1001: {'lr': 0.0004996610176637925, 'samples': 513024, 'steps': 1001, 'batch_loss/train': 0.8659257451072335} +12/21/2021 17:43:54 - INFO - codeparrot_training - Step 1002: {'lr': 0.000499660196572576, 'samples': 513536, 'steps': 1002, 'batch_loss/train': 0.8665200937539339} +12/21/2021 17:44:05 - INFO - codeparrot_training - Step 1003: {'lr': 0.0004996593744888049, 'samples': 514048, 'steps': 1003, 'batch_loss/train': 0.8351971795782447} +12/21/2021 17:44:18 - INFO - codeparrot_training - Step 1004: {'lr': 0.0004996585514124823, 'samples': 514560, 'steps': 1004, 'batch_loss/train': 0.9125053212046623} +12/21/2021 17:44:28 - INFO - codeparrot_training - Step 1005: {'lr': 0.0004996577273436117, 'samples': 515072, 'steps': 1005, 'batch_loss/train': 1.001401899382472} +12/21/2021 17:44:39 - INFO - codeparrot_training - Step 1006: {'lr': 0.0004996569022821961, 'samples': 515584, 'steps': 1006, 'batch_loss/train': 1.61145652923733} +12/21/2021 17:44:51 - INFO - codeparrot_training - Step 1007: {'lr': 0.000499656076228239, 'samples': 516096, 'steps': 1007, 'batch_loss/train': 0.8782493341714144} +12/21/2021 17:45:01 - INFO - codeparrot_training - Step 1008: {'lr': 0.0004996552491817437, 'samples': 516608, 'steps': 1008, 'batch_loss/train': 0.9275859594345093} +12/21/2021 17:45:12 - INFO - codeparrot_training - Step 1009: {'lr': 0.0004996544211427132, 'samples': 517120, 'steps': 1009, 'batch_loss/train': 0.8456953288987279} +12/21/2021 17:45:24 - INFO - codeparrot_training - Step 1010: {'lr': 0.0004996535921111511, 'samples': 517632, 'steps': 1010, 'batch_loss/train': 0.9477060651406646} +12/21/2021 17:45:34 - INFO - codeparrot_training - 
Step 1011: {'lr': 0.0004996527620870606, 'samples': 518144, 'steps': 1011, 'batch_loss/train': 1.0580871552228928} +12/21/2021 17:45:45 - INFO - codeparrot_training - Step 1012: {'lr': 0.000499651931070445, 'samples': 518656, 'steps': 1012, 'batch_loss/train': 1.0251055853441358} +12/21/2021 17:45:58 - INFO - codeparrot_training - Step 1013: {'lr': 0.0004996510990613075, 'samples': 519168, 'steps': 1013, 'batch_loss/train': 0.9154602931812406} +12/21/2021 17:46:08 - INFO - codeparrot_training - Step 1014: {'lr': 0.0004996502660596516, 'samples': 519680, 'steps': 1014, 'batch_loss/train': 0.9666747972369194} +12/21/2021 17:46:19 - INFO - codeparrot_training - Step 1015: {'lr': 0.0004996494320654804, 'samples': 520192, 'steps': 1015, 'batch_loss/train': 0.8140917466953397} +12/21/2021 17:46:30 - INFO - codeparrot_training - Step 1016: {'lr': 0.0004996485970787973, 'samples': 520704, 'steps': 1016, 'batch_loss/train': 0.8170314254239202} +12/21/2021 17:46:42 - INFO - codeparrot_training - Step 1017: {'lr': 0.0004996477610996057, 'samples': 521216, 'steps': 1017, 'batch_loss/train': 0.9008469814434648} +12/21/2021 17:46:52 - INFO - codeparrot_training - Step 1018: {'lr': 0.0004996469241279089, 'samples': 521728, 'steps': 1018, 'batch_loss/train': 0.9004081226885319} +12/21/2021 17:47:03 - INFO - codeparrot_training - Step 1019: {'lr': 0.0004996460861637101, 'samples': 522240, 'steps': 1019, 'batch_loss/train': 0.8875172259286046} +12/21/2021 17:47:16 - INFO - codeparrot_training - Step 1020: {'lr': 0.0004996452472070128, 'samples': 522752, 'steps': 1020, 'batch_loss/train': 0.9761928748339415} +12/21/2021 17:47:26 - INFO - codeparrot_training - Step 1021: {'lr': 0.0004996444072578203, 'samples': 523264, 'steps': 1021, 'batch_loss/train': 1.049674330279231} +12/21/2021 17:47:37 - INFO - codeparrot_training - Step 1022: {'lr': 0.0004996435663161358, 'samples': 523776, 'steps': 1022, 'batch_loss/train': 0.8628543829545379} +12/21/2021 17:47:49 - INFO - codeparrot_training - Step 1023: {'lr': 0.0004996427243819627, 'samples': 524288, 'steps': 1023, 'batch_loss/train': 0.9352536166552454} +12/21/2021 17:47:59 - INFO - codeparrot_training - Step 1024: {'lr': 0.0004996418814553046, 'samples': 524800, 'steps': 1024, 'batch_loss/train': 0.9838276915252209} +12/21/2021 17:48:10 - INFO - codeparrot_training - Step 1025: {'lr': 0.0004996410375361645, 'samples': 525312, 'steps': 1025, 'batch_loss/train': 0.9914461933076382} +12/21/2021 17:48:21 - INFO - codeparrot_training - Step 1026: {'lr': 0.000499640192624546, 'samples': 525824, 'steps': 1026, 'batch_loss/train': 0.9910847740247846} +12/21/2021 17:48:33 - INFO - codeparrot_training - Step 1027: {'lr': 0.0004996393467204522, 'samples': 526336, 'steps': 1027, 'batch_loss/train': 0.9384469399228692} +12/21/2021 17:48:43 - INFO - codeparrot_training - Step 1028: {'lr': 0.0004996384998238867, 'samples': 526848, 'steps': 1028, 'batch_loss/train': 0.888360857963562} +12/21/2021 17:48:54 - INFO - codeparrot_training - Step 1029: {'lr': 0.0004996376519348527, 'samples': 527360, 'steps': 1029, 'batch_loss/train': 0.8908344563096762} +12/21/2021 17:49:06 - INFO - codeparrot_training - Step 1030: {'lr': 0.0004996368030533538, 'samples': 527872, 'steps': 1030, 'batch_loss/train': 0.9832698026439175} +12/21/2021 17:49:17 - INFO - codeparrot_training - Step 1031: {'lr': 0.0004996359531793931, 'samples': 528384, 'steps': 1031, 'batch_loss/train': 1.0083358958363533} +12/21/2021 17:49:28 - INFO - codeparrot_training - Step 1032: {'lr': 0.0004996351023129742, 'samples': 
528896, 'steps': 1032, 'batch_loss/train': 0.8692763391882181} +12/21/2021 17:49:40 - INFO - codeparrot_training - Step 1033: {'lr': 0.0004996342504541004, 'samples': 529408, 'steps': 1033, 'batch_loss/train': 0.8394672274589539} +12/21/2021 17:49:50 - INFO - codeparrot_training - Step 1034: {'lr': 0.0004996333976027749, 'samples': 529920, 'steps': 1034, 'batch_loss/train': 0.846665873657912} +12/21/2021 17:50:01 - INFO - codeparrot_training - Step 1035: {'lr': 0.0004996325437590014, 'samples': 530432, 'steps': 1035, 'batch_loss/train': 0.9387017646804452} +12/21/2021 17:50:12 - INFO - codeparrot_training - Step 1036: {'lr': 0.0004996316889227832, 'samples': 530944, 'steps': 1036, 'batch_loss/train': 0.907019299454987} +12/21/2021 17:50:24 - INFO - codeparrot_training - Step 1037: {'lr': 0.0004996308330941236, 'samples': 531456, 'steps': 1037, 'batch_loss/train': 0.8694336637854576} +12/21/2021 17:50:35 - INFO - codeparrot_training - Step 1038: {'lr': 0.0004996299762730261, 'samples': 531968, 'steps': 1038, 'batch_loss/train': 0.9105223082005978} +12/21/2021 17:50:45 - INFO - codeparrot_training - Step 1039: {'lr': 0.0004996291184594941, 'samples': 532480, 'steps': 1039, 'batch_loss/train': 0.935341777279973} +12/21/2021 17:50:57 - INFO - codeparrot_training - Step 1040: {'lr': 0.0004996282596535309, 'samples': 532992, 'steps': 1040, 'batch_loss/train': 0.9475723467767239} +12/21/2021 17:51:08 - INFO - codeparrot_training - Step 1041: {'lr': 0.0004996273998551401, 'samples': 533504, 'steps': 1041, 'batch_loss/train': 0.8299167403019965} +12/21/2021 17:51:19 - INFO - codeparrot_training - Step 1042: {'lr': 0.000499626539064325, 'samples': 534016, 'steps': 1042, 'batch_loss/train': 0.8731899745762348} +12/21/2021 17:51:31 - INFO - codeparrot_training - Step 1043: {'lr': 0.0004996256772810889, 'samples': 534528, 'steps': 1043, 'batch_loss/train': 1.0501099117100239} +12/21/2021 17:51:41 - INFO - codeparrot_training - Step 1044: {'lr': 0.0004996248145054355, 'samples': 535040, 'steps': 1044, 'batch_loss/train': 0.8032674202695489} +12/21/2021 17:51:52 - INFO - codeparrot_training - Step 1045: {'lr': 0.000499623950737368, 'samples': 535552, 'steps': 1045, 'batch_loss/train': 0.9964918354526162} +12/21/2021 17:52:02 - INFO - codeparrot_training - Step 1046: {'lr': 0.00049962308597689, 'samples': 536064, 'steps': 1046, 'batch_loss/train': 0.8173797582276165} +12/21/2021 17:52:14 - INFO - codeparrot_training - Step 1047: {'lr': 0.0004996222202240049, 'samples': 536576, 'steps': 1047, 'batch_loss/train': 0.9599733343347907} +12/21/2021 17:52:25 - INFO - codeparrot_training - Step 1048: {'lr': 0.0004996213534787159, 'samples': 537088, 'steps': 1048, 'batch_loss/train': 0.8657295554876328} +12/21/2021 17:52:36 - INFO - codeparrot_training - Step 1049: {'lr': 0.0004996204857410268, 'samples': 537600, 'steps': 1049, 'batch_loss/train': 0.9591828566044569} +12/21/2021 17:52:49 - INFO - codeparrot_training - Step 1050: {'lr': 0.0004996196170109409, 'samples': 538112, 'steps': 1050, 'batch_loss/train': 0.9471572078764439} +12/21/2021 17:52:59 - INFO - codeparrot_training - Step 1051: {'lr': 0.0004996187472884616, 'samples': 538624, 'steps': 1051, 'batch_loss/train': 0.64387660427019} +12/21/2021 17:53:10 - INFO - codeparrot_training - Step 1052: {'lr': 0.0004996178765735925, 'samples': 539136, 'steps': 1052, 'batch_loss/train': 0.8945763395167887} +12/21/2021 17:53:22 - INFO - codeparrot_training - Step 1053: {'lr': 0.000499617004866337, 'samples': 539648, 'steps': 1053, 'batch_loss/train': 
0.9317202400416136} +12/21/2021 17:53:32 - INFO - codeparrot_training - Step 1054: {'lr': 0.0004996161321666984, 'samples': 540160, 'steps': 1054, 'batch_loss/train': 0.8093121713027358} +12/21/2021 17:53:43 - INFO - codeparrot_training - Step 1055: {'lr': 0.0004996152584746805, 'samples': 540672, 'steps': 1055, 'batch_loss/train': 1.0307663017883897} +12/21/2021 17:53:55 - INFO - codeparrot_training - Step 1056: {'lr': 0.0004996143837902864, 'samples': 541184, 'steps': 1056, 'batch_loss/train': 0.9378761565312743} +12/21/2021 17:54:06 - INFO - codeparrot_training - Step 1057: {'lr': 0.0004996135081135199, 'samples': 541696, 'steps': 1057, 'batch_loss/train': 0.7767104115337133} +12/21/2021 17:54:16 - INFO - codeparrot_training - Step 1058: {'lr': 0.0004996126314443842, 'samples': 542208, 'steps': 1058, 'batch_loss/train': 0.9620478171855211} +12/21/2021 17:54:27 - INFO - codeparrot_training - Step 1059: {'lr': 0.0004996117537828831, 'samples': 542720, 'steps': 1059, 'batch_loss/train': 0.802382935769856} +12/21/2021 17:54:40 - INFO - codeparrot_training - Step 1060: {'lr': 0.0004996108751290198, 'samples': 543232, 'steps': 1060, 'batch_loss/train': 0.8702808013185859} +12/21/2021 17:54:50 - INFO - codeparrot_training - Step 1061: {'lr': 0.0004996099954827978, 'samples': 543744, 'steps': 1061, 'batch_loss/train': 0.9301826488226652} +12/21/2021 17:55:01 - INFO - codeparrot_training - Step 1062: {'lr': 0.0004996091148442209, 'samples': 544256, 'steps': 1062, 'batch_loss/train': 0.8478460044134408} +12/21/2021 17:55:13 - INFO - codeparrot_training - Step 1063: {'lr': 0.0004996082332132923, 'samples': 544768, 'steps': 1063, 'batch_loss/train': 0.8586355419829488} +12/21/2021 17:55:24 - INFO - codeparrot_training - Step 1064: {'lr': 0.0004996073505900157, 'samples': 545280, 'steps': 1064, 'batch_loss/train': 0.832656716927886} +12/21/2021 17:55:34 - INFO - codeparrot_training - Step 1065: {'lr': 0.0004996064669743945, 'samples': 545792, 'steps': 1065, 'batch_loss/train': 0.9472284931689501} +12/21/2021 17:55:47 - INFO - codeparrot_training - Step 1066: {'lr': 0.0004996055823664322, 'samples': 546304, 'steps': 1066, 'batch_loss/train': 0.8303265804424882} +12/21/2021 17:55:57 - INFO - codeparrot_training - Step 1067: {'lr': 0.0004996046967661324, 'samples': 546816, 'steps': 1067, 'batch_loss/train': 0.9168522106483579} +12/21/2021 17:56:08 - INFO - codeparrot_training - Step 1068: {'lr': 0.0004996038101734984, 'samples': 547328, 'steps': 1068, 'batch_loss/train': 0.8720788052305579} +12/21/2021 17:56:20 - INFO - codeparrot_training - Step 1069: {'lr': 0.0004996029225885341, 'samples': 547840, 'steps': 1069, 'batch_loss/train': 0.9396663988009095} +12/21/2021 17:56:31 - INFO - codeparrot_training - Step 1070: {'lr': 0.0004996020340112427, 'samples': 548352, 'steps': 1070, 'batch_loss/train': 0.8944519702345133} +12/21/2021 17:56:42 - INFO - codeparrot_training - Step 1071: {'lr': 0.0004996011444416279, 'samples': 548864, 'steps': 1071, 'batch_loss/train': 0.8981921337544918} +12/21/2021 17:56:52 - INFO - codeparrot_training - Step 1072: {'lr': 0.0004996002538796933, 'samples': 549376, 'steps': 1072, 'batch_loss/train': 0.8412441415712237} +12/21/2021 17:57:04 - INFO - codeparrot_training - Step 1073: {'lr': 0.0004995993623254422, 'samples': 549888, 'steps': 1073, 'batch_loss/train': 0.8462172476574779} +12/21/2021 17:57:15 - INFO - codeparrot_training - Step 1074: {'lr': 0.0004995984697788784, 'samples': 550400, 'steps': 1074, 'batch_loss/train': 0.9077871618792415} +12/21/2021 17:57:26 - INFO - 
codeparrot_training - Step 1075: {'lr': 0.0004995975762400052, 'samples': 550912, 'steps': 1075, 'batch_loss/train': 0.7821190003305674} +12/21/2021 17:57:38 - INFO - codeparrot_training - Step 1076: {'lr': 0.0004995966817088265, 'samples': 551424, 'steps': 1076, 'batch_loss/train': 0.9895831868052483} +12/21/2021 17:57:49 - INFO - codeparrot_training - Step 1077: {'lr': 0.0004995957861853454, 'samples': 551936, 'steps': 1077, 'batch_loss/train': 0.851031650789082} +12/21/2021 17:57:59 - INFO - codeparrot_training - Step 1078: {'lr': 0.0004995948896695658, 'samples': 552448, 'steps': 1078, 'batch_loss/train': 0.8771517518907785} +12/21/2021 17:58:11 - INFO - codeparrot_training - Step 1079: {'lr': 0.0004995939921614911, 'samples': 552960, 'steps': 1079, 'batch_loss/train': 0.9658382544294} +12/21/2021 17:58:22 - INFO - codeparrot_training - Step 1080: {'lr': 0.000499593093661125, 'samples': 553472, 'steps': 1080, 'batch_loss/train': 0.8046531332656741} +12/21/2021 17:58:33 - INFO - codeparrot_training - Step 1081: {'lr': 0.0004995921941684709, 'samples': 553984, 'steps': 1081, 'batch_loss/train': 0.9565259134396911} +12/21/2021 17:58:43 - INFO - codeparrot_training - Step 1082: {'lr': 0.0004995912936835326, 'samples': 554496, 'steps': 1082, 'batch_loss/train': 0.7734261483419687} +12/21/2021 17:58:55 - INFO - codeparrot_training - Step 1083: {'lr': 0.0004995903922063135, 'samples': 555008, 'steps': 1083, 'batch_loss/train': 0.9082761164754629} +12/21/2021 17:59:06 - INFO - codeparrot_training - Step 1084: {'lr': 0.0004995894897368173, 'samples': 555520, 'steps': 1084, 'batch_loss/train': 0.9238338135182858} +12/21/2021 17:59:17 - INFO - codeparrot_training - Step 1085: {'lr': 0.0004995885862750474, 'samples': 556032, 'steps': 1085, 'batch_loss/train': 0.8493210365995765} +12/21/2021 17:59:29 - INFO - codeparrot_training - Step 1086: {'lr': 0.0004995876818210077, 'samples': 556544, 'steps': 1086, 'batch_loss/train': 0.9383063027635217} +12/21/2021 17:59:39 - INFO - codeparrot_training - Step 1087: {'lr': 0.0004995867763747015, 'samples': 557056, 'steps': 1087, 'batch_loss/train': 0.8494196771644056} +12/21/2021 17:59:50 - INFO - codeparrot_training - Step 1088: {'lr': 0.0004995858699361324, 'samples': 557568, 'steps': 1088, 'batch_loss/train': 0.8711388576775789} +12/21/2021 18:00:03 - INFO - codeparrot_training - Step 1089: {'lr': 0.0004995849625053044, 'samples': 558080, 'steps': 1089, 'batch_loss/train': 0.7956964373588562} +12/21/2021 18:00:14 - INFO - codeparrot_training - Step 1090: {'lr': 0.0004995840540822207, 'samples': 558592, 'steps': 1090, 'batch_loss/train': 0.6550719826482236} +12/21/2021 18:00:24 - INFO - codeparrot_training - Step 1091: {'lr': 0.000499583144666885, 'samples': 559104, 'steps': 1091, 'batch_loss/train': 0.8832875224761665} +12/21/2021 18:00:35 - INFO - codeparrot_training - Step 1092: {'lr': 0.0004995822342593009, 'samples': 559616, 'steps': 1092, 'batch_loss/train': 0.9499610029160976} +12/21/2021 18:00:47 - INFO - codeparrot_training - Step 1093: {'lr': 0.0004995813228594723, 'samples': 560128, 'steps': 1093, 'batch_loss/train': 0.9427025811746716} +12/21/2021 18:00:58 - INFO - codeparrot_training - Step 1094: {'lr': 0.0004995804104674024, 'samples': 560640, 'steps': 1094, 'batch_loss/train': 0.9450205909088254} +12/21/2021 18:01:08 - INFO - codeparrot_training - Step 1095: {'lr': 0.0004995794970830951, 'samples': 561152, 'steps': 1095, 'batch_loss/train': 0.7671683279331774} +12/21/2021 18:01:21 - INFO - codeparrot_training - Step 1096: {'lr': 
0.0004995785827065539, 'samples': 561664, 'steps': 1096, 'batch_loss/train': 0.8087856397032738} +12/21/2021 18:01:32 - INFO - codeparrot_training - Step 1097: {'lr': 0.0004995776673377825, 'samples': 562176, 'steps': 1097, 'batch_loss/train': 0.8797249929048121} +12/21/2021 18:01:42 - INFO - codeparrot_training - Step 1098: {'lr': 0.0004995767509767846, 'samples': 562688, 'steps': 1098, 'batch_loss/train': 0.9031044449657202} +12/21/2021 18:01:55 - INFO - codeparrot_training - Step 1099: {'lr': 0.0004995758336235638, 'samples': 563200, 'steps': 1099, 'batch_loss/train': 0.9182086293585598} +12/21/2021 18:02:06 - INFO - codeparrot_training - Step 1100: {'lr': 0.0004995749152781236, 'samples': 563712, 'steps': 1100, 'batch_loss/train': 0.8950230367481709} +12/21/2021 18:02:17 - INFO - codeparrot_training - Step 1101: {'lr': 0.0004995739959404678, 'samples': 564224, 'steps': 1101, 'batch_loss/train': 0.9475566386245191} +12/21/2021 18:02:27 - INFO - codeparrot_training - Step 1102: {'lr': 0.0004995730756106, 'samples': 564736, 'steps': 1102, 'batch_loss/train': 1.1874813102185726} +12/21/2021 18:02:39 - INFO - codeparrot_training - Step 1103: {'lr': 0.000499572154288524, 'samples': 565248, 'steps': 1103, 'batch_loss/train': 1.7612678539007902} +12/21/2021 18:02:50 - INFO - codeparrot_training - Step 1104: {'lr': 0.0004995712319742433, 'samples': 565760, 'steps': 1104, 'batch_loss/train': 1.2077126186341047} +12/21/2021 18:03:00 - INFO - codeparrot_training - Step 1105: {'lr': 0.0004995703086677617, 'samples': 566272, 'steps': 1105, 'batch_loss/train': 0.9749624468386173} +12/21/2021 18:03:13 - INFO - codeparrot_training - Step 1106: {'lr': 0.0004995693843690826, 'samples': 566784, 'steps': 1106, 'batch_loss/train': 0.9623480718582869} +12/21/2021 18:03:24 - INFO - codeparrot_training - Step 1107: {'lr': 0.0004995684590782101, 'samples': 567296, 'steps': 1107, 'batch_loss/train': 0.9023960018530488} +12/21/2021 18:03:34 - INFO - codeparrot_training - Step 1108: {'lr': 0.0004995675327951474, 'samples': 567808, 'steps': 1108, 'batch_loss/train': 0.8568100268021226} +12/21/2021 18:03:46 - INFO - codeparrot_training - Step 1109: {'lr': 0.0004995666055198986, 'samples': 568320, 'steps': 1109, 'batch_loss/train': 0.9364044480025768} +12/21/2021 18:03:57 - INFO - codeparrot_training - Step 1110: {'lr': 0.000499565677252467, 'samples': 568832, 'steps': 1110, 'batch_loss/train': 0.8646228360012174} +12/21/2021 18:04:07 - INFO - codeparrot_training - Step 1111: {'lr': 0.0004995647479928566, 'samples': 569344, 'steps': 1111, 'batch_loss/train': 0.9255408979952335} +12/21/2021 18:04:18 - INFO - codeparrot_training - Step 1112: {'lr': 0.0004995638177410711, 'samples': 569856, 'steps': 1112, 'batch_loss/train': 1.0486212018877268} +12/21/2021 18:04:30 - INFO - codeparrot_training - Step 1113: {'lr': 0.000499562886497114, 'samples': 570368, 'steps': 1113, 'batch_loss/train': 1.0237348061054945} +12/21/2021 18:04:41 - INFO - codeparrot_training - Step 1114: {'lr': 0.0004995619542609892, 'samples': 570880, 'steps': 1114, 'batch_loss/train': 0.8184514958411455} +12/21/2021 18:04:52 - INFO - codeparrot_training - Step 1115: {'lr': 0.0004995610210327001, 'samples': 571392, 'steps': 1115, 'batch_loss/train': 0.865839023143053} +12/21/2021 18:05:04 - INFO - codeparrot_training - Step 1116: {'lr': 0.0004995600868122508, 'samples': 571904, 'steps': 1116, 'batch_loss/train': 0.8920089188031852} +12/21/2021 18:05:15 - INFO - codeparrot_training - Step 1117: {'lr': 0.0004995591515996447, 'samples': 572416, 'steps': 
1117, 'batch_loss/train': 0.9018277155701071} +12/21/2021 18:05:25 - INFO - codeparrot_training - Step 1118: {'lr': 0.0004995582153948857, 'samples': 572928, 'steps': 1118, 'batch_loss/train': 1.4468217734247446} +12/21/2021 18:05:37 - INFO - codeparrot_training - Step 1119: {'lr': 0.0004995572781979774, 'samples': 573440, 'steps': 1119, 'batch_loss/train': 0.9325792817398906} +12/21/2021 18:05:48 - INFO - codeparrot_training - Step 1120: {'lr': 0.0004995563400089238, 'samples': 573952, 'steps': 1120, 'batch_loss/train': 0.9161987453699112} +12/21/2021 18:05:59 - INFO - codeparrot_training - Step 1121: {'lr': 0.0004995554008277283, 'samples': 574464, 'steps': 1121, 'batch_loss/train': 0.9810406556352973} +12/21/2021 18:06:09 - INFO - codeparrot_training - Step 1122: {'lr': 0.0004995544606543947, 'samples': 574976, 'steps': 1122, 'batch_loss/train': 0.9154063882306218} +12/21/2021 18:06:21 - INFO - codeparrot_training - Step 1123: {'lr': 0.0004995535194889267, 'samples': 575488, 'steps': 1123, 'batch_loss/train': 0.7840563806239516} +12/21/2021 18:06:32 - INFO - codeparrot_training - Step 1124: {'lr': 0.0004995525773313283, 'samples': 576000, 'steps': 1124, 'batch_loss/train': 0.9391268035396934} +12/21/2021 18:06:42 - INFO - codeparrot_training - Step 1125: {'lr': 0.000499551634181603, 'samples': 576512, 'steps': 1125, 'batch_loss/train': 0.9383440567180514} +12/21/2021 18:06:55 - INFO - codeparrot_training - Step 1126: {'lr': 0.0004995506900397547, 'samples': 577024, 'steps': 1126, 'batch_loss/train': 0.9530966784805059} +12/21/2021 18:07:06 - INFO - codeparrot_training - Step 1127: {'lr': 0.0004995497449057871, 'samples': 577536, 'steps': 1127, 'batch_loss/train': 0.988777719438076} +12/21/2021 18:07:16 - INFO - codeparrot_training - Step 1128: {'lr': 0.0004995487987797038, 'samples': 578048, 'steps': 1128, 'batch_loss/train': 0.8927262420766056} +12/21/2021 18:07:29 - INFO - codeparrot_training - Step 1129: {'lr': 0.0004995478516615088, 'samples': 578560, 'steps': 1129, 'batch_loss/train': 0.9784006746485829} +12/21/2021 18:07:39 - INFO - codeparrot_training - Step 1130: {'lr': 0.0004995469035512058, 'samples': 579072, 'steps': 1130, 'batch_loss/train': 0.866100194863975} +12/21/2021 18:07:50 - INFO - codeparrot_training - Step 1131: {'lr': 0.0004995459544487984, 'samples': 579584, 'steps': 1131, 'batch_loss/train': 0.7437573652714491} +12/21/2021 18:08:00 - INFO - codeparrot_training - Step 1132: {'lr': 0.0004995450043542906, 'samples': 580096, 'steps': 1132, 'batch_loss/train': 0.9188604271039367} +12/21/2021 18:08:12 - INFO - codeparrot_training - Step 1133: {'lr': 0.0004995440532676861, 'samples': 580608, 'steps': 1133, 'batch_loss/train': 0.9555202294141054} +12/21/2021 18:08:23 - INFO - codeparrot_training - Step 1134: {'lr': 0.0004995431011889887, 'samples': 581120, 'steps': 1134, 'batch_loss/train': 0.9115732107311487} +12/21/2021 18:08:34 - INFO - codeparrot_training - Step 1135: {'lr': 0.0004995421481182021, 'samples': 581632, 'steps': 1135, 'batch_loss/train': 0.8363559860736132} +12/21/2021 18:08:46 - INFO - codeparrot_training - Step 1136: {'lr': 0.0004995411940553301, 'samples': 582144, 'steps': 1136, 'batch_loss/train': 0.8803338319994509} +12/21/2021 18:08:57 - INFO - codeparrot_training - Step 1137: {'lr': 0.0004995402390003767, 'samples': 582656, 'steps': 1137, 'batch_loss/train': 0.9504870409145951} +12/21/2021 18:09:08 - INFO - codeparrot_training - Step 1138: {'lr': 0.0004995392829533453, 'samples': 583168, 'steps': 1138, 'batch_loss/train': 0.8740635262802243} 
+12/21/2021 18:09:20 - INFO - codeparrot_training - Step 1139: {'lr': 0.0004995383259142401, 'samples': 583680, 'steps': 1139, 'batch_loss/train': 0.9069474171847105} +12/21/2021 18:09:30 - INFO - codeparrot_training - Step 1140: {'lr': 0.0004995373678830647, 'samples': 584192, 'steps': 1140, 'batch_loss/train': 0.6965680932626128} +12/21/2021 18:09:41 - INFO - codeparrot_training - Step 1141: {'lr': 0.000499536408859823, 'samples': 584704, 'steps': 1141, 'batch_loss/train': 0.8678837665356696} +12/21/2021 18:09:53 - INFO - codeparrot_training - Step 1142: {'lr': 0.0004995354488445186, 'samples': 585216, 'steps': 1142, 'batch_loss/train': 0.9365042299032211} +12/21/2021 18:10:04 - INFO - codeparrot_training - Step 1143: {'lr': 0.0004995344878371556, 'samples': 585728, 'steps': 1143, 'batch_loss/train': 0.952761891297996} +12/21/2021 18:10:14 - INFO - codeparrot_training - Step 1144: {'lr': 0.0004995335258377377, 'samples': 586240, 'steps': 1144, 'batch_loss/train': 0.9256991683505476} +12/21/2021 18:10:25 - INFO - codeparrot_training - Step 1145: {'lr': 0.0004995325628462687, 'samples': 586752, 'steps': 1145, 'batch_loss/train': 0.9593670899048448} +12/21/2021 18:10:38 - INFO - codeparrot_training - Step 1146: {'lr': 0.0004995315988627525, 'samples': 587264, 'steps': 1146, 'batch_loss/train': 0.8608019361272454} +12/21/2021 18:10:48 - INFO - codeparrot_training - Step 1147: {'lr': 0.0004995306338871928, 'samples': 587776, 'steps': 1147, 'batch_loss/train': 0.9922945331782103} +12/21/2021 18:10:59 - INFO - codeparrot_training - Step 1148: {'lr': 0.0004995296679195935, 'samples': 588288, 'steps': 1148, 'batch_loss/train': 0.8341850200667977} +12/21/2021 18:11:11 - INFO - codeparrot_training - Step 1149: {'lr': 0.0004995287009599585, 'samples': 588800, 'steps': 1149, 'batch_loss/train': 0.8272012053057551} +12/21/2021 18:11:21 - INFO - codeparrot_training - Step 1150: {'lr': 0.0004995277330082916, 'samples': 589312, 'steps': 1150, 'batch_loss/train': 0.8838570918887854} +12/21/2021 18:11:32 - INFO - codeparrot_training - Step 1151: {'lr': 0.0004995267640645967, 'samples': 589824, 'steps': 1151, 'batch_loss/train': 0.8952997587621212} +12/21/2021 18:11:45 - INFO - codeparrot_training - Step 1152: {'lr': 0.0004995257941288776, 'samples': 590336, 'steps': 1152, 'batch_loss/train': 0.8195746010169387} +12/21/2021 18:11:56 - INFO - codeparrot_training - Step 1153: {'lr': 0.0004995248232011381, 'samples': 590848, 'steps': 1153, 'batch_loss/train': 0.9236949887126684} +12/21/2021 18:12:06 - INFO - codeparrot_training - Step 1154: {'lr': 0.0004995238512813821, 'samples': 591360, 'steps': 1154, 'batch_loss/train': 0.9074621677864343} +12/21/2021 18:12:17 - INFO - codeparrot_training - Step 1155: {'lr': 0.0004995228783696136, 'samples': 591872, 'steps': 1155, 'batch_loss/train': 0.8963299728929996} +12/21/2021 18:12:30 - INFO - codeparrot_training - Step 1156: {'lr': 0.0004995219044658363, 'samples': 592384, 'steps': 1156, 'batch_loss/train': 0.8924400648102164} +12/21/2021 18:12:40 - INFO - codeparrot_training - Step 1157: {'lr': 0.000499520929570054, 'samples': 592896, 'steps': 1157, 'batch_loss/train': 0.9086917554959655} +12/21/2021 18:12:51 - INFO - codeparrot_training - Step 1158: {'lr': 0.0004995199536822709, 'samples': 593408, 'steps': 1158, 'batch_loss/train': 1.4081650339066982} +12/21/2021 18:13:03 - INFO - codeparrot_training - Step 1159: {'lr': 0.0004995189768024905, 'samples': 593920, 'steps': 1159, 'batch_loss/train': 0.9024025881662965} +12/21/2021 18:13:13 - INFO - codeparrot_training 
- Step 1160: {'lr': 0.0004995179989307169, 'samples': 594432, 'steps': 1160, 'batch_loss/train': 0.8904345347546041} +12/21/2021 18:13:24 - INFO - codeparrot_training - Step 1161: {'lr': 0.0004995170200669541, 'samples': 594944, 'steps': 1161, 'batch_loss/train': 0.7745180642232299} +12/21/2021 18:13:35 - INFO - codeparrot_training - Step 1162: {'lr': 0.0004995160402112057, 'samples': 595456, 'steps': 1162, 'batch_loss/train': 1.1850290074944496} +12/21/2021 18:13:47 - INFO - codeparrot_training - Step 1163: {'lr': 0.0004995150593634758, 'samples': 595968, 'steps': 1163, 'batch_loss/train': 0.950513401068747} +12/21/2021 18:13:57 - INFO - codeparrot_training - Step 1164: {'lr': 0.0004995140775237682, 'samples': 596480, 'steps': 1164, 'batch_loss/train': 0.9199875956401229} +12/21/2021 18:14:08 - INFO - codeparrot_training - Step 1165: {'lr': 0.0004995130946920868, 'samples': 596992, 'steps': 1165, 'batch_loss/train': 1.0683848885819316} +12/21/2021 18:14:21 - INFO - codeparrot_training - Step 1166: {'lr': 0.0004995121108684357, 'samples': 597504, 'steps': 1166, 'batch_loss/train': 0.9540066784247756} +12/21/2021 18:14:31 - INFO - codeparrot_training - Step 1167: {'lr': 0.0004995111260528185, 'samples': 598016, 'steps': 1167, 'batch_loss/train': 0.930673093535006} +12/21/2021 18:14:42 - INFO - codeparrot_training - Step 1168: {'lr': 0.0004995101402452394, 'samples': 598528, 'steps': 1168, 'batch_loss/train': 0.9216750035993755} +12/21/2021 18:14:54 - INFO - codeparrot_training - Step 1169: {'lr': 0.000499509153445702, 'samples': 599040, 'steps': 1169, 'batch_loss/train': 0.8992116237059236} +12/21/2021 18:15:05 - INFO - codeparrot_training - Step 1170: {'lr': 0.0004995081656542106, 'samples': 599552, 'steps': 1170, 'batch_loss/train': 0.9409580049104989} +12/21/2021 18:15:15 - INFO - codeparrot_training - Step 1171: {'lr': 0.0004995071768707688, 'samples': 600064, 'steps': 1171, 'batch_loss/train': 0.8488235464319587} +12/21/2021 18:15:28 - INFO - codeparrot_training - Step 1172: {'lr': 0.0004995061870953809, 'samples': 600576, 'steps': 1172, 'batch_loss/train': 0.966129869222641} +12/21/2021 18:15:38 - INFO - codeparrot_training - Step 1173: {'lr': 0.0004995051963280504, 'samples': 601088, 'steps': 1173, 'batch_loss/train': 0.9478419600054622} +12/21/2021 18:15:49 - INFO - codeparrot_training - Step 1174: {'lr': 0.0004995042045687815, 'samples': 601600, 'steps': 1174, 'batch_loss/train': 0.7674035511445254} +12/21/2021 18:15:59 - INFO - codeparrot_training - Step 1175: {'lr': 0.0004995032118175782, 'samples': 602112, 'steps': 1175, 'batch_loss/train': 0.8185501310508698} +12/21/2021 18:16:12 - INFO - codeparrot_training - Step 1176: {'lr': 0.0004995022180744442, 'samples': 602624, 'steps': 1176, 'batch_loss/train': 0.9788629738613963} +12/21/2021 18:16:23 - INFO - codeparrot_training - Step 1177: {'lr': 0.0004995012233393836, 'samples': 603136, 'steps': 1177, 'batch_loss/train': 0.9133735606446862} +12/21/2021 18:16:34 - INFO - codeparrot_training - Step 1178: {'lr': 0.0004995002276124004, 'samples': 603648, 'steps': 1178, 'batch_loss/train': 1.0913109579123557} +12/21/2021 18:16:46 - INFO - codeparrot_training - Step 1179: {'lr': 0.0004994992308934985, 'samples': 604160, 'steps': 1179, 'batch_loss/train': 0.9067457905039191} +12/21/2021 18:16:56 - INFO - codeparrot_training - Step 1180: {'lr': 0.0004994982331826818, 'samples': 604672, 'steps': 1180, 'batch_loss/train': 0.9443781683221459} +12/21/2021 18:17:07 - INFO - codeparrot_training - Step 1181: {'lr': 0.0004994972344799544, 
'samples': 605184, 'steps': 1181, 'batch_loss/train': 1.003391232341528} +12/21/2021 18:17:20 - INFO - codeparrot_training - Step 1182: {'lr': 0.0004994962347853203, 'samples': 605696, 'steps': 1182, 'batch_loss/train': 0.9118214081972837} +12/21/2021 18:17:30 - INFO - codeparrot_training - Step 1183: {'lr': 0.0004994952340987832, 'samples': 606208, 'steps': 1183, 'batch_loss/train': 0.9671361288055778} +12/21/2021 18:17:41 - INFO - codeparrot_training - Step 1184: {'lr': 0.0004994942324203474, 'samples': 606720, 'steps': 1184, 'batch_loss/train': 0.9741847687400877} +12/21/2021 18:17:51 - INFO - codeparrot_training - Step 1185: {'lr': 0.0004994932297500166, 'samples': 607232, 'steps': 1185, 'batch_loss/train': 0.9206735617481172} +12/21/2021 18:18:03 - INFO - codeparrot_training - Step 1186: {'lr': 0.000499492226087795, 'samples': 607744, 'steps': 1186, 'batch_loss/train': 0.896538395434618} +12/21/2021 18:18:14 - INFO - codeparrot_training - Step 1187: {'lr': 0.0004994912214336865, 'samples': 608256, 'steps': 1187, 'batch_loss/train': 0.951688070781529} +12/21/2021 18:18:25 - INFO - codeparrot_training - Step 1188: {'lr': 0.000499490215787695, 'samples': 608768, 'steps': 1188, 'batch_loss/train': 0.9373837700113654} +12/21/2021 18:18:37 - INFO - codeparrot_training - Step 1189: {'lr': 0.0004994892091498247, 'samples': 609280, 'steps': 1189, 'batch_loss/train': 0.9094412056729198} +12/21/2021 18:18:47 - INFO - codeparrot_training - Step 1190: {'lr': 0.0004994882015200796, 'samples': 609792, 'steps': 1190, 'batch_loss/train': 0.9100059298798442} +12/21/2021 18:18:58 - INFO - codeparrot_training - Step 1191: {'lr': 0.0004994871928984635, 'samples': 610304, 'steps': 1191, 'batch_loss/train': 0.8352670585736632} +12/21/2021 18:19:10 - INFO - codeparrot_training - Step 1192: {'lr': 0.0004994861832849806, 'samples': 610816, 'steps': 1192, 'batch_loss/train': 0.8741358816623688} +12/21/2021 18:19:21 - INFO - codeparrot_training - Step 1193: {'lr': 0.0004994851726796347, 'samples': 611328, 'steps': 1193, 'batch_loss/train': 0.9233765387907624} +12/21/2021 18:19:32 - INFO - codeparrot_training - Step 1194: {'lr': 0.0004994841610824301, 'samples': 611840, 'steps': 1194, 'batch_loss/train': 0.9368011187762022} +12/21/2021 18:19:44 - INFO - codeparrot_training - Step 1195: {'lr': 0.0004994831484933706, 'samples': 612352, 'steps': 1195, 'batch_loss/train': 0.815311367623508} +12/21/2021 18:19:54 - INFO - codeparrot_training - Step 1196: {'lr': 0.0004994821349124603, 'samples': 612864, 'steps': 1196, 'batch_loss/train': 0.8949263100512326} +12/21/2021 18:20:05 - INFO - codeparrot_training - Step 1197: {'lr': 0.0004994811203397032, 'samples': 613376, 'steps': 1197, 'batch_loss/train': 0.8489316711202264} +12/21/2021 18:20:16 - INFO - codeparrot_training - Step 1198: {'lr': 0.0004994801047751035, 'samples': 613888, 'steps': 1198, 'batch_loss/train': 0.9408408664166927} +12/21/2021 18:20:28 - INFO - codeparrot_training - Step 1199: {'lr': 0.0004994790882186651, 'samples': 614400, 'steps': 1199, 'batch_loss/train': 0.9314808757044375} +12/21/2021 18:20:39 - INFO - codeparrot_training - Step 1200: {'lr': 0.000499478070670392, 'samples': 614912, 'steps': 1200, 'batch_loss/train': 0.9746698988601565} +12/21/2021 18:20:49 - INFO - codeparrot_training - Step 1201: {'lr': 0.0004994770521302882, 'samples': 615424, 'steps': 1201, 'batch_loss/train': 0.9159172605723143} +12/21/2021 18:21:01 - INFO - codeparrot_training - Step 1202: {'lr': 0.0004994760325983579, 'samples': 615936, 'steps': 1202, 'batch_loss/train': 
0.8153248308226466} +12/21/2021 18:21:12 - INFO - codeparrot_training - Step 1203: {'lr': 0.0004994750120746051, 'samples': 616448, 'steps': 1203, 'batch_loss/train': 0.8492526998743415} +12/21/2021 18:21:22 - INFO - codeparrot_training - Step 1204: {'lr': 0.0004994739905590338, 'samples': 616960, 'steps': 1204, 'batch_loss/train': 0.8187123462557793} +12/21/2021 18:21:35 - INFO - codeparrot_training - Step 1205: {'lr': 0.0004994729680516482, 'samples': 617472, 'steps': 1205, 'batch_loss/train': 1.0016724057495594} +12/21/2021 18:21:46 - INFO - codeparrot_training - Step 1206: {'lr': 0.0004994719445524522, 'samples': 617984, 'steps': 1206, 'batch_loss/train': 0.8118661541957408} +12/21/2021 18:21:56 - INFO - codeparrot_training - Step 1207: {'lr': 0.00049947092006145, 'samples': 618496, 'steps': 1207, 'batch_loss/train': 1.0311933532357216} +12/21/2021 18:22:07 - INFO - codeparrot_training - Step 1208: {'lr': 0.0004994698945786456, 'samples': 619008, 'steps': 1208, 'batch_loss/train': 0.8503660308197141} +12/21/2021 18:22:19 - INFO - codeparrot_training - Step 1209: {'lr': 0.0004994688681040432, 'samples': 619520, 'steps': 1209, 'batch_loss/train': 0.9504197668284178} +12/21/2021 18:22:30 - INFO - codeparrot_training - Step 1210: {'lr': 0.0004994678406376466, 'samples': 620032, 'steps': 1210, 'batch_loss/train': 0.9647932406514883} +12/21/2021 18:22:40 - INFO - codeparrot_training - Step 1211: {'lr': 0.0004994668121794601, 'samples': 620544, 'steps': 1211, 'batch_loss/train': 0.8855375917628407} +12/21/2021 18:22:53 - INFO - codeparrot_training - Step 1212: {'lr': 0.0004994657827294878, 'samples': 621056, 'steps': 1212, 'batch_loss/train': 1.114992587827146} +12/21/2021 18:23:04 - INFO - codeparrot_training - Step 1213: {'lr': 0.0004994647522877337, 'samples': 621568, 'steps': 1213, 'batch_loss/train': 1.0224395813420415} +12/21/2021 18:23:14 - INFO - codeparrot_training - Step 1214: {'lr': 0.000499463720854202, 'samples': 622080, 'steps': 1214, 'batch_loss/train': 0.9078174885362387} +12/21/2021 18:23:27 - INFO - codeparrot_training - Step 1215: {'lr': 0.0004994626884288967, 'samples': 622592, 'steps': 1215, 'batch_loss/train': 0.9695952069014311} +12/21/2021 18:23:37 - INFO - codeparrot_training - Step 1216: {'lr': 0.0004994616550118219, 'samples': 623104, 'steps': 1216, 'batch_loss/train': 0.8072706330567598} +12/21/2021 18:23:48 - INFO - codeparrot_training - Step 1217: {'lr': 0.0004994606206029818, 'samples': 623616, 'steps': 1217, 'batch_loss/train': 0.8016591249033809} +12/21/2021 18:23:58 - INFO - codeparrot_training - Step 1218: {'lr': 0.0004994595852023804, 'samples': 624128, 'steps': 1218, 'batch_loss/train': 0.988537983968854} +12/21/2021 18:24:11 - INFO - codeparrot_training - Step 1219: {'lr': 0.000499458548810022, 'samples': 624640, 'steps': 1219, 'batch_loss/train': 0.8402985902503133} +12/21/2021 18:24:21 - INFO - codeparrot_training - Step 1220: {'lr': 0.0004994575114259105, 'samples': 625152, 'steps': 1220, 'batch_loss/train': 0.9254513545893133} +12/21/2021 18:24:32 - INFO - codeparrot_training - Step 1221: {'lr': 0.0004994564730500502, 'samples': 625664, 'steps': 1221, 'batch_loss/train': 0.9947165064513683} +12/21/2021 18:24:44 - INFO - codeparrot_training - Step 1222: {'lr': 0.0004994554336824452, 'samples': 626176, 'steps': 1222, 'batch_loss/train': 0.8540261844173074} +12/21/2021 18:24:55 - INFO - codeparrot_training - Step 1223: {'lr': 0.0004994543933230995, 'samples': 626688, 'steps': 1223, 'batch_loss/train': 0.8317353008314967} +12/21/2021 18:25:06 - INFO - 
codeparrot_training - Step 1224: {'lr': 0.0004994533519720172, 'samples': 627200, 'steps': 1224, 'batch_loss/train': 0.9294225857593119} +12/21/2021 18:25:18 - INFO - codeparrot_training - Step 1225: {'lr': 0.0004994523096292028, 'samples': 627712, 'steps': 1225, 'batch_loss/train': 0.9803953859955072} +12/21/2021 18:25:28 - INFO - codeparrot_training - Step 1226: {'lr': 0.0004994512662946601, 'samples': 628224, 'steps': 1226, 'batch_loss/train': 1.0673902332782745} +12/21/2021 18:25:39 - INFO - codeparrot_training - Step 1227: {'lr': 0.0004994502219683933, 'samples': 628736, 'steps': 1227, 'batch_loss/train': 0.9396049245260656} +12/21/2021 18:25:50 - INFO - codeparrot_training - Step 1228: {'lr': 0.0004994491766504067, 'samples': 629248, 'steps': 1228, 'batch_loss/train': 0.9172460688278079} +12/21/2021 18:26:02 - INFO - codeparrot_training - Step 1229: {'lr': 0.0004994481303407044, 'samples': 629760, 'steps': 1229, 'batch_loss/train': 0.8158911308273673} +12/21/2021 18:26:12 - INFO - codeparrot_training - Step 1230: {'lr': 0.0004994470830392905, 'samples': 630272, 'steps': 1230, 'batch_loss/train': 0.8511423089075834} +12/21/2021 18:26:23 - INFO - codeparrot_training - Step 1231: {'lr': 0.000499446034746169, 'samples': 630784, 'steps': 1231, 'batch_loss/train': 0.9460949646309018} +12/21/2021 18:26:35 - INFO - codeparrot_training - Step 1232: {'lr': 0.0004994449854613444, 'samples': 631296, 'steps': 1232, 'batch_loss/train': 0.8951304238289595} +12/21/2021 18:26:46 - INFO - codeparrot_training - Step 1233: {'lr': 0.0004994439351848208, 'samples': 631808, 'steps': 1233, 'batch_loss/train': 0.7241121765691787} +12/21/2021 18:26:56 - INFO - codeparrot_training - Step 1234: {'lr': 0.0004994428839166021, 'samples': 632320, 'steps': 1234, 'batch_loss/train': 0.8898308789357543} +12/21/2021 18:27:09 - INFO - codeparrot_training - Step 1235: {'lr': 0.0004994418316566928, 'samples': 632832, 'steps': 1235, 'batch_loss/train': 0.8237797874026} +12/21/2021 18:27:20 - INFO - codeparrot_training - Step 1236: {'lr': 0.000499440778405097, 'samples': 633344, 'steps': 1236, 'batch_loss/train': 0.8611332550644875} +12/21/2021 18:27:30 - INFO - codeparrot_training - Step 1237: {'lr': 0.0004994397241618188, 'samples': 633856, 'steps': 1237, 'batch_loss/train': 0.8826666735112667} +12/21/2021 18:27:41 - INFO - codeparrot_training - Step 1238: {'lr': 0.0004994386689268624, 'samples': 634368, 'steps': 1238, 'batch_loss/train': 0.8119998555630445} +12/21/2021 18:27:53 - INFO - codeparrot_training - Step 1239: {'lr': 0.0004994376127002322, 'samples': 634880, 'steps': 1239, 'batch_loss/train': 0.6617459214758128} +12/21/2021 18:28:04 - INFO - codeparrot_training - Step 1240: {'lr': 0.000499436555481932, 'samples': 635392, 'steps': 1240, 'batch_loss/train': 1.023212667554617} +12/21/2021 18:28:15 - INFO - codeparrot_training - Step 1241: {'lr': 0.0004994354972719663, 'samples': 635904, 'steps': 1241, 'batch_loss/train': 0.938759570941329} +12/21/2021 18:28:27 - INFO - codeparrot_training - Step 1242: {'lr': 0.0004994344380703394, 'samples': 636416, 'steps': 1242, 'batch_loss/train': 0.870722085237503} +12/21/2021 18:28:38 - INFO - codeparrot_training - Step 1243: {'lr': 0.0004994333778770552, 'samples': 636928, 'steps': 1243, 'batch_loss/train': 0.9078935999423265} +12/21/2021 18:28:48 - INFO - codeparrot_training - Step 1244: {'lr': 0.0004994323166921182, 'samples': 637440, 'steps': 1244, 'batch_loss/train': 0.8422485021874309} +12/21/2021 18:29:00 - INFO - codeparrot_training - Step 1245: {'lr': 
0.0004994312545155324, 'samples': 637952, 'steps': 1245, 'batch_loss/train': 0.8362310873344541} +12/21/2021 18:29:11 - INFO - codeparrot_training - Step 1246: {'lr': 0.0004994301913473022, 'samples': 638464, 'steps': 1246, 'batch_loss/train': 0.9576555611565709} +12/21/2021 18:29:22 - INFO - codeparrot_training - Step 1247: {'lr': 0.0004994291271874316, 'samples': 638976, 'steps': 1247, 'batch_loss/train': 0.946130913682282} +12/21/2021 18:29:34 - INFO - codeparrot_training - Step 1248: {'lr': 0.000499428062035925, 'samples': 639488, 'steps': 1248, 'batch_loss/train': 0.9039501752704382} +12/21/2021 18:29:44 - INFO - codeparrot_training - Step 1249: {'lr': 0.0004994269958927867, 'samples': 640000, 'steps': 1249, 'batch_loss/train': 0.848575550597161} +12/21/2021 18:29:55 - INFO - codeparrot_training - Step 1250: {'lr': 0.0004994259287580209, 'samples': 640512, 'steps': 1250, 'batch_loss/train': 0.9087131032720208} +12/21/2021 18:30:06 - INFO - codeparrot_training - Step 1251: {'lr': 0.0004994248606316317, 'samples': 641024, 'steps': 1251, 'batch_loss/train': 0.8138246252201498} +12/21/2021 18:30:18 - INFO - codeparrot_training - Step 1252: {'lr': 0.0004994237915136233, 'samples': 641536, 'steps': 1252, 'batch_loss/train': 0.8964862320572138} +12/21/2021 18:30:29 - INFO - codeparrot_training - Step 1253: {'lr': 0.0004994227214040002, 'samples': 642048, 'steps': 1253, 'batch_loss/train': 0.8127140523865819} +12/21/2021 18:30:40 - INFO - codeparrot_training - Step 1254: {'lr': 0.0004994216503027665, 'samples': 642560, 'steps': 1254, 'batch_loss/train': 0.9757550451904535} +12/21/2021 18:30:52 - INFO - codeparrot_training - Step 1255: {'lr': 0.0004994205782099265, 'samples': 643072, 'steps': 1255, 'batch_loss/train': 0.8581873029470444} +12/21/2021 18:31:02 - INFO - codeparrot_training - Step 1256: {'lr': 0.0004994195051254846, 'samples': 643584, 'steps': 1256, 'batch_loss/train': 0.879802443087101} +12/21/2021 18:31:13 - INFO - codeparrot_training - Step 1257: {'lr': 0.0004994184310494448, 'samples': 644096, 'steps': 1257, 'batch_loss/train': 0.8712422819808125} +12/21/2021 18:31:25 - INFO - codeparrot_training - Step 1258: {'lr': 0.0004994173559818116, 'samples': 644608, 'steps': 1258, 'batch_loss/train': 0.8286894992925227} +12/21/2021 18:31:35 - INFO - codeparrot_training - Step 1259: {'lr': 0.0004994162799225889, 'samples': 645120, 'steps': 1259, 'batch_loss/train': 0.928883945569396} +12/21/2021 18:31:46 - INFO - codeparrot_training - Step 1260: {'lr': 0.0004994152028717816, 'samples': 645632, 'steps': 1260, 'batch_loss/train': 0.9047780660912395} +12/21/2021 18:31:57 - INFO - codeparrot_training - Step 1261: {'lr': 0.0004994141248293935, 'samples': 646144, 'steps': 1261, 'batch_loss/train': 1.0263357982039452} +12/21/2021 18:32:10 - INFO - codeparrot_training - Step 1262: {'lr': 0.0004994130457954288, 'samples': 646656, 'steps': 1262, 'batch_loss/train': 0.8393691079691052} +12/21/2021 18:32:20 - INFO - codeparrot_training - Step 1263: {'lr': 0.0004994119657698922, 'samples': 647168, 'steps': 1263, 'batch_loss/train': 0.9209253257140517} +12/21/2021 18:32:31 - INFO - codeparrot_training - Step 1264: {'lr': 0.0004994108847527878, 'samples': 647680, 'steps': 1264, 'batch_loss/train': 0.9038002192974091} +12/21/2021 18:32:43 - INFO - codeparrot_training - Step 1265: {'lr': 0.0004994098027441199, 'samples': 648192, 'steps': 1265, 'batch_loss/train': 0.9246189179830253} +12/21/2021 18:32:53 - INFO - codeparrot_training - Step 1266: {'lr': 0.0004994087197438928, 'samples': 648704, 'steps': 
1266, 'batch_loss/train': 0.9447633372619748} +12/21/2021 18:33:04 - INFO - codeparrot_training - Step 1267: {'lr': 0.0004994076357521108, 'samples': 649216, 'steps': 1267, 'batch_loss/train': 0.9187744194641709} +12/21/2021 18:33:17 - INFO - codeparrot_training - Step 1268: {'lr': 0.0004994065507687781, 'samples': 649728, 'steps': 1268, 'batch_loss/train': 0.9471127837896347} +12/21/2021 18:33:27 - INFO - codeparrot_training - Step 1269: {'lr': 0.0004994054647938993, 'samples': 650240, 'steps': 1269, 'batch_loss/train': 1.3510896479710937} +12/21/2021 18:33:38 - INFO - codeparrot_training - Step 1270: {'lr': 0.0004994043778274783, 'samples': 650752, 'steps': 1270, 'batch_loss/train': 0.9727164045907557} +12/21/2021 18:33:49 - INFO - codeparrot_training - Step 1271: {'lr': 0.0004994032898695199, 'samples': 651264, 'steps': 1271, 'batch_loss/train': 0.8685201350599527} +12/21/2021 18:34:02 - INFO - codeparrot_training - Step 1272: {'lr': 0.0004994022009200281, 'samples': 651776, 'steps': 1272, 'batch_loss/train': 0.9125380613841116} +12/21/2021 18:34:12 - INFO - codeparrot_training - Step 1273: {'lr': 0.0004994011109790075, 'samples': 652288, 'steps': 1273, 'batch_loss/train': 0.9562150659039617} +12/21/2021 18:34:23 - INFO - codeparrot_training - Step 1274: {'lr': 0.000499400020046462, 'samples': 652800, 'steps': 1274, 'batch_loss/train': 0.9103519981727004} +12/21/2021 18:34:35 - INFO - codeparrot_training - Step 1275: {'lr': 0.0004993989281223963, 'samples': 653312, 'steps': 1275, 'batch_loss/train': 0.9870065823197365} +12/21/2021 18:34:45 - INFO - codeparrot_training - Step 1276: {'lr': 0.0004993978352068146, 'samples': 653824, 'steps': 1276, 'batch_loss/train': 0.960326406173408} +12/21/2021 18:34:56 - INFO - codeparrot_training - Step 1277: {'lr': 0.0004993967412997212, 'samples': 654336, 'steps': 1277, 'batch_loss/train': 1.0036738468334079} +12/21/2021 18:35:08 - INFO - codeparrot_training - Step 1278: {'lr': 0.0004993956464011207, 'samples': 654848, 'steps': 1278, 'batch_loss/train': 1.1145060686394572} +12/21/2021 18:35:19 - INFO - codeparrot_training - Step 1279: {'lr': 0.0004993945505110171, 'samples': 655360, 'steps': 1279, 'batch_loss/train': 0.76822858909145} +12/21/2021 18:35:29 - INFO - codeparrot_training - Step 1280: {'lr': 0.0004993934536294151, 'samples': 655872, 'steps': 1280, 'batch_loss/train': 0.8462537098675966} +12/21/2021 18:35:40 - INFO - codeparrot_training - Step 1281: {'lr': 0.0004993923557563188, 'samples': 656384, 'steps': 1281, 'batch_loss/train': 0.8669967195019126} +12/21/2021 18:35:53 - INFO - codeparrot_training - Step 1282: {'lr': 0.0004993912568917327, 'samples': 656896, 'steps': 1282, 'batch_loss/train': 0.9611996412277222} +12/21/2021 18:36:04 - INFO - codeparrot_training - Step 1283: {'lr': 0.000499390157035661, 'samples': 657408, 'steps': 1283, 'batch_loss/train': 0.9204590367153287} +12/21/2021 18:36:14 - INFO - codeparrot_training - Step 1284: {'lr': 0.0004993890561881084, 'samples': 657920, 'steps': 1284, 'batch_loss/train': 0.8793917093425989} +12/21/2021 18:36:26 - INFO - codeparrot_training - Step 1285: {'lr': 0.0004993879543490789, 'samples': 658432, 'steps': 1285, 'batch_loss/train': 0.9312426913529634} +12/21/2021 18:36:37 - INFO - codeparrot_training - Step 1286: {'lr': 0.0004993868515185772, 'samples': 658944, 'steps': 1286, 'batch_loss/train': 0.9649236835539341} +12/21/2021 18:36:47 - INFO - codeparrot_training - Step 1287: {'lr': 0.0004993857476966075, 'samples': 659456, 'steps': 1287, 'batch_loss/train': 0.9192196615040302} 
+12/21/2021 18:36:59 - INFO - codeparrot_training - Step 1288: {'lr': 0.0004993846428831742, 'samples': 659968, 'steps': 1288, 'batch_loss/train': 0.867634005844593} +12/21/2021 18:37:10 - INFO - codeparrot_training - Step 1289: {'lr': 0.0004993835370782817, 'samples': 660480, 'steps': 1289, 'batch_loss/train': 0.9262999817728996} +12/21/2021 18:37:20 - INFO - codeparrot_training - Step 1290: {'lr': 0.0004993824302819346, 'samples': 660992, 'steps': 1290, 'batch_loss/train': 0.8916918057948351} +12/21/2021 18:37:31 - INFO - codeparrot_training - Step 1291: {'lr': 0.0004993813224941369, 'samples': 661504, 'steps': 1291, 'batch_loss/train': 0.9068458424881101} +12/21/2021 18:37:44 - INFO - codeparrot_training - Step 1292: {'lr': 0.0004993802137148933, 'samples': 662016, 'steps': 1292, 'batch_loss/train': 0.7487506093457341} +12/21/2021 18:37:54 - INFO - codeparrot_training - Step 1293: {'lr': 0.0004993791039442082, 'samples': 662528, 'steps': 1293, 'batch_loss/train': 0.8621078282594681} +12/21/2021 18:38:05 - INFO - codeparrot_training - Step 1294: {'lr': 0.0004993779931820859, 'samples': 663040, 'steps': 1294, 'batch_loss/train': 0.9061572141945362} +12/21/2021 18:38:17 - INFO - codeparrot_training - Step 1295: {'lr': 0.0004993768814285308, 'samples': 663552, 'steps': 1295, 'batch_loss/train': 0.8956511998549104} +12/21/2021 18:38:28 - INFO - codeparrot_training - Step 1296: {'lr': 0.0004993757686835474, 'samples': 664064, 'steps': 1296, 'batch_loss/train': 0.8759795539081097} +12/21/2021 18:38:38 - INFO - codeparrot_training - Step 1297: {'lr': 0.0004993746549471402, 'samples': 664576, 'steps': 1297, 'batch_loss/train': 0.9617621544748545} +12/21/2021 18:38:51 - INFO - codeparrot_training - Step 1298: {'lr': 0.0004993735402193134, 'samples': 665088, 'steps': 1298, 'batch_loss/train': 0.957000813446939} +12/21/2021 18:39:02 - INFO - codeparrot_training - Step 1299: {'lr': 0.0004993724245000717, 'samples': 665600, 'steps': 1299, 'batch_loss/train': 1.0371154267340899} +12/21/2021 18:39:13 - INFO - codeparrot_training - Step 1300: {'lr': 0.0004993713077894194, 'samples': 666112, 'steps': 1300, 'batch_loss/train': 0.8866588901728392} +12/21/2021 18:39:23 - INFO - codeparrot_training - Step 1301: {'lr': 0.0004993701900873607, 'samples': 666624, 'steps': 1301, 'batch_loss/train': 0.7850203523412347} +12/21/2021 18:39:35 - INFO - codeparrot_training - Step 1302: {'lr': 0.0004993690713939005, 'samples': 667136, 'steps': 1302, 'batch_loss/train': 1.0228252205997705} +12/21/2021 18:39:46 - INFO - codeparrot_training - Step 1303: {'lr': 0.000499367951709043, 'samples': 667648, 'steps': 1303, 'batch_loss/train': 1.0434366362169385} +12/21/2021 18:39:56 - INFO - codeparrot_training - Step 1304: {'lr': 0.0004993668310327926, 'samples': 668160, 'steps': 1304, 'batch_loss/train': 0.7902837623842061} +12/21/2021 18:40:09 - INFO - codeparrot_training - Step 1305: {'lr': 0.000499365709365154, 'samples': 668672, 'steps': 1305, 'batch_loss/train': 0.9913491755723953} +12/21/2021 18:40:20 - INFO - codeparrot_training - Step 1306: {'lr': 0.0004993645867061312, 'samples': 669184, 'steps': 1306, 'batch_loss/train': 0.9016605205833912} +12/21/2021 18:40:30 - INFO - codeparrot_training - Step 1307: {'lr': 0.0004993634630557292, 'samples': 669696, 'steps': 1307, 'batch_loss/train': 0.5655952508095652} +12/21/2021 18:40:42 - INFO - codeparrot_training - Step 1308: {'lr': 0.0004993623384139523, 'samples': 670208, 'steps': 1308, 'batch_loss/train': 0.9662716723978519} +12/21/2021 18:40:53 - INFO - codeparrot_training - 
Step 1309: {'lr': 0.0004993612127808047, 'samples': 670720, 'steps': 1309, 'batch_loss/train': 0.9132321067154408} +12/21/2021 18:41:04 - INFO - codeparrot_training - Step 1310: {'lr': 0.0004993600861562912, 'samples': 671232, 'steps': 1310, 'batch_loss/train': 0.8492658697068691} +12/21/2021 18:41:14 - INFO - codeparrot_training - Step 1311: {'lr': 0.0004993589585404161, 'samples': 671744, 'steps': 1311, 'batch_loss/train': 0.8645156929269433} +12/21/2021 18:41:27 - INFO - codeparrot_training - Step 1312: {'lr': 0.0004993578299331839, 'samples': 672256, 'steps': 1312, 'batch_loss/train': 0.9388946956023574} +12/21/2021 18:41:38 - INFO - codeparrot_training - Step 1313: {'lr': 0.0004993567003345992, 'samples': 672768, 'steps': 1313, 'batch_loss/train': 0.9403657596558332} +12/21/2021 18:41:48 - INFO - codeparrot_training - Step 1314: {'lr': 0.0004993555697446663, 'samples': 673280, 'steps': 1314, 'batch_loss/train': 0.9188530156388879} +12/21/2021 18:42:01 - INFO - codeparrot_training - Step 1315: {'lr': 0.0004993544381633899, 'samples': 673792, 'steps': 1315, 'batch_loss/train': 0.710785408038646} +12/21/2021 18:42:11 - INFO - codeparrot_training - Step 1316: {'lr': 0.0004993533055907744, 'samples': 674304, 'steps': 1316, 'batch_loss/train': 0.8243555780500174} +12/21/2021 18:42:22 - INFO - codeparrot_training - Step 1317: {'lr': 0.0004993521720268243, 'samples': 674816, 'steps': 1317, 'batch_loss/train': 0.9317512549459934} +12/21/2021 18:42:34 - INFO - codeparrot_training - Step 1318: {'lr': 0.0004993510374715442, 'samples': 675328, 'steps': 1318, 'batch_loss/train': 0.9920937879942358} +12/21/2021 18:42:45 - INFO - codeparrot_training - Step 1319: {'lr': 0.0004993499019249383, 'samples': 675840, 'steps': 1319, 'batch_loss/train': 0.9533022390678525} +12/21/2021 18:42:55 - INFO - codeparrot_training - Step 1320: {'lr': 0.0004993487653870115, 'samples': 676352, 'steps': 1320, 'batch_loss/train': 0.8998299855738878} +12/21/2021 18:43:06 - INFO - codeparrot_training - Step 1321: {'lr': 0.0004993476278577682, 'samples': 676864, 'steps': 1321, 'batch_loss/train': 0.8547343546524644} +12/21/2021 18:43:19 - INFO - codeparrot_training - Step 1322: {'lr': 0.0004993464893372128, 'samples': 677376, 'steps': 1322, 'batch_loss/train': 1.0036962730810046} +12/21/2021 18:43:29 - INFO - codeparrot_training - Step 1323: {'lr': 0.00049934534982535, 'samples': 677888, 'steps': 1323, 'batch_loss/train': 0.9214518086519092} +12/21/2021 18:43:40 - INFO - codeparrot_training - Step 1324: {'lr': 0.0004993442093221841, 'samples': 678400, 'steps': 1324, 'batch_loss/train': 0.8624354796484113} +12/21/2021 18:43:52 - INFO - codeparrot_training - Step 1325: {'lr': 0.0004993430678277199, 'samples': 678912, 'steps': 1325, 'batch_loss/train': 1.0128678511828184} +12/21/2021 18:44:03 - INFO - codeparrot_training - Step 1326: {'lr': 0.0004993419253419617, 'samples': 679424, 'steps': 1326, 'batch_loss/train': 0.8773164730519056} +12/21/2021 18:44:13 - INFO - codeparrot_training - Step 1327: {'lr': 0.0004993407818649142, 'samples': 679936, 'steps': 1327, 'batch_loss/train': 0.8841583263128996} +12/21/2021 18:44:25 - INFO - codeparrot_training - Step 1328: {'lr': 0.0004993396373965819, 'samples': 680448, 'steps': 1328, 'batch_loss/train': 0.9331040475517511} +12/21/2021 18:44:36 - INFO - codeparrot_training - Step 1329: {'lr': 0.0004993384919369693, 'samples': 680960, 'steps': 1329, 'batch_loss/train': 0.8925506938248873} +12/21/2021 18:44:46 - INFO - codeparrot_training - Step 1330: {'lr': 0.0004993373454860811, 
'samples': 681472, 'steps': 1330, 'batch_loss/train': 0.8381318263709545} +12/21/2021 18:44:59 - INFO - codeparrot_training - Step 1331: {'lr': 0.0004993361980439217, 'samples': 681984, 'steps': 1331, 'batch_loss/train': 0.9490224411711097} +12/21/2021 18:45:10 - INFO - codeparrot_training - Step 1332: {'lr': 0.0004993350496104958, 'samples': 682496, 'steps': 1332, 'batch_loss/train': 0.8712426014244556} +12/21/2021 18:45:20 - INFO - codeparrot_training - Step 1333: {'lr': 0.0004993339001858079, 'samples': 683008, 'steps': 1333, 'batch_loss/train': 1.007031557150185} +12/21/2021 18:45:31 - INFO - codeparrot_training - Step 1334: {'lr': 0.0004993327497698624, 'samples': 683520, 'steps': 1334, 'batch_loss/train': 0.8620072351768613} +12/21/2021 18:45:44 - INFO - codeparrot_training - Step 1335: {'lr': 0.0004993315983626641, 'samples': 684032, 'steps': 1335, 'batch_loss/train': 0.8929694406688213} +12/21/2021 18:45:55 - INFO - codeparrot_training - Step 1336: {'lr': 0.0004993304459642175, 'samples': 684544, 'steps': 1336, 'batch_loss/train': 0.9880138784646988} +12/21/2021 18:46:05 - INFO - codeparrot_training - Step 1337: {'lr': 0.0004993292925745272, 'samples': 685056, 'steps': 1337, 'batch_loss/train': 0.935458067804575} +12/21/2021 18:46:17 - INFO - codeparrot_training - Step 1338: {'lr': 0.0004993281381935978, 'samples': 685568, 'steps': 1338, 'batch_loss/train': 0.9177853753790259} +12/21/2021 18:46:28 - INFO - codeparrot_training - Step 1339: {'lr': 0.0004993269828214339, 'samples': 686080, 'steps': 1339, 'batch_loss/train': 1.0212460467591882} +12/21/2021 18:46:39 - INFO - codeparrot_training - Step 1340: {'lr': 0.0004993258264580399, 'samples': 686592, 'steps': 1340, 'batch_loss/train': 0.8958797240629792} +12/21/2021 18:46:52 - INFO - codeparrot_training - Step 1341: {'lr': 0.0004993246691034208, 'samples': 687104, 'steps': 1341, 'batch_loss/train': 0.8619717983528972} +12/21/2021 18:47:02 - INFO - codeparrot_training - Step 1342: {'lr': 0.0004993235107575807, 'samples': 687616, 'steps': 1342, 'batch_loss/train': 0.9788844976574183} +12/21/2021 18:47:13 - INFO - codeparrot_training - Step 1343: {'lr': 0.0004993223514205247, 'samples': 688128, 'steps': 1343, 'batch_loss/train': 0.9293665299192071} +12/21/2021 18:47:23 - INFO - codeparrot_training - Step 1344: {'lr': 0.0004993211910922571, 'samples': 688640, 'steps': 1344, 'batch_loss/train': 0.9605931024998426} +12/21/2021 18:47:35 - INFO - codeparrot_training - Step 1345: {'lr': 0.0004993200297727825, 'samples': 689152, 'steps': 1345, 'batch_loss/train': 0.8961412999778986} +12/21/2021 18:47:46 - INFO - codeparrot_training - Step 1346: {'lr': 0.0004993188674621056, 'samples': 689664, 'steps': 1346, 'batch_loss/train': 0.9617785345762968} +12/21/2021 18:47:56 - INFO - codeparrot_training - Step 1347: {'lr': 0.0004993177041602311, 'samples': 690176, 'steps': 1347, 'batch_loss/train': 0.9072169419378042} +12/21/2021 18:48:09 - INFO - codeparrot_training - Step 1348: {'lr': 0.0004993165398671635, 'samples': 690688, 'steps': 1348, 'batch_loss/train': 0.7944323308765888} +12/21/2021 18:48:19 - INFO - codeparrot_training - Step 1349: {'lr': 0.0004993153745829075, 'samples': 691200, 'steps': 1349, 'batch_loss/train': 0.8820071071386337} +12/21/2021 18:48:30 - INFO - codeparrot_training - Step 1350: {'lr': 0.0004993142083074677, 'samples': 691712, 'steps': 1350, 'batch_loss/train': 0.9674626467749476} +12/21/2021 18:48:42 - INFO - codeparrot_training - Step 1351: {'lr': 0.0004993130410408487, 'samples': 692224, 'steps': 1351, 
'batch_loss/train': 0.8883714079856873} +12/21/2021 18:48:53 - INFO - codeparrot_training - Step 1352: {'lr': 0.0004993118727830553, 'samples': 692736, 'steps': 1352, 'batch_loss/train': 0.8666070178151131} +12/21/2021 18:49:04 - INFO - codeparrot_training - Step 1353: {'lr': 0.0004993107035340919, 'samples': 693248, 'steps': 1353, 'batch_loss/train': 0.8847145279869437} +12/21/2021 18:49:14 - INFO - codeparrot_training - Step 1354: {'lr': 0.0004993095332939633, 'samples': 693760, 'steps': 1354, 'batch_loss/train': 0.9575709011405706} +12/21/2021 18:49:26 - INFO - codeparrot_training - Step 1355: {'lr': 0.0004993083620626742, 'samples': 694272, 'steps': 1355, 'batch_loss/train': 0.9073086497373879} +12/21/2021 18:49:37 - INFO - codeparrot_training - Step 1356: {'lr': 0.0004993071898402292, 'samples': 694784, 'steps': 1356, 'batch_loss/train': 0.9253665707074106} +12/21/2021 18:49:47 - INFO - codeparrot_training - Step 1357: {'lr': 0.0004993060166266329, 'samples': 695296, 'steps': 1357, 'batch_loss/train': 0.8852570233866572} +12/21/2021 18:50:00 - INFO - codeparrot_training - Step 1358: {'lr': 0.00049930484242189, 'samples': 695808, 'steps': 1358, 'batch_loss/train': 0.8214883715845644} +12/21/2021 18:50:11 - INFO - codeparrot_training - Step 1359: {'lr': 0.0004993036672260052, 'samples': 696320, 'steps': 1359, 'batch_loss/train': 0.9004403837025166} +12/21/2021 18:50:22 - INFO - codeparrot_training - Step 1360: {'lr': 0.0004993024910389831, 'samples': 696832, 'steps': 1360, 'batch_loss/train': 0.9973794119432569} +12/21/2021 18:50:34 - INFO - codeparrot_training - Step 1361: {'lr': 0.0004993013138608284, 'samples': 697344, 'steps': 1361, 'batch_loss/train': 0.9549466390162706} +12/21/2021 18:50:44 - INFO - codeparrot_training - Step 1362: {'lr': 0.0004993001356915459, 'samples': 697856, 'steps': 1362, 'batch_loss/train': 0.8603733228519559} +12/21/2021 18:50:55 - INFO - codeparrot_training - Step 1363: {'lr': 0.0004992989565311402, 'samples': 698368, 'steps': 1363, 'batch_loss/train': 0.9401235659606755} +12/21/2021 18:51:07 - INFO - codeparrot_training - Step 1364: {'lr': 0.0004992977763796161, 'samples': 698880, 'steps': 1364, 'batch_loss/train': 0.8158649448305368} +12/21/2021 18:51:17 - INFO - codeparrot_training - Step 1365: {'lr': 0.000499296595236978, 'samples': 699392, 'steps': 1365, 'batch_loss/train': 1.001479273661971} +12/21/2021 18:51:28 - INFO - codeparrot_training - Step 1366: {'lr': 0.0004992954131032307, 'samples': 699904, 'steps': 1366, 'batch_loss/train': 0.9982989002019167} +12/21/2021 18:51:39 - INFO - codeparrot_training - Step 1367: {'lr': 0.0004992942299783792, 'samples': 700416, 'steps': 1367, 'batch_loss/train': 0.9400274325162172} +12/21/2021 18:51:51 - INFO - codeparrot_training - Step 1368: {'lr': 0.0004992930458624277, 'samples': 700928, 'steps': 1368, 'batch_loss/train': 0.8175384434871376} +12/21/2021 18:52:02 - INFO - codeparrot_training - Step 1369: {'lr': 0.0004992918607553814, 'samples': 701440, 'steps': 1369, 'batch_loss/train': 0.8123994609341025} +12/21/2021 18:52:13 - INFO - codeparrot_training - Step 1370: {'lr': 0.0004992906746572447, 'samples': 701952, 'steps': 1370, 'batch_loss/train': 0.8737145038321614} +12/21/2021 18:52:25 - INFO - codeparrot_training - Step 1371: {'lr': 0.0004992894875680224, 'samples': 702464, 'steps': 1371, 'batch_loss/train': 0.8616317110136151} +12/21/2021 18:52:35 - INFO - codeparrot_training - Step 1372: {'lr': 0.0004992882994877192, 'samples': 702976, 'steps': 1372, 'batch_loss/train': 0.8232845973689109} +12/21/2021 
18:52:46 - INFO - codeparrot_training - Step 1373: {'lr': 0.00049928711041634, 'samples': 703488, 'steps': 1373, 'batch_loss/train': 0.8644114304333925} +12/21/2021 18:52:58 - INFO - codeparrot_training - Step 1374: {'lr': 0.0004992859203538892, 'samples': 704000, 'steps': 1374, 'batch_loss/train': 0.8904834846034646} +12/21/2021 18:53:08 - INFO - codeparrot_training - Step 1375: {'lr': 0.0004992847293003717, 'samples': 704512, 'steps': 1375, 'batch_loss/train': 0.8894833009690046} +12/21/2021 18:53:19 - INFO - codeparrot_training - Step 1376: {'lr': 0.0004992835372557924, 'samples': 705024, 'steps': 1376, 'batch_loss/train': 0.8786670342087746} +12/21/2021 18:53:29 - INFO - codeparrot_training - Step 1377: {'lr': 0.0004992823442201558, 'samples': 705536, 'steps': 1377, 'batch_loss/train': 0.8387476159259677} +12/21/2021 18:53:42 - INFO - codeparrot_training - Step 1378: {'lr': 0.0004992811501934667, 'samples': 706048, 'steps': 1378, 'batch_loss/train': 0.975234052632004} +12/21/2021 18:53:52 - INFO - codeparrot_training - Step 1379: {'lr': 0.0004992799551757299, 'samples': 706560, 'steps': 1379, 'batch_loss/train': 0.890184338670224} +12/21/2021 18:54:03 - INFO - codeparrot_training - Step 1380: {'lr': 0.0004992787591669501, 'samples': 707072, 'steps': 1380, 'batch_loss/train': 0.8847593450918794} +12/21/2021 18:54:16 - INFO - codeparrot_training - Step 1381: {'lr': 0.0004992775621671322, 'samples': 707584, 'steps': 1381, 'batch_loss/train': 0.8043151115998626} +12/21/2021 18:54:27 - INFO - codeparrot_training - Step 1382: {'lr': 0.0004992763641762806, 'samples': 708096, 'steps': 1382, 'batch_loss/train': 1.0100176334381104} +12/21/2021 18:54:37 - INFO - codeparrot_training - Step 1383: {'lr': 0.0004992751651944005, 'samples': 708608, 'steps': 1383, 'batch_loss/train': 0.8148020063526928} +12/21/2021 18:54:49 - INFO - codeparrot_training - Step 1384: {'lr': 0.0004992739652214964, 'samples': 709120, 'steps': 1384, 'batch_loss/train': 0.8589685750193894} +12/21/2021 18:55:00 - INFO - codeparrot_training - Step 1385: {'lr': 0.0004992727642575731, 'samples': 709632, 'steps': 1385, 'batch_loss/train': 0.8756378502584994} +12/21/2021 18:55:11 - INFO - codeparrot_training - Step 1386: {'lr': 0.0004992715623026355, 'samples': 710144, 'steps': 1386, 'batch_loss/train': 0.9711198946461082} +12/21/2021 18:55:21 - INFO - codeparrot_training - Step 1387: {'lr': 0.0004992703593566882, 'samples': 710656, 'steps': 1387, 'batch_loss/train': 0.7297414396889508} +12/21/2021 18:55:33 - INFO - codeparrot_training - Step 1388: {'lr': 0.0004992691554197361, 'samples': 711168, 'steps': 1388, 'batch_loss/train': 0.9059382630512118} +12/21/2021 18:55:44 - INFO - codeparrot_training - Step 1389: {'lr': 0.000499267950491784, 'samples': 711680, 'steps': 1389, 'batch_loss/train': 1.543355810455978} +12/21/2021 18:55:54 - INFO - codeparrot_training - Step 1390: {'lr': 0.0004992667445728366, 'samples': 712192, 'steps': 1390, 'batch_loss/train': 0.9728460945188999} +12/21/2021 18:56:07 - INFO - codeparrot_training - Step 1391: {'lr': 0.0004992655376628987, 'samples': 712704, 'steps': 1391, 'batch_loss/train': 0.9879138451069593} +12/21/2021 18:56:18 - INFO - codeparrot_training - Step 1392: {'lr': 0.0004992643297619752, 'samples': 713216, 'steps': 1392, 'batch_loss/train': 0.9045278923586011} +12/21/2021 18:56:28 - INFO - codeparrot_training - Step 1393: {'lr': 0.0004992631208700709, 'samples': 713728, 'steps': 1393, 'batch_loss/train': 0.9775938503444195} +12/21/2021 18:56:40 - INFO - codeparrot_training - Step 1394: 
{'lr': 0.0004992619109871904, 'samples': 714240, 'steps': 1394, 'batch_loss/train': 0.8892487827688456} +12/21/2021 18:56:51 - INFO - codeparrot_training - Step 1395: {'lr': 0.0004992607001133388, 'samples': 714752, 'steps': 1395, 'batch_loss/train': 0.8694000644609332} +12/21/2021 18:57:02 - INFO - codeparrot_training - Step 1396: {'lr': 0.0004992594882485208, 'samples': 715264, 'steps': 1396, 'batch_loss/train': 0.8429547948762774} +12/21/2021 18:57:12 - INFO - codeparrot_training - Step 1397: {'lr': 0.0004992582753927411, 'samples': 715776, 'steps': 1397, 'batch_loss/train': 0.9060738850384951} +12/21/2021 18:57:26 - INFO - codeparrot_training - Step 1398: {'lr': 0.0004992570615460045, 'samples': 716288, 'steps': 1398, 'batch_loss/train': 1.281076810322702} +12/21/2021 18:57:36 - INFO - codeparrot_training - Step 1399: {'lr': 0.0004992558467083162, 'samples': 716800, 'steps': 1399, 'batch_loss/train': 0.9588902518153191} +12/21/2021 18:57:47 - INFO - codeparrot_training - Step 1400: {'lr': 0.0004992546308796805, 'samples': 717312, 'steps': 1400, 'batch_loss/train': 0.7826875653117895} +12/21/2021 18:57:59 - INFO - codeparrot_training - Step 1401: {'lr': 0.0004992534140601027, 'samples': 717824, 'steps': 1401, 'batch_loss/train': 0.9038854958489537} +12/21/2021 18:58:10 - INFO - codeparrot_training - Step 1402: {'lr': 0.0004992521962495874, 'samples': 718336, 'steps': 1402, 'batch_loss/train': 0.9273305330425501} +12/21/2021 18:58:20 - INFO - codeparrot_training - Step 1403: {'lr': 0.0004992509774481394, 'samples': 718848, 'steps': 1403, 'batch_loss/train': 1.0787429315969348} +12/21/2021 18:58:32 - INFO - codeparrot_training - Step 1404: {'lr': 0.0004992497576557637, 'samples': 719360, 'steps': 1404, 'batch_loss/train': 0.9390718201175332} +12/21/2021 18:58:43 - INFO - codeparrot_training - Step 1405: {'lr': 0.0004992485368724651, 'samples': 719872, 'steps': 1405, 'batch_loss/train': 1.0478160828351974} +12/21/2021 18:58:54 - INFO - codeparrot_training - Step 1406: {'lr': 0.0004992473150982484, 'samples': 720384, 'steps': 1406, 'batch_loss/train': 1.003836389631033} +12/21/2021 18:59:04 - INFO - codeparrot_training - Step 1407: {'lr': 0.0004992460923331184, 'samples': 720896, 'steps': 1407, 'batch_loss/train': 0.9099589590914547} +12/21/2021 18:59:17 - INFO - codeparrot_training - Step 1408: {'lr': 0.0004992448685770802, 'samples': 721408, 'steps': 1408, 'batch_loss/train': 0.8307887779083103} +12/21/2021 18:59:28 - INFO - codeparrot_training - Step 1409: {'lr': 0.0004992436438301384, 'samples': 721920, 'steps': 1409, 'batch_loss/train': 0.7110651433467865} +12/21/2021 18:59:39 - INFO - codeparrot_training - Step 1410: {'lr': 0.000499242418092298, 'samples': 722432, 'steps': 1410, 'batch_loss/train': 0.7149399314075708} +12/21/2021 18:59:51 - INFO - codeparrot_training - Step 1411: {'lr': 0.000499241191363564, 'samples': 722944, 'steps': 1411, 'batch_loss/train': 0.8380734208039939} +12/21/2021 19:00:01 - INFO - codeparrot_training - Step 1412: {'lr': 0.000499239963643941, 'samples': 723456, 'steps': 1412, 'batch_loss/train': 0.8435431718826294} +12/21/2021 19:00:12 - INFO - codeparrot_training - Step 1413: {'lr': 0.0004992387349334339, 'samples': 723968, 'steps': 1413, 'batch_loss/train': 0.8829627665691078} +12/21/2021 19:00:24 - INFO - codeparrot_training - Step 1414: {'lr': 0.000499237505232048, 'samples': 724480, 'steps': 1414, 'batch_loss/train': 0.7680662181228399} +12/21/2021 19:00:34 - INFO - codeparrot_training - Step 1415: {'lr': 0.0004992362745397876, 'samples': 724992, 
'steps': 1415, 'batch_loss/train': 0.8271518922410905} +12/21/2021 19:00:45 - INFO - codeparrot_training - Step 1416: {'lr': 0.000499235042856658, 'samples': 725504, 'steps': 1416, 'batch_loss/train': 0.9793215957470238} +12/21/2021 19:00:56 - INFO - codeparrot_training - Step 1417: {'lr': 0.000499233810182664, 'samples': 726016, 'steps': 1417, 'batch_loss/train': 0.9254060639068484} +12/21/2021 19:01:08 - INFO - codeparrot_training - Step 1418: {'lr': 0.0004992325765178104, 'samples': 726528, 'steps': 1418, 'batch_loss/train': 0.8961984254419804} +12/21/2021 19:01:19 - INFO - codeparrot_training - Step 1419: {'lr': 0.0004992313418621023, 'samples': 727040, 'steps': 1419, 'batch_loss/train': 0.8967116167768836} +12/21/2021 19:01:29 - INFO - codeparrot_training - Step 1420: {'lr': 0.0004992301062155443, 'samples': 727552, 'steps': 1420, 'batch_loss/train': 0.8577445903792977} +12/21/2021 19:01:42 - INFO - codeparrot_training - Step 1421: {'lr': 0.0004992288695781417, 'samples': 728064, 'steps': 1421, 'batch_loss/train': 0.9045203067362309} +12/21/2021 19:01:53 - INFO - codeparrot_training - Step 1422: {'lr': 0.0004992276319498992, 'samples': 728576, 'steps': 1422, 'batch_loss/train': 0.8740531024523079} +12/21/2021 19:02:03 - INFO - codeparrot_training - Step 1423: {'lr': 0.0004992263933308217, 'samples': 729088, 'steps': 1423, 'batch_loss/train': 0.8829802665859461} +12/21/2021 19:02:15 - INFO - codeparrot_training - Step 1424: {'lr': 0.000499225153720914, 'samples': 729600, 'steps': 1424, 'batch_loss/train': 0.9505403861403465} +12/21/2021 19:02:26 - INFO - codeparrot_training - Step 1425: {'lr': 0.0004992239131201815, 'samples': 730112, 'steps': 1425, 'batch_loss/train': 0.7963016289286315} +12/21/2021 19:02:36 - INFO - codeparrot_training - Step 1426: {'lr': 0.0004992226715286285, 'samples': 730624, 'steps': 1426, 'batch_loss/train': 0.9058397859334946} +12/21/2021 19:02:47 - INFO - codeparrot_training - Step 1427: {'lr': 0.0004992214289462606, 'samples': 731136, 'steps': 1427, 'batch_loss/train': 0.9820694969967008} +12/21/2021 19:03:00 - INFO - codeparrot_training - Step 1428: {'lr': 0.0004992201853730821, 'samples': 731648, 'steps': 1428, 'batch_loss/train': 0.8871501497924328} +12/21/2021 19:03:11 - INFO - codeparrot_training - Step 1429: {'lr': 0.0004992189408090984, 'samples': 732160, 'steps': 1429, 'batch_loss/train': 0.9270755974575877} +12/21/2021 19:03:21 - INFO - codeparrot_training - Step 1430: {'lr': 0.0004992176952543142, 'samples': 732672, 'steps': 1430, 'batch_loss/train': 0.8956932174041867} +12/21/2021 19:03:33 - INFO - codeparrot_training - Step 1431: {'lr': 0.0004992164487087347, 'samples': 733184, 'steps': 1431, 'batch_loss/train': 0.9295681174844503} +12/21/2021 19:03:44 - INFO - codeparrot_training - Step 1432: {'lr': 0.0004992152011723646, 'samples': 733696, 'steps': 1432, 'batch_loss/train': 0.8500270834192634} +12/21/2021 19:03:54 - INFO - codeparrot_training - Step 1433: {'lr': 0.000499213952645209, 'samples': 734208, 'steps': 1433, 'batch_loss/train': 0.9154115151613951} +12/21/2021 19:04:06 - INFO - codeparrot_training - Step 1434: {'lr': 0.0004992127031272727, 'samples': 734720, 'steps': 1434, 'batch_loss/train': 0.7609016797505319} +12/21/2021 19:04:17 - INFO - codeparrot_training - Step 1435: {'lr': 0.000499211452618561, 'samples': 735232, 'steps': 1435, 'batch_loss/train': 0.9242110941559076} +12/21/2021 19:04:28 - INFO - codeparrot_training - Step 1436: {'lr': 0.0004992102011190785, 'samples': 735744, 'steps': 1436, 'batch_loss/train': 
0.8660112479701638} +12/21/2021 19:04:38 - INFO - codeparrot_training - Step 1437: {'lr': 0.0004992089486288303, 'samples': 736256, 'steps': 1437, 'batch_loss/train': 0.8736496847122908} +12/21/2021 19:04:51 - INFO - codeparrot_training - Step 1438: {'lr': 0.0004992076951478216, 'samples': 736768, 'steps': 1438, 'batch_loss/train': 0.9058330673724413} +12/21/2021 19:05:02 - INFO - codeparrot_training - Step 1439: {'lr': 0.0004992064406760571, 'samples': 737280, 'steps': 1439, 'batch_loss/train': 0.7826402503997087} +12/21/2021 19:05:13 - INFO - codeparrot_training - Step 1440: {'lr': 0.000499205185213542, 'samples': 737792, 'steps': 1440, 'batch_loss/train': 0.9729894632473588} +12/21/2021 19:05:25 - INFO - codeparrot_training - Step 1441: {'lr': 0.000499203928760281, 'samples': 738304, 'steps': 1441, 'batch_loss/train': 0.7506841602735221} +12/21/2021 19:05:35 - INFO - codeparrot_training - Step 1442: {'lr': 0.0004992026713162793, 'samples': 738816, 'steps': 1442, 'batch_loss/train': 0.8113713748753071} +12/21/2021 19:05:46 - INFO - codeparrot_training - Step 1443: {'lr': 0.000499201412881542, 'samples': 739328, 'steps': 1443, 'batch_loss/train': 0.8861641632393003} +12/21/2021 19:05:59 - INFO - codeparrot_training - Step 1444: {'lr': 0.0004992001534560738, 'samples': 739840, 'steps': 1444, 'batch_loss/train': 0.9161312039941549} +12/21/2021 19:06:09 - INFO - codeparrot_training - Step 1445: {'lr': 0.0004991988930398801, 'samples': 740352, 'steps': 1445, 'batch_loss/train': 0.8818469177931547} +12/21/2021 19:06:20 - INFO - codeparrot_training - Step 1446: {'lr': 0.0004991976316329655, 'samples': 740864, 'steps': 1446, 'batch_loss/train': 0.9179386273026466} +12/21/2021 19:06:30 - INFO - codeparrot_training - Step 1447: {'lr': 0.0004991963692353353, 'samples': 741376, 'steps': 1447, 'batch_loss/train': 0.9280525110661983} +12/21/2021 19:06:42 - INFO - codeparrot_training - Step 1448: {'lr': 0.0004991951058469945, 'samples': 741888, 'steps': 1448, 'batch_loss/train': 0.9176689023151994} +12/21/2021 19:06:53 - INFO - codeparrot_training - Step 1449: {'lr': 0.0004991938414679479, 'samples': 742400, 'steps': 1449, 'batch_loss/train': 0.9053338039666414} +12/21/2021 19:07:04 - INFO - codeparrot_training - Step 1450: {'lr': 0.0004991925760982008, 'samples': 742912, 'steps': 1450, 'batch_loss/train': 0.8886358682066202} +12/21/2021 19:07:16 - INFO - codeparrot_training - Step 1451: {'lr': 0.0004991913097377581, 'samples': 743424, 'steps': 1451, 'batch_loss/train': 0.8458385253325105} +12/21/2021 19:07:27 - INFO - codeparrot_training - Step 1452: {'lr': 0.0004991900423866246, 'samples': 743936, 'steps': 1452, 'batch_loss/train': 0.8727362100034952} +12/21/2021 19:07:37 - INFO - codeparrot_training - Step 1453: {'lr': 0.0004991887740448059, 'samples': 744448, 'steps': 1453, 'batch_loss/train': 0.8100819559767842} +12/21/2021 19:07:49 - INFO - codeparrot_training - Step 1454: {'lr': 0.0004991875047123066, 'samples': 744960, 'steps': 1454, 'batch_loss/train': 0.9352717623114586} +12/21/2021 19:08:00 - INFO - codeparrot_training - Step 1455: {'lr': 0.0004991862343891319, 'samples': 745472, 'steps': 1455, 'batch_loss/train': 1.0207194704562426} +12/21/2021 19:08:11 - INFO - codeparrot_training - Step 1456: {'lr': 0.0004991849630752868, 'samples': 745984, 'steps': 1456, 'batch_loss/train': 0.9129997259005904} +12/21/2021 19:08:21 - INFO - codeparrot_training - Step 1457: {'lr': 0.0004991836907707765, 'samples': 746496, 'steps': 1457, 'batch_loss/train': 0.8793914546258748} +12/21/2021 19:08:34 - INFO - 
codeparrot_training - Step 1458: {'lr': 0.0004991824174756057, 'samples': 747008, 'steps': 1458, 'batch_loss/train': 0.8392095658928156} +12/21/2021 19:08:45 - INFO - codeparrot_training - Step 1459: {'lr': 0.0004991811431897798, 'samples': 747520, 'steps': 1459, 'batch_loss/train': 0.9201991688460112} +12/21/2021 19:08:55 - INFO - codeparrot_training - Step 1460: {'lr': 0.0004991798679133037, 'samples': 748032, 'steps': 1460, 'batch_loss/train': 0.9655222529545426} +12/21/2021 19:09:07 - INFO - codeparrot_training - Step 1461: {'lr': 0.0004991785916461826, 'samples': 748544, 'steps': 1461, 'batch_loss/train': 0.8345540221780539} +12/21/2021 19:09:18 - INFO - codeparrot_training - Step 1462: {'lr': 0.0004991773143884216, 'samples': 749056, 'steps': 1462, 'batch_loss/train': 0.9990052655339241} +12/21/2021 19:09:28 - INFO - codeparrot_training - Step 1463: {'lr': 0.0004991760361400256, 'samples': 749568, 'steps': 1463, 'batch_loss/train': 0.957309759221971} +12/21/2021 19:09:40 - INFO - codeparrot_training - Step 1464: {'lr': 0.0004991747569009997, 'samples': 750080, 'steps': 1464, 'batch_loss/train': 0.9088741270825267} +12/21/2021 19:09:51 - INFO - codeparrot_training - Step 1465: {'lr': 0.0004991734766713491, 'samples': 750592, 'steps': 1465, 'batch_loss/train': 0.8389442404732108} +12/21/2021 19:10:02 - INFO - codeparrot_training - Step 1466: {'lr': 0.0004991721954510788, 'samples': 751104, 'steps': 1466, 'batch_loss/train': 0.889693749602884} +12/21/2021 19:10:12 - INFO - codeparrot_training - Step 1467: {'lr': 0.000499170913240194, 'samples': 751616, 'steps': 1467, 'batch_loss/train': 0.9019071599468589} +12/21/2021 19:10:25 - INFO - codeparrot_training - Step 1468: {'lr': 0.0004991696300386998, 'samples': 752128, 'steps': 1468, 'batch_loss/train': 0.9565586755052209} +12/21/2021 19:10:36 - INFO - codeparrot_training - Step 1469: {'lr': 0.0004991683458466011, 'samples': 752640, 'steps': 1469, 'batch_loss/train': 1.0009773131459951} +12/21/2021 19:10:46 - INFO - codeparrot_training - Step 1470: {'lr': 0.0004991670606639032, 'samples': 753152, 'steps': 1470, 'batch_loss/train': 0.7662123013287783} +12/21/2021 19:10:59 - INFO - codeparrot_training - Step 1471: {'lr': 0.0004991657744906112, 'samples': 753664, 'steps': 1471, 'batch_loss/train': 0.6708750370889902} +12/21/2021 19:11:09 - INFO - codeparrot_training - Step 1472: {'lr': 0.00049916448732673, 'samples': 754176, 'steps': 1472, 'batch_loss/train': 0.8803734052926302} +12/21/2021 19:11:20 - INFO - codeparrot_training - Step 1473: {'lr': 0.000499163199172265, 'samples': 754688, 'steps': 1473, 'batch_loss/train': 0.9705691980198026} +12/21/2021 19:11:33 - INFO - codeparrot_training - Step 1474: {'lr': 0.0004991619100272211, 'samples': 755200, 'steps': 1474, 'batch_loss/train': 0.9967543762177229} +12/21/2021 19:11:43 - INFO - codeparrot_training - Step 1475: {'lr': 0.0004991606198916037, 'samples': 755712, 'steps': 1475, 'batch_loss/train': 0.8363066669553518} +12/21/2021 19:11:54 - INFO - codeparrot_training - Step 1476: {'lr': 0.0004991593287654177, 'samples': 756224, 'steps': 1476, 'batch_loss/train': 0.8749741897918284} +12/21/2021 19:12:04 - INFO - codeparrot_training - Step 1477: {'lr': 0.0004991580366486682, 'samples': 756736, 'steps': 1477, 'batch_loss/train': 0.895608157850802} +12/21/2021 19:12:17 - INFO - codeparrot_training - Step 1478: {'lr': 0.0004991567435413604, 'samples': 757248, 'steps': 1478, 'batch_loss/train': 0.9263411909341812} +12/21/2021 19:12:27 - INFO - codeparrot_training - Step 1479: {'lr': 
0.0004991554494434996, 'samples': 757760, 'steps': 1479, 'batch_loss/train': 0.7372247721068561} +12/21/2021 19:12:38 - INFO - codeparrot_training - Step 1480: {'lr': 0.0004991541543550907, 'samples': 758272, 'steps': 1480, 'batch_loss/train': 0.8418326985556632} +12/21/2021 19:12:50 - INFO - codeparrot_training - Step 1481: {'lr': 0.0004991528582761391, 'samples': 758784, 'steps': 1481, 'batch_loss/train': 1.209502063691616} +12/21/2021 19:13:01 - INFO - codeparrot_training - Step 1482: {'lr': 0.0004991515612066496, 'samples': 759296, 'steps': 1482, 'batch_loss/train': 1.087610300630331} +12/21/2021 19:13:12 - INFO - codeparrot_training - Step 1483: {'lr': 0.0004991502631466277, 'samples': 759808, 'steps': 1483, 'batch_loss/train': 0.8514039069414139} +12/21/2021 19:13:24 - INFO - codeparrot_training - Step 1484: {'lr': 0.0004991489640960785, 'samples': 760320, 'steps': 1484, 'batch_loss/train': 0.9606312597170472} +12/21/2021 19:13:35 - INFO - codeparrot_training - Step 1485: {'lr': 0.000499147664055007, 'samples': 760832, 'steps': 1485, 'batch_loss/train': 0.9134987881407142} +12/21/2021 19:13:46 - INFO - codeparrot_training - Step 1486: {'lr': 0.0004991463630234184, 'samples': 761344, 'steps': 1486, 'batch_loss/train': 0.860897995531559} +12/21/2021 19:13:56 - INFO - codeparrot_training - Step 1487: {'lr': 0.000499145061001318, 'samples': 761856, 'steps': 1487, 'batch_loss/train': 0.8509105974808335} +12/21/2021 19:14:08 - INFO - codeparrot_training - Step 1488: {'lr': 0.0004991437579887109, 'samples': 762368, 'steps': 1488, 'batch_loss/train': 0.8100118604488671} +12/21/2021 19:14:19 - INFO - codeparrot_training - Step 1489: {'lr': 0.0004991424539856024, 'samples': 762880, 'steps': 1489, 'batch_loss/train': 0.9670460168272257} +12/21/2021 19:14:30 - INFO - codeparrot_training - Step 1490: {'lr': 0.0004991411489919975, 'samples': 763392, 'steps': 1490, 'batch_loss/train': 0.9150240542367101} +12/21/2021 19:14:42 - INFO - codeparrot_training - Step 1491: {'lr': 0.0004991398430079013, 'samples': 763904, 'steps': 1491, 'batch_loss/train': 0.8878517271950841} +12/21/2021 19:14:52 - INFO - codeparrot_training - Step 1492: {'lr': 0.0004991385360333194, 'samples': 764416, 'steps': 1492, 'batch_loss/train': 0.9301278153434396} +12/21/2021 19:15:03 - INFO - codeparrot_training - Step 1493: {'lr': 0.0004991372280682566, 'samples': 764928, 'steps': 1493, 'batch_loss/train': 0.8494605850428343} +12/21/2021 19:15:16 - INFO - codeparrot_training - Step 1494: {'lr': 0.0004991359191127184, 'samples': 765440, 'steps': 1494, 'batch_loss/train': 1.1543506849557161} +12/21/2021 19:15:26 - INFO - codeparrot_training - Step 1495: {'lr': 0.0004991346091667097, 'samples': 765952, 'steps': 1495, 'batch_loss/train': 0.8929167026653886} +12/21/2021 19:15:37 - INFO - codeparrot_training - Step 1496: {'lr': 0.000499133298230236, 'samples': 766464, 'steps': 1496, 'batch_loss/train': 0.8789736004546285} +12/21/2021 19:15:48 - INFO - codeparrot_training - Step 1497: {'lr': 0.0004991319863033023, 'samples': 766976, 'steps': 1497, 'batch_loss/train': 0.8890458294190466} +12/21/2021 19:16:00 - INFO - codeparrot_training - Step 1498: {'lr': 0.0004991306733859138, 'samples': 767488, 'steps': 1498, 'batch_loss/train': 0.8582348180934787} +12/21/2021 19:16:10 - INFO - codeparrot_training - Step 1499: {'lr': 0.000499129359478076, 'samples': 768000, 'steps': 1499, 'batch_loss/train': 0.9286510422825813} +12/21/2021 19:16:21 - INFO - codeparrot_training - Step 1500: {'lr': 0.0004991280445797938, 'samples': 768512, 'steps': 
1500, 'batch_loss/train': 0.9479373442009091} +12/21/2021 19:16:33 - INFO - codeparrot_training - Step 1501: {'lr': 0.0004991267286910726, 'samples': 769024, 'steps': 1501, 'batch_loss/train': 1.016344478353858} +12/21/2021 19:16:44 - INFO - codeparrot_training - Step 1502: {'lr': 0.0004991254118119176, 'samples': 769536, 'steps': 1502, 'batch_loss/train': 0.8211573557928205} +12/21/2021 19:16:54 - INFO - codeparrot_training - Step 1503: {'lr': 0.000499124093942334, 'samples': 770048, 'steps': 1503, 'batch_loss/train': 1.0534568205475807} +12/21/2021 19:17:07 - INFO - codeparrot_training - Step 1504: {'lr': 0.000499122775082327, 'samples': 770560, 'steps': 1504, 'batch_loss/train': 0.878528674133122} +12/21/2021 19:17:17 - INFO - codeparrot_training - Step 1505: {'lr': 0.0004991214552319021, 'samples': 771072, 'steps': 1505, 'batch_loss/train': 0.8986593801528215} +12/21/2021 19:17:28 - INFO - codeparrot_training - Step 1506: {'lr': 0.0004991201343910643, 'samples': 771584, 'steps': 1506, 'batch_loss/train': 1.2879918431863189} +12/21/2021 19:17:38 - INFO - codeparrot_training - Step 1507: {'lr': 0.0004991188125598188, 'samples': 772096, 'steps': 1507, 'batch_loss/train': 1.0066184606403112} +12/21/2021 19:17:51 - INFO - codeparrot_training - Step 1508: {'lr': 0.000499117489738171, 'samples': 772608, 'steps': 1508, 'batch_loss/train': 0.9410562459379435} +12/21/2021 19:18:02 - INFO - codeparrot_training - Step 1509: {'lr': 0.0004991161659261262, 'samples': 773120, 'steps': 1509, 'batch_loss/train': 0.9238882078789175} +12/21/2021 19:18:12 - INFO - codeparrot_training - Step 1510: {'lr': 0.0004991148411236895, 'samples': 773632, 'steps': 1510, 'batch_loss/train': 0.79955212213099} +12/21/2021 19:18:24 - INFO - codeparrot_training - Step 1511: {'lr': 0.0004991135153308663, 'samples': 774144, 'steps': 1511, 'batch_loss/train': 0.9009072091430426} +12/21/2021 19:18:35 - INFO - codeparrot_training - Step 1512: {'lr': 0.0004991121885476619, 'samples': 774656, 'steps': 1512, 'batch_loss/train': 0.7902439776808023} +12/21/2021 19:18:46 - INFO - codeparrot_training - Step 1513: {'lr': 0.0004991108607740814, 'samples': 775168, 'steps': 1513, 'batch_loss/train': 0.9307892825454473} +12/21/2021 19:18:58 - INFO - codeparrot_training - Step 1514: {'lr': 0.0004991095320101303, 'samples': 775680, 'steps': 1514, 'batch_loss/train': 0.8853988570626825} +12/21/2021 19:19:09 - INFO - codeparrot_training - Step 1515: {'lr': 0.0004991082022558135, 'samples': 776192, 'steps': 1515, 'batch_loss/train': 0.9643477988429368} +12/21/2021 19:19:20 - INFO - codeparrot_training - Step 1516: {'lr': 0.0004991068715111367, 'samples': 776704, 'steps': 1516, 'batch_loss/train': 0.9457984799519181} +12/21/2021 19:19:30 - INFO - codeparrot_training - Step 1517: {'lr': 0.0004991055397761051, 'samples': 777216, 'steps': 1517, 'batch_loss/train': 0.8710153824649751} +12/21/2021 19:19:42 - INFO - codeparrot_training - Step 1518: {'lr': 0.0004991042070507239, 'samples': 777728, 'steps': 1518, 'batch_loss/train': 0.9458189494907856} +12/21/2021 19:19:53 - INFO - codeparrot_training - Step 1519: {'lr': 0.0004991028733349984, 'samples': 778240, 'steps': 1519, 'batch_loss/train': 0.8499181373044848} +12/21/2021 19:20:04 - INFO - codeparrot_training - Step 1520: {'lr': 0.0004991015386289339, 'samples': 778752, 'steps': 1520, 'batch_loss/train': 0.9264786168932915} +12/21/2021 19:20:16 - INFO - codeparrot_training - Step 1521: {'lr': 0.0004991002029325357, 'samples': 779264, 'steps': 1521, 'batch_loss/train': 0.8807628750801086} 
+12/21/2021 19:20:26 - INFO - codeparrot_training - Step 1522: {'lr': 0.0004990988662458094, 'samples': 779776, 'steps': 1522, 'batch_loss/train': 0.9004290448501706} +12/21/2021 19:20:37 - INFO - codeparrot_training - Step 1523: {'lr': 0.0004990975285687598, 'samples': 780288, 'steps': 1523, 'batch_loss/train': 0.9843099880963564} +12/21/2021 19:20:50 - INFO - codeparrot_training - Step 1524: {'lr': 0.0004990961899013927, 'samples': 780800, 'steps': 1524, 'batch_loss/train': 0.8712683441117406} +12/21/2021 19:21:01 - INFO - codeparrot_training - Step 1525: {'lr': 0.0004990948502437131, 'samples': 781312, 'steps': 1525, 'batch_loss/train': 0.7869816906750202} +12/21/2021 19:21:11 - INFO - codeparrot_training - Step 1526: {'lr': 0.0004990935095957263, 'samples': 781824, 'steps': 1526, 'batch_loss/train': 0.7774976945947856} +12/21/2021 19:21:22 - INFO - codeparrot_training - Step 1527: {'lr': 0.0004990921679574379, 'samples': 782336, 'steps': 1527, 'batch_loss/train': 0.9547456745058298} +12/21/2021 19:21:34 - INFO - codeparrot_training - Step 1528: {'lr': 0.0004990908253288531, 'samples': 782848, 'steps': 1528, 'batch_loss/train': 0.8119464088231325} +12/21/2021 19:21:44 - INFO - codeparrot_training - Step 1529: {'lr': 0.0004990894817099771, 'samples': 783360, 'steps': 1529, 'batch_loss/train': 0.9305125717073679} +12/21/2021 19:21:55 - INFO - codeparrot_training - Step 1530: {'lr': 0.0004990881371008155, 'samples': 783872, 'steps': 1530, 'batch_loss/train': 0.8607888668775558} +12/21/2021 19:22:07 - INFO - codeparrot_training - Step 1531: {'lr': 0.0004990867915013735, 'samples': 784384, 'steps': 1531, 'batch_loss/train': 0.9708341555669904} +12/21/2021 19:22:18 - INFO - codeparrot_training - Step 1532: {'lr': 0.0004990854449116565, 'samples': 784896, 'steps': 1532, 'batch_loss/train': 0.8977903034538031} +12/21/2021 19:22:28 - INFO - codeparrot_training - Step 1533: {'lr': 0.0004990840973316697, 'samples': 785408, 'steps': 1533, 'batch_loss/train': 0.871687388047576} +12/21/2021 19:22:41 - INFO - codeparrot_training - Step 1534: {'lr': 0.0004990827487614187, 'samples': 785920, 'steps': 1534, 'batch_loss/train': 1.1145816231146455} +12/21/2021 19:22:51 - INFO - codeparrot_training - Step 1535: {'lr': 0.0004990813992009086, 'samples': 786432, 'steps': 1535, 'batch_loss/train': 0.8210630025714636} +12/21/2021 19:23:02 - INFO - codeparrot_training - Step 1536: {'lr': 0.0004990800486501452, 'samples': 786944, 'steps': 1536, 'batch_loss/train': 0.8837874531745911} +12/21/2021 19:23:12 - INFO - codeparrot_training - Step 1537: {'lr': 0.0004990786971091334, 'samples': 787456, 'steps': 1537, 'batch_loss/train': 0.8106287140399218} +12/21/2021 19:23:25 - INFO - codeparrot_training - Step 1538: {'lr': 0.0004990773445778786, 'samples': 787968, 'steps': 1538, 'batch_loss/train': 0.8841955773532391} +12/21/2021 19:23:36 - INFO - codeparrot_training - Step 1539: {'lr': 0.0004990759910563865, 'samples': 788480, 'steps': 1539, 'batch_loss/train': 0.8380987984128296} +12/21/2021 19:23:46 - INFO - codeparrot_training - Step 1540: {'lr': 0.0004990746365446622, 'samples': 788992, 'steps': 1540, 'batch_loss/train': 0.6680723326280713} +12/21/2021 19:23:59 - INFO - codeparrot_training - Step 1541: {'lr': 0.0004990732810427113, 'samples': 789504, 'steps': 1541, 'batch_loss/train': 0.9859416512772441} +12/21/2021 19:24:09 - INFO - codeparrot_training - Step 1542: {'lr': 0.0004990719245505391, 'samples': 790016, 'steps': 1542, 'batch_loss/train': 0.958910996094346} +12/21/2021 19:24:20 - INFO - codeparrot_training 
- Step 1543: {'lr': 0.0004990705670681508, 'samples': 790528, 'steps': 1543, 'batch_loss/train': 0.8678007861599326} +12/21/2021 19:24:33 - INFO - codeparrot_training - Step 1544: {'lr': 0.0004990692085955522, 'samples': 791040, 'steps': 1544, 'batch_loss/train': 0.8476616442203522} +12/21/2021 19:24:44 - INFO - codeparrot_training - Step 1545: {'lr': 0.0004990678491327482, 'samples': 791552, 'steps': 1545, 'batch_loss/train': 0.8259910992346704} +12/21/2021 19:24:54 - INFO - codeparrot_training - Step 1546: {'lr': 0.0004990664886797447, 'samples': 792064, 'steps': 1546, 'batch_loss/train': 0.8504134751856327} +12/21/2021 19:25:05 - INFO - codeparrot_training - Step 1547: {'lr': 0.0004990651272365469, 'samples': 792576, 'steps': 1547, 'batch_loss/train': 0.8602686319500208} +12/21/2021 19:25:17 - INFO - codeparrot_training - Step 1548: {'lr': 0.00049906376480316, 'samples': 793088, 'steps': 1548, 'batch_loss/train': 1.0521958786994219} +12/21/2021 19:25:28 - INFO - codeparrot_training - Step 1549: {'lr': 0.0004990624013795898, 'samples': 793600, 'steps': 1549, 'batch_loss/train': 0.9907089453190565} +12/21/2021 19:25:38 - INFO - codeparrot_training - Step 1550: {'lr': 0.0004990610369658414, 'samples': 794112, 'steps': 1550, 'batch_loss/train': 0.946219832636416} +12/21/2021 19:25:51 - INFO - codeparrot_training - Step 1551: {'lr': 0.0004990596715619205, 'samples': 794624, 'steps': 1551, 'batch_loss/train': 0.868032711558044} +12/21/2021 19:26:02 - INFO - codeparrot_training - Step 1552: {'lr': 0.0004990583051678322, 'samples': 795136, 'steps': 1552, 'batch_loss/train': 0.884167302865535} +12/21/2021 19:26:12 - INFO - codeparrot_training - Step 1553: {'lr': 0.0004990569377835823, 'samples': 795648, 'steps': 1553, 'batch_loss/train': 0.6480185776017606} +12/21/2021 19:26:23 - INFO - codeparrot_training - Step 1554: {'lr': 0.0004990555694091759, 'samples': 796160, 'steps': 1554, 'batch_loss/train': 0.8735680351965129} +12/21/2021 19:26:36 - INFO - codeparrot_training - Step 1555: {'lr': 0.0004990542000446188, 'samples': 796672, 'steps': 1555, 'batch_loss/train': 0.9366705920547247} +12/21/2021 19:26:46 - INFO - codeparrot_training - Step 1556: {'lr': 0.0004990528296899161, 'samples': 797184, 'steps': 1556, 'batch_loss/train': 0.8278948925435543} +12/21/2021 19:26:57 - INFO - codeparrot_training - Step 1557: {'lr': 0.0004990514583450734, 'samples': 797696, 'steps': 1557, 'batch_loss/train': 0.9266274953261018} +12/21/2021 19:27:09 - INFO - codeparrot_training - Step 1558: {'lr': 0.0004990500860100961, 'samples': 798208, 'steps': 1558, 'batch_loss/train': 0.9197458419948816} +12/21/2021 19:27:20 - INFO - codeparrot_training - Step 1559: {'lr': 0.0004990487126849898, 'samples': 798720, 'steps': 1559, 'batch_loss/train': 0.8157045301049948} +12/21/2021 19:27:30 - INFO - codeparrot_training - Step 1560: {'lr': 0.0004990473383697597, 'samples': 799232, 'steps': 1560, 'batch_loss/train': 0.9552362952381372} +12/21/2021 19:27:43 - INFO - codeparrot_training - Step 1561: {'lr': 0.0004990459630644116, 'samples': 799744, 'steps': 1561, 'batch_loss/train': 0.8295984999276698} +12/21/2021 19:27:54 - INFO - codeparrot_training - Step 1562: {'lr': 0.0004990445867689506, 'samples': 800256, 'steps': 1562, 'batch_loss/train': 0.9468330757226795} +12/21/2021 19:28:04 - INFO - codeparrot_training - Step 1563: {'lr': 0.0004990432094833826, 'samples': 800768, 'steps': 1563, 'batch_loss/train': 0.9141863500699401} +12/21/2021 19:28:15 - INFO - codeparrot_training - Step 1564: {'lr': 0.0004990418312077127, 
'samples': 801280, 'steps': 1564, 'batch_loss/train': 0.8741412414237857} +12/21/2021 19:28:27 - INFO - codeparrot_training - Step 1565: {'lr': 0.0004990404519419464, 'samples': 801792, 'steps': 1565, 'batch_loss/train': 1.0191202405840158} +12/21/2021 19:28:38 - INFO - codeparrot_training - Step 1566: {'lr': 0.0004990390716860894, 'samples': 802304, 'steps': 1566, 'batch_loss/train': 0.8509348733350635} +12/21/2021 19:28:48 - INFO - codeparrot_training - Step 1567: {'lr': 0.0004990376904401471, 'samples': 802816, 'steps': 1567, 'batch_loss/train': 0.9865949116647243} +12/21/2021 19:29:01 - INFO - codeparrot_training - Step 1568: {'lr': 0.000499036308204125, 'samples': 803328, 'steps': 1568, 'batch_loss/train': 0.8997511016204953} +12/21/2021 19:29:11 - INFO - codeparrot_training - Step 1569: {'lr': 0.0004990349249780285, 'samples': 803840, 'steps': 1569, 'batch_loss/train': 0.9291855967603624} +12/21/2021 19:29:22 - INFO - codeparrot_training - Step 1570: {'lr': 0.0004990335407618632, 'samples': 804352, 'steps': 1570, 'batch_loss/train': 0.8769066073000431} +12/21/2021 19:29:34 - INFO - codeparrot_training - Step 1571: {'lr': 0.0004990321555556346, 'samples': 804864, 'steps': 1571, 'batch_loss/train': 0.9875875562429428} +12/21/2021 19:29:44 - INFO - codeparrot_training - Step 1572: {'lr': 0.0004990307693593482, 'samples': 805376, 'steps': 1572, 'batch_loss/train': 0.8935456285253167} +12/21/2021 19:29:55 - INFO - codeparrot_training - Step 1573: {'lr': 0.0004990293821730094, 'samples': 805888, 'steps': 1573, 'batch_loss/train': 0.9274824447929859} +12/21/2021 19:30:05 - INFO - codeparrot_training - Step 1574: {'lr': 0.0004990279939966239, 'samples': 806400, 'steps': 1574, 'batch_loss/train': 0.9043461563996971} +12/21/2021 19:30:19 - INFO - codeparrot_training - Step 1575: {'lr': 0.0004990266048301971, 'samples': 806912, 'steps': 1575, 'batch_loss/train': 0.8452258533798158} +12/21/2021 19:30:29 - INFO - codeparrot_training - Step 1576: {'lr': 0.0004990252146737345, 'samples': 807424, 'steps': 1576, 'batch_loss/train': 1.0736794285476208} +12/21/2021 19:30:40 - INFO - codeparrot_training - Step 1577: {'lr': 0.0004990238235272418, 'samples': 807936, 'steps': 1577, 'batch_loss/train': 0.9411089336499572} +12/21/2021 19:30:52 - INFO - codeparrot_training - Step 1578: {'lr': 0.0004990224313907243, 'samples': 808448, 'steps': 1578, 'batch_loss/train': 0.7255055927671492} +12/21/2021 19:31:02 - INFO - codeparrot_training - Step 1579: {'lr': 0.0004990210382641875, 'samples': 808960, 'steps': 1579, 'batch_loss/train': 0.9548053853213787} +12/21/2021 19:31:13 - INFO - codeparrot_training - Step 1580: {'lr': 0.0004990196441476373, 'samples': 809472, 'steps': 1580, 'batch_loss/train': 0.8864284581504762} +12/21/2021 19:31:25 - INFO - codeparrot_training - Step 1581: {'lr': 0.0004990182490410789, 'samples': 809984, 'steps': 1581, 'batch_loss/train': 1.0247873272746801} +12/21/2021 19:31:36 - INFO - codeparrot_training - Step 1582: {'lr': 0.000499016852944518, 'samples': 810496, 'steps': 1582, 'batch_loss/train': 0.8404687298461795} +12/21/2021 19:31:46 - INFO - codeparrot_training - Step 1583: {'lr': 0.0004990154558579602, 'samples': 811008, 'steps': 1583, 'batch_loss/train': 0.935194200836122} +12/21/2021 19:31:57 - INFO - codeparrot_training - Step 1584: {'lr': 0.0004990140577814109, 'samples': 811520, 'steps': 1584, 'batch_loss/train': 1.0090470649302006} +12/21/2021 19:32:10 - INFO - codeparrot_training - Step 1585: {'lr': 0.0004990126587148758, 'samples': 812032, 'steps': 1585, 
'batch_loss/train': 0.9786989237181842} +12/21/2021 19:32:20 - INFO - codeparrot_training - Step 1586: {'lr': 0.0004990112586583603, 'samples': 812544, 'steps': 1586, 'batch_loss/train': 0.8996252031065524} +12/21/2021 19:32:31 - INFO - codeparrot_training - Step 1587: {'lr': 0.0004990098576118701, 'samples': 813056, 'steps': 1587, 'batch_loss/train': 0.9940212331712246} +12/21/2021 19:32:43 - INFO - codeparrot_training - Step 1588: {'lr': 0.0004990084555754107, 'samples': 813568, 'steps': 1588, 'batch_loss/train': 0.9426584369502962} +12/21/2021 19:32:54 - INFO - codeparrot_training - Step 1589: {'lr': 0.0004990070525489877, 'samples': 814080, 'steps': 1589, 'batch_loss/train': 0.9588494468480349} +12/21/2021 19:33:04 - INFO - codeparrot_training - Step 1590: {'lr': 0.0004990056485326067, 'samples': 814592, 'steps': 1590, 'batch_loss/train': 0.900996298994869} +12/21/2021 19:33:17 - INFO - codeparrot_training - Step 1591: {'lr': 0.0004990042435262733, 'samples': 815104, 'steps': 1591, 'batch_loss/train': 0.8317434769123793} +12/21/2021 19:33:28 - INFO - codeparrot_training - Step 1592: {'lr': 0.000499002837529993, 'samples': 815616, 'steps': 1592, 'batch_loss/train': 0.8489330234006047} +12/21/2021 19:33:38 - INFO - codeparrot_training - Step 1593: {'lr': 0.0004990014305437714, 'samples': 816128, 'steps': 1593, 'batch_loss/train': 0.854260669555515} +12/21/2021 19:33:49 - INFO - codeparrot_training - Step 1594: {'lr': 0.0004990000225676142, 'samples': 816640, 'steps': 1594, 'batch_loss/train': 0.834496327675879} +12/21/2021 19:34:01 - INFO - codeparrot_training - Step 1595: {'lr': 0.000498998613601527, 'samples': 817152, 'steps': 1595, 'batch_loss/train': 0.929279656149447} +12/21/2021 19:34:12 - INFO - codeparrot_training - Step 1596: {'lr': 0.0004989972036455151, 'samples': 817664, 'steps': 1596, 'batch_loss/train': 0.8766897656023502} +12/21/2021 19:34:22 - INFO - codeparrot_training - Step 1597: {'lr': 0.0004989957926995846, 'samples': 818176, 'steps': 1597, 'batch_loss/train': 0.8777226358652115} +12/21/2021 19:34:34 - INFO - codeparrot_training - Step 1598: {'lr': 0.0004989943807637407, 'samples': 818688, 'steps': 1598, 'batch_loss/train': 0.8732022782787681} +12/21/2021 19:34:45 - INFO - codeparrot_training - Step 1599: {'lr': 0.0004989929678379892, 'samples': 819200, 'steps': 1599, 'batch_loss/train': 0.8743901252746582} +12/21/2021 19:34:56 - INFO - codeparrot_training - Step 1600: {'lr': 0.0004989915539223357, 'samples': 819712, 'steps': 1600, 'batch_loss/train': 0.8758493410423398} +12/21/2021 19:35:08 - INFO - codeparrot_training - Step 1601: {'lr': 0.0004989901390167857, 'samples': 820224, 'steps': 1601, 'batch_loss/train': 0.9535415591672063} +12/21/2021 19:35:19 - INFO - codeparrot_training - Step 1602: {'lr': 0.0004989887231213449, 'samples': 820736, 'steps': 1602, 'batch_loss/train': 0.9203783189877868} +12/21/2021 19:35:30 - INFO - codeparrot_training - Step 1603: {'lr': 0.0004989873062360191, 'samples': 821248, 'steps': 1603, 'batch_loss/train': 0.8980770474299788} +12/21/2021 19:35:42 - INFO - codeparrot_training - Step 1604: {'lr': 0.0004989858883608137, 'samples': 821760, 'steps': 1604, 'batch_loss/train': 0.9070390276610851} +12/21/2021 19:35:53 - INFO - codeparrot_training - Step 1605: {'lr': 0.0004989844694957343, 'samples': 822272, 'steps': 1605, 'batch_loss/train': 0.8865944016724825} +12/21/2021 19:36:03 - INFO - codeparrot_training - Step 1606: {'lr': 0.0004989830496407869, 'samples': 822784, 'steps': 1606, 'batch_loss/train': 0.8270519720390439} +12/21/2021 
19:36:14 - INFO - codeparrot_training - Step 1607: {'lr': 0.0004989816287959768, 'samples': 823296, 'steps': 1607, 'batch_loss/train': 0.9952144976705313} +12/21/2021 19:36:26 - INFO - codeparrot_training - Step 1608: {'lr': 0.0004989802069613097, 'samples': 823808, 'steps': 1608, 'batch_loss/train': 0.8741616876795888} +12/21/2021 19:36:37 - INFO - codeparrot_training - Step 1609: {'lr': 0.0004989787841367914, 'samples': 824320, 'steps': 1609, 'batch_loss/train': 0.9387342985719442} +12/21/2021 19:36:47 - INFO - codeparrot_training - Step 1610: {'lr': 0.0004989773603224274, 'samples': 824832, 'steps': 1610, 'batch_loss/train': 0.9511869661509991} +12/21/2021 19:36:59 - INFO - codeparrot_training - Step 1611: {'lr': 0.0004989759355182235, 'samples': 825344, 'steps': 1611, 'batch_loss/train': 1.0106961466372013} +12/21/2021 19:37:10 - INFO - codeparrot_training - Step 1612: {'lr': 0.0004989745097241853, 'samples': 825856, 'steps': 1612, 'batch_loss/train': 0.8387086261063814} +12/21/2021 19:37:21 - INFO - codeparrot_training - Step 1613: {'lr': 0.0004989730829403185, 'samples': 826368, 'steps': 1613, 'batch_loss/train': 0.8310669106431305} +12/21/2021 19:37:33 - INFO - codeparrot_training - Step 1614: {'lr': 0.0004989716551666285, 'samples': 826880, 'steps': 1614, 'batch_loss/train': 0.9509173333644867} +12/21/2021 19:37:44 - INFO - codeparrot_training - Step 1615: {'lr': 0.0004989702264031213, 'samples': 827392, 'steps': 1615, 'batch_loss/train': 0.9248645920306444} +12/21/2021 19:37:55 - INFO - codeparrot_training - Step 1616: {'lr': 0.0004989687966498026, 'samples': 827904, 'steps': 1616, 'batch_loss/train': 0.833437523804605} +12/21/2021 19:38:05 - INFO - codeparrot_training - Step 1617: {'lr': 0.0004989673659066779, 'samples': 828416, 'steps': 1617, 'batch_loss/train': 0.9388104099780321} +12/21/2021 19:38:18 - INFO - codeparrot_training - Step 1618: {'lr': 0.000498965934173753, 'samples': 828928, 'steps': 1618, 'batch_loss/train': 0.9078132705762982} +12/21/2021 19:38:28 - INFO - codeparrot_training - Step 1619: {'lr': 0.0004989645014510334, 'samples': 829440, 'steps': 1619, 'batch_loss/train': 0.8750639786012471} +12/21/2021 19:38:39 - INFO - codeparrot_training - Step 1620: {'lr': 0.0004989630677385251, 'samples': 829952, 'steps': 1620, 'batch_loss/train': 0.8077618693932891} +12/21/2021 19:38:51 - INFO - codeparrot_training - Step 1621: {'lr': 0.0004989616330362337, 'samples': 830464, 'steps': 1621, 'batch_loss/train': 0.8755777953192592} +12/21/2021 19:39:02 - INFO - codeparrot_training - Step 1622: {'lr': 0.0004989601973441647, 'samples': 830976, 'steps': 1622, 'batch_loss/train': 0.880197555758059} +12/21/2021 19:39:13 - INFO - codeparrot_training - Step 1623: {'lr': 0.000498958760662324, 'samples': 831488, 'steps': 1623, 'batch_loss/train': 0.9659086624160409} +12/21/2021 19:39:25 - INFO - codeparrot_training - Step 1624: {'lr': 0.0004989573229907172, 'samples': 832000, 'steps': 1624, 'batch_loss/train': 0.9231716627255082} +12/21/2021 19:39:36 - INFO - codeparrot_training - Step 1625: {'lr': 0.0004989558843293503, 'samples': 832512, 'steps': 1625, 'batch_loss/train': 1.0634433487430215} +12/21/2021 19:39:47 - INFO - codeparrot_training - Step 1626: {'lr': 0.0004989544446782287, 'samples': 833024, 'steps': 1626, 'batch_loss/train': 0.8936382224783301} +12/21/2021 19:39:57 - INFO - codeparrot_training - Step 1627: {'lr': 0.0004989530040373582, 'samples': 833536, 'steps': 1627, 'batch_loss/train': 0.9219766827300191} +12/21/2021 19:40:09 - INFO - codeparrot_training - Step 1628: 
{'lr': 0.0004989515624067445, 'samples': 834048, 'steps': 1628, 'batch_loss/train': 0.8224241929128766} +12/21/2021 19:40:20 - INFO - codeparrot_training - Step 1629: {'lr': 0.0004989501197863934, 'samples': 834560, 'steps': 1629, 'batch_loss/train': 0.8785842685028911} +12/21/2021 19:40:31 - INFO - codeparrot_training - Step 1630: {'lr': 0.0004989486761763108, 'samples': 835072, 'steps': 1630, 'batch_loss/train': 0.932359257247299} +12/21/2021 19:40:43 - INFO - codeparrot_training - Step 1631: {'lr': 0.0004989472315765021, 'samples': 835584, 'steps': 1631, 'batch_loss/train': 1.027493633562699} +12/21/2021 19:40:54 - INFO - codeparrot_training - Step 1632: {'lr': 0.0004989457859869733, 'samples': 836096, 'steps': 1632, 'batch_loss/train': 0.8971542920917273} +12/21/2021 19:41:05 - INFO - codeparrot_training - Step 1633: {'lr': 0.00049894433940773, 'samples': 836608, 'steps': 1633, 'batch_loss/train': 0.9591789881233126} +12/21/2021 19:41:15 - INFO - codeparrot_training - Step 1634: {'lr': 0.000498942891838778, 'samples': 837120, 'steps': 1634, 'batch_loss/train': 0.8635334779974073} +12/21/2021 19:41:27 - INFO - codeparrot_training - Step 1635: {'lr': 0.000498941443280123, 'samples': 837632, 'steps': 1635, 'batch_loss/train': 0.8572245980612934} +12/21/2021 19:41:38 - INFO - codeparrot_training - Step 1636: {'lr': 0.0004989399937317709, 'samples': 838144, 'steps': 1636, 'batch_loss/train': 0.8242830978706479} +12/21/2021 19:41:49 - INFO - codeparrot_training - Step 1637: {'lr': 0.0004989385431937274, 'samples': 838656, 'steps': 1637, 'batch_loss/train': 0.8978930844459683} +12/21/2021 19:42:01 - INFO - codeparrot_training - Step 1638: {'lr': 0.0004989370916659982, 'samples': 839168, 'steps': 1638, 'batch_loss/train': 0.8647003355436027} +12/21/2021 19:42:11 - INFO - codeparrot_training - Step 1639: {'lr': 0.0004989356391485892, 'samples': 839680, 'steps': 1639, 'batch_loss/train': 0.794282587012276} +12/21/2021 19:42:22 - INFO - codeparrot_training - Step 1640: {'lr': 0.000498934185641506, 'samples': 840192, 'steps': 1640, 'batch_loss/train': 0.9602486668154597} +12/21/2021 19:42:35 - INFO - codeparrot_training - Step 1641: {'lr': 0.0004989327311447544, 'samples': 840704, 'steps': 1641, 'batch_loss/train': 0.8559561893343925} +12/21/2021 19:42:45 - INFO - codeparrot_training - Step 1642: {'lr': 0.0004989312756583404, 'samples': 841216, 'steps': 1642, 'batch_loss/train': 0.9191740900278091} +12/21/2021 19:42:56 - INFO - codeparrot_training - Step 1643: {'lr': 0.0004989298191822696, 'samples': 841728, 'steps': 1643, 'batch_loss/train': 0.8940439550206065} +12/21/2021 19:43:06 - INFO - codeparrot_training - Step 1644: {'lr': 0.0004989283617165479, 'samples': 842240, 'steps': 1644, 'batch_loss/train': 0.9084221813827753} +12/21/2021 19:43:19 - INFO - codeparrot_training - Step 1645: {'lr': 0.0004989269032611809, 'samples': 842752, 'steps': 1645, 'batch_loss/train': 0.8558106198906898} +12/21/2021 19:43:29 - INFO - codeparrot_training - Step 1646: {'lr': 0.0004989254438161746, 'samples': 843264, 'steps': 1646, 'batch_loss/train': 0.9146951215807348} +12/21/2021 19:43:40 - INFO - codeparrot_training - Step 1647: {'lr': 0.0004989239833815347, 'samples': 843776, 'steps': 1647, 'batch_loss/train': 0.9020987655967474} +12/21/2021 19:43:52 - INFO - codeparrot_training - Step 1648: {'lr': 0.000498922521957267, 'samples': 844288, 'steps': 1648, 'batch_loss/train': 1.034890677779913} +12/21/2021 19:44:03 - INFO - codeparrot_training - Step 1649: {'lr': 0.0004989210595433774, 'samples': 844800, 'steps': 
1649, 'batch_loss/train': 0.8290247670374811} +12/21/2021 19:44:14 - INFO - codeparrot_training - Step 1650: {'lr': 0.0004989195961398716, 'samples': 845312, 'steps': 1650, 'batch_loss/train': 0.9488262673839927} +12/21/2021 19:44:26 - INFO - codeparrot_training - Step 1651: {'lr': 0.0004989181317467555, 'samples': 845824, 'steps': 1651, 'batch_loss/train': 1.0439008735120296} +12/21/2021 19:44:36 - INFO - codeparrot_training - Step 1652: {'lr': 0.0004989166663640349, 'samples': 846336, 'steps': 1652, 'batch_loss/train': 0.9392561241984367} +12/21/2021 19:44:47 - INFO - codeparrot_training - Step 1653: {'lr': 0.0004989151999917156, 'samples': 846848, 'steps': 1653, 'batch_loss/train': 1.0095143702346832} +12/21/2021 19:44:58 - INFO - codeparrot_training - Step 1654: {'lr': 0.0004989137326298035, 'samples': 847360, 'steps': 1654, 'batch_loss/train': 0.8812794545665383} +12/21/2021 19:45:10 - INFO - codeparrot_training - Step 1655: {'lr': 0.0004989122642783044, 'samples': 847872, 'steps': 1655, 'batch_loss/train': 0.9923330219462514} +12/21/2021 19:45:21 - INFO - codeparrot_training - Step 1656: {'lr': 0.000498910794937224, 'samples': 848384, 'steps': 1656, 'batch_loss/train': 1.0809170519933105} +12/21/2021 19:45:31 - INFO - codeparrot_training - Step 1657: {'lr': 0.0004989093246065684, 'samples': 848896, 'steps': 1657, 'batch_loss/train': 0.9620170630514622} +12/21/2021 19:45:44 - INFO - codeparrot_training - Step 1658: {'lr': 0.0004989078532863432, 'samples': 849408, 'steps': 1658, 'batch_loss/train': 1.0742413885891438} +12/21/2021 19:45:54 - INFO - codeparrot_training - Step 1659: {'lr': 0.0004989063809765545, 'samples': 849920, 'steps': 1659, 'batch_loss/train': 0.8559922957792878} +12/21/2021 19:46:05 - INFO - codeparrot_training - Step 1660: {'lr': 0.0004989049076772079, 'samples': 850432, 'steps': 1660, 'batch_loss/train': 0.843426818959415} +12/21/2021 19:46:17 - INFO - codeparrot_training - Step 1661: {'lr': 0.0004989034333883094, 'samples': 850944, 'steps': 1661, 'batch_loss/train': 0.8882105117663741} +12/21/2021 19:46:28 - INFO - codeparrot_training - Step 1662: {'lr': 0.0004989019581098648, 'samples': 851456, 'steps': 1662, 'batch_loss/train': 0.8527037426829338} +12/21/2021 19:46:39 - INFO - codeparrot_training - Step 1663: {'lr': 0.00049890048184188, 'samples': 851968, 'steps': 1663, 'batch_loss/train': 0.992137317545712} +12/21/2021 19:46:51 - INFO - codeparrot_training - Step 1664: {'lr': 0.0004988990045843609, 'samples': 852480, 'steps': 1664, 'batch_loss/train': 0.8860413506627083} +12/21/2021 19:47:01 - INFO - codeparrot_training - Step 1665: {'lr': 0.0004988975263373134, 'samples': 852992, 'steps': 1665, 'batch_loss/train': 0.8462892472743988} +12/21/2021 19:47:12 - INFO - codeparrot_training - Step 1666: {'lr': 0.0004988960471007432, 'samples': 853504, 'steps': 1666, 'batch_loss/train': 0.9574611871503294} +12/21/2021 19:47:23 - INFO - codeparrot_training - Step 1667: {'lr': 0.0004988945668746563, 'samples': 854016, 'steps': 1667, 'batch_loss/train': 0.9716064305976033} +12/21/2021 19:47:35 - INFO - codeparrot_training - Step 1668: {'lr': 0.0004988930856590587, 'samples': 854528, 'steps': 1668, 'batch_loss/train': 0.9574818797409534} +12/21/2021 19:47:45 - INFO - codeparrot_training - Step 1669: {'lr': 0.000498891603453956, 'samples': 855040, 'steps': 1669, 'batch_loss/train': 0.8710432769730687} +12/21/2021 19:47:56 - INFO - codeparrot_training - Step 1670: {'lr': 0.0004988901202593544, 'samples': 855552, 'steps': 1670, 'batch_loss/train': 0.9373943191021681} 
+12/21/2021 19:48:09 - INFO - codeparrot_training - Step 1671: {'lr': 0.0004988886360752596, 'samples': 856064, 'steps': 1671, 'batch_loss/train': 0.9859117092564702} +12/21/2021 19:48:19 - INFO - codeparrot_training - Step 1672: {'lr': 0.0004988871509016776, 'samples': 856576, 'steps': 1672, 'batch_loss/train': 0.8840123564004898} +12/21/2021 19:48:30 - INFO - codeparrot_training - Step 1673: {'lr': 0.0004988856647386142, 'samples': 857088, 'steps': 1673, 'batch_loss/train': 0.9437036588788033} +12/21/2021 19:48:42 - INFO - codeparrot_training - Step 1674: {'lr': 0.0004988841775860754, 'samples': 857600, 'steps': 1674, 'batch_loss/train': 1.0447020791471004} +12/21/2021 19:48:52 - INFO - codeparrot_training - Step 1675: {'lr': 0.0004988826894440671, 'samples': 858112, 'steps': 1675, 'batch_loss/train': 1.3184889201074839} +12/21/2021 19:49:03 - INFO - codeparrot_training - Step 1676: {'lr': 0.0004988812003125952, 'samples': 858624, 'steps': 1676, 'batch_loss/train': 0.9408456021919847} +12/21/2021 19:49:14 - INFO - codeparrot_training - Step 1677: {'lr': 0.0004988797101916656, 'samples': 859136, 'steps': 1677, 'batch_loss/train': 0.8162884362973273} +12/21/2021 19:49:27 - INFO - codeparrot_training - Step 1678: {'lr': 0.0004988782190812843, 'samples': 859648, 'steps': 1678, 'batch_loss/train': 0.9030062723904848} +12/21/2021 19:49:37 - INFO - codeparrot_training - Step 1679: {'lr': 0.0004988767269814571, 'samples': 860160, 'steps': 1679, 'batch_loss/train': 0.88985241856426} +12/21/2021 19:49:48 - INFO - codeparrot_training - Step 1680: {'lr': 0.00049887523389219, 'samples': 860672, 'steps': 1680, 'batch_loss/train': 0.8859491515904665} +12/21/2021 19:50:00 - INFO - codeparrot_training - Step 1681: {'lr': 0.000498873739813489, 'samples': 861184, 'steps': 1681, 'batch_loss/train': 0.9164216630160809} +12/21/2021 19:50:11 - INFO - codeparrot_training - Step 1682: {'lr': 0.00049887224474536, 'samples': 861696, 'steps': 1682, 'batch_loss/train': 1.0542579526081681} +12/21/2021 19:50:21 - INFO - codeparrot_training - Step 1683: {'lr': 0.0004988707486878089, 'samples': 862208, 'steps': 1683, 'batch_loss/train': 0.9508535517379642} +12/21/2021 19:50:32 - INFO - codeparrot_training - Step 1684: {'lr': 0.0004988692516408416, 'samples': 862720, 'steps': 1684, 'batch_loss/train': 0.8899596398696303} +12/21/2021 19:50:44 - INFO - codeparrot_training - Step 1685: {'lr': 0.0004988677536044643, 'samples': 863232, 'steps': 1685, 'batch_loss/train': 0.9636764544993639} +12/21/2021 19:50:55 - INFO - codeparrot_training - Step 1686: {'lr': 0.0004988662545786826, 'samples': 863744, 'steps': 1686, 'batch_loss/train': 0.8935754783451557} +12/21/2021 19:51:05 - INFO - codeparrot_training - Step 1687: {'lr': 0.0004988647545635027, 'samples': 864256, 'steps': 1687, 'batch_loss/train': 0.8424288369715214} +12/21/2021 19:51:17 - INFO - codeparrot_training - Step 1688: {'lr': 0.0004988632535589306, 'samples': 864768, 'steps': 1688, 'batch_loss/train': 1.0158801395446062} +12/21/2021 19:51:28 - INFO - codeparrot_training - Step 1689: {'lr': 0.000498861751564972, 'samples': 865280, 'steps': 1689, 'batch_loss/train': 0.9659739769995213} +12/21/2021 19:51:38 - INFO - codeparrot_training - Step 1690: {'lr': 0.0004988602485816332, 'samples': 865792, 'steps': 1690, 'batch_loss/train': 0.9077348466962576} +12/21/2021 19:51:51 - INFO - codeparrot_training - Step 1691: {'lr': 0.0004988587446089199, 'samples': 866304, 'steps': 1691, 'batch_loss/train': 0.8862872291356325} +12/21/2021 19:52:02 - INFO - codeparrot_training - 
Step 1692: {'lr': 0.0004988572396468383, 'samples': 866816, 'steps': 1692, 'batch_loss/train': 0.8973912801593542} +12/21/2021 19:52:13 - INFO - codeparrot_training - Step 1693: {'lr': 0.0004988557336953941, 'samples': 867328, 'steps': 1693, 'batch_loss/train': 0.8899939404800534} +12/21/2021 19:52:23 - INFO - codeparrot_training - Step 1694: {'lr': 0.0004988542267545936, 'samples': 867840, 'steps': 1694, 'batch_loss/train': 0.9546453291550279} +12/21/2021 19:52:35 - INFO - codeparrot_training - Step 1695: {'lr': 0.0004988527188244427, 'samples': 868352, 'steps': 1695, 'batch_loss/train': 1.0924792597070336} +12/21/2021 19:52:46 - INFO - codeparrot_training - Step 1696: {'lr': 0.0004988512099049473, 'samples': 868864, 'steps': 1696, 'batch_loss/train': 0.7072330936789513} +12/21/2021 19:52:57 - INFO - codeparrot_training - Step 1697: {'lr': 0.0004988496999961134, 'samples': 869376, 'steps': 1697, 'batch_loss/train': 0.9214607588946819} +12/21/2021 19:53:09 - INFO - codeparrot_training - Step 1698: {'lr': 0.0004988481890979471, 'samples': 869888, 'steps': 1698, 'batch_loss/train': 0.9209415949881077} +12/21/2021 19:53:19 - INFO - codeparrot_training - Step 1699: {'lr': 0.0004988466772104544, 'samples': 870400, 'steps': 1699, 'batch_loss/train': 0.9891600273549557} +12/21/2021 19:53:30 - INFO - codeparrot_training - Step 1700: {'lr': 0.0004988451643336412, 'samples': 870912, 'steps': 1700, 'batch_loss/train': 1.0022788783535361} +12/21/2021 19:53:43 - INFO - codeparrot_training - Step 1701: {'lr': 0.0004988436504675136, 'samples': 871424, 'steps': 1701, 'batch_loss/train': 0.9143160805106163} +12/21/2021 19:53:53 - INFO - codeparrot_training - Step 1702: {'lr': 0.0004988421356120777, 'samples': 871936, 'steps': 1702, 'batch_loss/train': 1.230787705630064} +12/21/2021 19:54:04 - INFO - codeparrot_training - Step 1703: {'lr': 0.0004988406197673394, 'samples': 872448, 'steps': 1703, 'batch_loss/train': 0.8632575068622828} +12/21/2021 19:54:16 - INFO - codeparrot_training - Step 1704: {'lr': 0.0004988391029333047, 'samples': 872960, 'steps': 1704, 'batch_loss/train': 0.8298462247475982} +12/21/2021 19:54:27 - INFO - codeparrot_training - Step 1705: {'lr': 0.0004988375851099797, 'samples': 873472, 'steps': 1705, 'batch_loss/train': 0.9333933675661683} +12/21/2021 19:54:37 - INFO - codeparrot_training - Step 1706: {'lr': 0.0004988360662973704, 'samples': 873984, 'steps': 1706, 'batch_loss/train': 0.8585763769224286} +12/21/2021 19:54:48 - INFO - codeparrot_training - Step 1707: {'lr': 0.0004988345464954828, 'samples': 874496, 'steps': 1707, 'batch_loss/train': 0.8557607689872384} +12/21/2021 19:55:01 - INFO - codeparrot_training - Step 1708: {'lr': 0.0004988330257043231, 'samples': 875008, 'steps': 1708, 'batch_loss/train': 0.9054092988371849} +12/21/2021 19:55:11 - INFO - codeparrot_training - Step 1709: {'lr': 0.0004988315039238972, 'samples': 875520, 'steps': 1709, 'batch_loss/train': 0.9229631670750678} +12/21/2021 19:55:22 - INFO - codeparrot_training - Step 1710: {'lr': 0.0004988299811542113, 'samples': 876032, 'steps': 1710, 'batch_loss/train': 0.9498990289866924} +12/21/2021 19:55:34 - INFO - codeparrot_training - Step 1711: {'lr': 0.0004988284573952712, 'samples': 876544, 'steps': 1711, 'batch_loss/train': 0.9152682507410645} +12/21/2021 19:55:45 - INFO - codeparrot_training - Step 1712: {'lr': 0.0004988269326470832, 'samples': 877056, 'steps': 1712, 'batch_loss/train': 0.7151929382234812} +12/21/2021 19:55:55 - INFO - codeparrot_training - Step 1713: {'lr': 0.0004988254069096533, 
'samples': 877568, 'steps': 1713, 'batch_loss/train': 0.9512526281177998} +12/21/2021 19:56:07 - INFO - codeparrot_training - Step 1714: {'lr': 0.0004988238801829874, 'samples': 878080, 'steps': 1714, 'batch_loss/train': 0.9871786087751389} +12/21/2021 19:56:18 - INFO - codeparrot_training - Step 1715: {'lr': 0.0004988223524670918, 'samples': 878592, 'steps': 1715, 'batch_loss/train': 1.059871178586036} +12/21/2021 19:56:29 - INFO - codeparrot_training - Step 1716: {'lr': 0.0004988208237619725, 'samples': 879104, 'steps': 1716, 'batch_loss/train': 0.9195260615088046} +12/21/2021 19:56:39 - INFO - codeparrot_training - Step 1717: {'lr': 0.0004988192940676355, 'samples': 879616, 'steps': 1717, 'batch_loss/train': 1.01906623179093} +12/21/2021 19:56:52 - INFO - codeparrot_training - Step 1718: {'lr': 0.0004988177633840869, 'samples': 880128, 'steps': 1718, 'batch_loss/train': 0.9443487338721752} +12/21/2021 19:57:03 - INFO - codeparrot_training - Step 1719: {'lr': 0.0004988162317113329, 'samples': 880640, 'steps': 1719, 'batch_loss/train': 0.9177821390330791} +12/21/2021 19:57:13 - INFO - codeparrot_training - Step 1720: {'lr': 0.0004988146990493795, 'samples': 881152, 'steps': 1720, 'batch_loss/train': 0.8549966989085078} +12/21/2021 19:57:25 - INFO - codeparrot_training - Step 1721: {'lr': 0.0004988131653982327, 'samples': 881664, 'steps': 1721, 'batch_loss/train': 0.8796768421307206} +12/21/2021 19:57:36 - INFO - codeparrot_training - Step 1722: {'lr': 0.0004988116307578989, 'samples': 882176, 'steps': 1722, 'batch_loss/train': 0.8261880306527019} +12/21/2021 19:57:46 - INFO - codeparrot_training - Step 1723: {'lr': 0.0004988100951283838, 'samples': 882688, 'steps': 1723, 'batch_loss/train': 0.9156139930710196} +12/21/2021 19:57:58 - INFO - codeparrot_training - Step 1724: {'lr': 0.0004988085585096939, 'samples': 883200, 'steps': 1724, 'batch_loss/train': 0.9225135380402207} +12/21/2021 19:58:09 - INFO - codeparrot_training - Step 1725: {'lr': 0.000498807020901835, 'samples': 883712, 'steps': 1725, 'batch_loss/train': 0.9821897577494383} +12/21/2021 19:58:20 - INFO - codeparrot_training - Step 1726: {'lr': 0.0004988054823048133, 'samples': 884224, 'steps': 1726, 'batch_loss/train': 0.827798436395824} +12/21/2021 19:58:30 - INFO - codeparrot_training - Step 1727: {'lr': 0.0004988039427186351, 'samples': 884736, 'steps': 1727, 'batch_loss/train': 0.9724058229476213} +12/21/2021 19:58:42 - INFO - codeparrot_training - Step 1728: {'lr': 0.0004988024021433063, 'samples': 885248, 'steps': 1728, 'batch_loss/train': 0.91817457228899} +12/21/2021 19:58:53 - INFO - codeparrot_training - Step 1729: {'lr': 0.000498800860578833, 'samples': 885760, 'steps': 1729, 'batch_loss/train': 0.9184758132323623} +12/21/2021 19:59:03 - INFO - codeparrot_training - Step 1730: {'lr': 0.0004987993180252215, 'samples': 886272, 'steps': 1730, 'batch_loss/train': 0.949419878423214} +12/21/2021 19:59:16 - INFO - codeparrot_training - Step 1731: {'lr': 0.0004987977744824779, 'samples': 886784, 'steps': 1731, 'batch_loss/train': 0.9081051032990217} +12/21/2021 19:59:27 - INFO - codeparrot_training - Step 1732: {'lr': 0.0004987962299506082, 'samples': 887296, 'steps': 1732, 'batch_loss/train': 0.9061238206923008} +12/21/2021 19:59:37 - INFO - codeparrot_training - Step 1733: {'lr': 0.0004987946844296187, 'samples': 887808, 'steps': 1733, 'batch_loss/train': 0.9276872025802732} +12/21/2021 19:59:49 - INFO - codeparrot_training - Step 1734: {'lr': 0.0004987931379195154, 'samples': 888320, 'steps': 1734, 'batch_loss/train': 
0.9237720146775246} +12/21/2021 20:00:00 - INFO - codeparrot_training - Step 1735: {'lr': 0.0004987915904203045, 'samples': 888832, 'steps': 1735, 'batch_loss/train': 0.9263345254585147} +12/21/2021 20:00:11 - INFO - codeparrot_training - Step 1736: {'lr': 0.0004987900419319923, 'samples': 889344, 'steps': 1736, 'batch_loss/train': 0.8594641145318747} +12/21/2021 20:00:21 - INFO - codeparrot_training - Step 1737: {'lr': 0.0004987884924545847, 'samples': 889856, 'steps': 1737, 'batch_loss/train': 0.8268601922318339} +12/21/2021 20:00:33 - INFO - codeparrot_training - Step 1738: {'lr': 0.0004987869419880881, 'samples': 890368, 'steps': 1738, 'batch_loss/train': 0.9198745731264353} +12/21/2021 20:00:44 - INFO - codeparrot_training - Step 1739: {'lr': 0.0004987853905325085, 'samples': 890880, 'steps': 1739, 'batch_loss/train': 0.9156588902696967} +12/21/2021 20:00:54 - INFO - codeparrot_training - Step 1740: {'lr': 0.0004987838380878522, 'samples': 891392, 'steps': 1740, 'batch_loss/train': 0.8364510666579008} +12/21/2021 20:01:07 - INFO - codeparrot_training - Step 1741: {'lr': 0.0004987822846541253, 'samples': 891904, 'steps': 1741, 'batch_loss/train': 0.8473976843524724} +12/21/2021 20:01:18 - INFO - codeparrot_training - Step 1742: {'lr': 0.0004987807302313338, 'samples': 892416, 'steps': 1742, 'batch_loss/train': 0.9717363230884075} +12/21/2021 20:01:29 - INFO - codeparrot_training - Step 1743: {'lr': 0.0004987791748194842, 'samples': 892928, 'steps': 1743, 'batch_loss/train': 0.8751451941207051} +12/21/2021 20:01:41 - INFO - codeparrot_training - Step 1744: {'lr': 0.0004987776184185825, 'samples': 893440, 'steps': 1744, 'batch_loss/train': 0.8930283952504396} +12/21/2021 20:01:51 - INFO - codeparrot_training - Step 1745: {'lr': 0.000498776061028635, 'samples': 893952, 'steps': 1745, 'batch_loss/train': 0.837039184756577} +12/21/2021 20:02:02 - INFO - codeparrot_training - Step 1746: {'lr': 0.0004987745026496477, 'samples': 894464, 'steps': 1746, 'batch_loss/train': 0.8130146125331521} +12/21/2021 20:02:13 - INFO - codeparrot_training - Step 1747: {'lr': 0.0004987729432816269, 'samples': 894976, 'steps': 1747, 'batch_loss/train': 1.0292198001407087} +12/21/2021 20:02:25 - INFO - codeparrot_training - Step 1748: {'lr': 0.0004987713829245789, 'samples': 895488, 'steps': 1748, 'batch_loss/train': 1.024717700202018} +12/21/2021 20:02:36 - INFO - codeparrot_training - Step 1749: {'lr': 0.0004987698215785098, 'samples': 896000, 'steps': 1749, 'batch_loss/train': 0.8937940374016762} +12/21/2021 20:02:47 - INFO - codeparrot_training - Step 1750: {'lr': 0.0004987682592434258, 'samples': 896512, 'steps': 1750, 'batch_loss/train': 0.7694371391553432} +12/21/2021 20:02:59 - INFO - codeparrot_training - Step 1751: {'lr': 0.0004987666959193331, 'samples': 897024, 'steps': 1751, 'batch_loss/train': 0.9619645616039634} +12/21/2021 20:03:10 - INFO - codeparrot_training - Step 1752: {'lr': 0.000498765131606238, 'samples': 897536, 'steps': 1752, 'batch_loss/train': 0.6888892548158765} +12/21/2021 20:03:20 - INFO - codeparrot_training - Step 1753: {'lr': 0.0004987635663041466, 'samples': 898048, 'steps': 1753, 'batch_loss/train': 0.8204534322721884} +12/21/2021 20:03:32 - INFO - codeparrot_training - Step 1754: {'lr': 0.0004987620000130653, 'samples': 898560, 'steps': 1754, 'batch_loss/train': 1.2819089908152819} +12/21/2021 20:03:43 - INFO - codeparrot_training - Step 1755: {'lr': 0.0004987604327330002, 'samples': 899072, 'steps': 1755, 'batch_loss/train': 0.8924822164699435} +12/21/2021 20:03:53 - INFO - 
codeparrot_training - Step 1756: {'lr': 0.0004987588644639574, 'samples': 899584, 'steps': 1756, 'batch_loss/train': 0.8796547055244446} +12/21/2021 20:04:04 - INFO - codeparrot_training - Step 1757: {'lr': 0.0004987572952059435, 'samples': 900096, 'steps': 1757, 'batch_loss/train': 0.8518597921356559} +12/21/2021 20:04:17 - INFO - codeparrot_training - Step 1758: {'lr': 0.0004987557249589644, 'samples': 900608, 'steps': 1758, 'batch_loss/train': 0.9156473223119974} +12/21/2021 20:04:28 - INFO - codeparrot_training - Step 1759: {'lr': 0.0004987541537230265, 'samples': 901120, 'steps': 1759, 'batch_loss/train': 0.8706931555643678} +12/21/2021 20:04:38 - INFO - codeparrot_training - Step 1760: {'lr': 0.000498752581498136, 'samples': 901632, 'steps': 1760, 'batch_loss/train': 0.7410661680623889} +12/21/2021 20:04:50 - INFO - codeparrot_training - Step 1761: {'lr': 0.0004987510082842991, 'samples': 902144, 'steps': 1761, 'batch_loss/train': 0.844179111532867} +12/21/2021 20:05:01 - INFO - codeparrot_training - Step 1762: {'lr': 0.0004987494340815221, 'samples': 902656, 'steps': 1762, 'batch_loss/train': 0.8779253354296088} +12/21/2021 20:05:11 - INFO - codeparrot_training - Step 1763: {'lr': 0.0004987478588898113, 'samples': 903168, 'steps': 1763, 'batch_loss/train': 0.8999108620919287} +12/21/2021 20:05:25 - INFO - codeparrot_training - Step 1764: {'lr': 0.000498746282709173, 'samples': 903680, 'steps': 1764, 'batch_loss/train': 0.8654523342847824} +12/21/2021 20:05:36 - INFO - codeparrot_training - Step 1765: {'lr': 0.0004987447055396134, 'samples': 904192, 'steps': 1765, 'batch_loss/train': 0.9194268696010113} +12/21/2021 20:05:46 - INFO - codeparrot_training - Step 1766: {'lr': 0.0004987431273811388, 'samples': 904704, 'steps': 1766, 'batch_loss/train': 0.8822370981797576} +12/21/2021 20:05:57 - INFO - codeparrot_training - Step 1767: {'lr': 0.0004987415482337554, 'samples': 905216, 'steps': 1767, 'batch_loss/train': 0.8363524139858782} +12/21/2021 20:06:09 - INFO - codeparrot_training - Step 1768: {'lr': 0.0004987399680974695, 'samples': 905728, 'steps': 1768, 'batch_loss/train': 0.910089218756184} +12/21/2021 20:06:20 - INFO - codeparrot_training - Step 1769: {'lr': 0.0004987383869722874, 'samples': 906240, 'steps': 1769, 'batch_loss/train': 1.5741720795631409} +12/21/2021 20:06:30 - INFO - codeparrot_training - Step 1770: {'lr': 0.0004987368048582154, 'samples': 906752, 'steps': 1770, 'batch_loss/train': 0.8993078609928489} +12/21/2021 20:06:42 - INFO - codeparrot_training - Step 1771: {'lr': 0.0004987352217552598, 'samples': 907264, 'steps': 1771, 'batch_loss/train': 0.9222843889147043} +12/21/2021 20:06:53 - INFO - codeparrot_training - Step 1772: {'lr': 0.000498733637663427, 'samples': 907776, 'steps': 1772, 'batch_loss/train': 0.8354229419492185} +12/21/2021 20:07:04 - INFO - codeparrot_training - Step 1773: {'lr': 0.000498732052582723, 'samples': 908288, 'steps': 1773, 'batch_loss/train': 0.9040659051388502} +12/21/2021 20:07:16 - INFO - codeparrot_training - Step 1774: {'lr': 0.0004987304665131542, 'samples': 908800, 'steps': 1774, 'batch_loss/train': 0.9690363425761461} +12/21/2021 20:07:27 - INFO - codeparrot_training - Step 1775: {'lr': 0.0004987288794547272, 'samples': 909312, 'steps': 1775, 'batch_loss/train': 0.89034179225564} +12/21/2021 20:07:38 - INFO - codeparrot_training - Step 1776: {'lr': 0.000498727291407448, 'samples': 909824, 'steps': 1776, 'batch_loss/train': 0.9229135345667601} +12/21/2021 20:07:48 - INFO - codeparrot_training - Step 1777: {'lr': 
0.000498725702371323, 'samples': 910336, 'steps': 1777, 'batch_loss/train': 0.8124324735254049} +12/21/2021 20:08:00 - INFO - codeparrot_training - Step 1778: {'lr': 0.0004987241123463585, 'samples': 910848, 'steps': 1778, 'batch_loss/train': 0.8551133973523974} +12/21/2021 20:08:11 - INFO - codeparrot_training - Step 1779: {'lr': 0.0004987225213325609, 'samples': 911360, 'steps': 1779, 'batch_loss/train': 0.879351211944595} +12/21/2021 20:08:21 - INFO - codeparrot_training - Step 1780: {'lr': 0.0004987209293299365, 'samples': 911872, 'steps': 1780, 'batch_loss/train': 0.9372209943830967} +12/21/2021 20:08:34 - INFO - codeparrot_training - Step 1781: {'lr': 0.0004987193363384916, 'samples': 912384, 'steps': 1781, 'batch_loss/train': 0.9684248957782984} +12/21/2021 20:08:45 - INFO - codeparrot_training - Step 1782: {'lr': 0.0004987177423582324, 'samples': 912896, 'steps': 1782, 'batch_loss/train': 0.8334794277325273} +12/21/2021 20:08:55 - INFO - codeparrot_training - Step 1783: {'lr': 0.0004987161473891655, 'samples': 913408, 'steps': 1783, 'batch_loss/train': 0.9344437965191901} +12/21/2021 20:09:08 - INFO - codeparrot_training - Step 1784: {'lr': 0.000498714551431297, 'samples': 913920, 'steps': 1784, 'batch_loss/train': 0.8825885057449341} +12/21/2021 20:09:18 - INFO - codeparrot_training - Step 1785: {'lr': 0.0004987129544846334, 'samples': 914432, 'steps': 1785, 'batch_loss/train': 0.8822074355557561} +12/21/2021 20:09:29 - INFO - codeparrot_training - Step 1786: {'lr': 0.000498711356549181, 'samples': 914944, 'steps': 1786, 'batch_loss/train': 0.8603976527228951} +12/21/2021 20:09:40 - INFO - codeparrot_training - Step 1787: {'lr': 0.0004987097576249462, 'samples': 915456, 'steps': 1787, 'batch_loss/train': 0.8634001165628433} +12/21/2021 20:09:52 - INFO - codeparrot_training - Step 1788: {'lr': 0.0004987081577119353, 'samples': 915968, 'steps': 1788, 'batch_loss/train': 0.959977783029899} +12/21/2021 20:10:02 - INFO - codeparrot_training - Step 1789: {'lr': 0.0004987065568101546, 'samples': 916480, 'steps': 1789, 'batch_loss/train': 0.902426291257143} +12/21/2021 20:10:13 - INFO - codeparrot_training - Step 1790: {'lr': 0.0004987049549196106, 'samples': 916992, 'steps': 1790, 'batch_loss/train': 0.9163969978690147} +12/21/2021 20:10:25 - INFO - codeparrot_training - Step 1791: {'lr': 0.0004987033520403096, 'samples': 917504, 'steps': 1791, 'batch_loss/train': 0.8881054315716028} +12/21/2021 20:10:36 - INFO - codeparrot_training - Step 1792: {'lr': 0.000498701748172258, 'samples': 918016, 'steps': 1792, 'batch_loss/train': 0.9543562810868025} +12/21/2021 20:10:47 - INFO - codeparrot_training - Step 1793: {'lr': 0.0004987001433154622, 'samples': 918528, 'steps': 1793, 'batch_loss/train': 0.9367486266419291} +12/21/2021 20:10:59 - INFO - codeparrot_training - Step 1794: {'lr': 0.0004986985374699285, 'samples': 919040, 'steps': 1794, 'batch_loss/train': 0.9013417465612292} +12/21/2021 20:11:09 - INFO - codeparrot_training - Step 1795: {'lr': 0.0004986969306356632, 'samples': 919552, 'steps': 1795, 'batch_loss/train': 0.8312400672584772} +12/21/2021 20:11:20 - INFO - codeparrot_training - Step 1796: {'lr': 0.0004986953228126729, 'samples': 920064, 'steps': 1796, 'batch_loss/train': 0.9092927593737841} +12/21/2021 20:11:31 - INFO - codeparrot_training - Step 1797: {'lr': 0.000498693714000964, 'samples': 920576, 'steps': 1797, 'batch_loss/train': 0.9284424344077706} +12/21/2021 20:11:43 - INFO - codeparrot_training - Step 1798: {'lr': 0.0004986921042005425, 'samples': 921088, 'steps': 
1798, 'batch_loss/train': 0.7865123264491558} +12/21/2021 20:11:53 - INFO - codeparrot_training - Step 1799: {'lr': 0.0004986904934114153, 'samples': 921600, 'steps': 1799, 'batch_loss/train': 0.9214104975108057} +12/21/2021 20:12:04 - INFO - codeparrot_training - Step 1800: {'lr': 0.0004986888816335887, 'samples': 922112, 'steps': 1800, 'batch_loss/train': 0.9327549645677209} +12/21/2021 20:12:16 - INFO - codeparrot_training - Step 1801: {'lr': 0.0004986872688670688, 'samples': 922624, 'steps': 1801, 'batch_loss/train': 0.9589235652238131} +12/21/2021 20:12:26 - INFO - codeparrot_training - Step 1802: {'lr': 0.0004986856551118623, 'samples': 923136, 'steps': 1802, 'batch_loss/train': 0.9533125841990113} +12/21/2021 20:12:37 - INFO - codeparrot_training - Step 1803: {'lr': 0.0004986840403679757, 'samples': 923648, 'steps': 1803, 'batch_loss/train': 0.8772525256499648} +12/21/2021 20:12:50 - INFO - codeparrot_training - Step 1804: {'lr': 0.000498682424635415, 'samples': 924160, 'steps': 1804, 'batch_loss/train': 0.9441607277840376} +12/21/2021 20:13:00 - INFO - codeparrot_training - Step 1805: {'lr': 0.000498680807914187, 'samples': 924672, 'steps': 1805, 'batch_loss/train': 0.8214109698310494} +12/21/2021 20:13:11 - INFO - codeparrot_training - Step 1806: {'lr': 0.000498679190204298, 'samples': 925184, 'steps': 1806, 'batch_loss/train': 0.966174628585577} +12/21/2021 20:13:23 - INFO - codeparrot_training - Step 1807: {'lr': 0.0004986775715057544, 'samples': 925696, 'steps': 1807, 'batch_loss/train': 0.8874991219490767} +12/21/2021 20:13:34 - INFO - codeparrot_training - Step 1808: {'lr': 0.0004986759518185627, 'samples': 926208, 'steps': 1808, 'batch_loss/train': 0.950497136451304} +12/21/2021 20:13:44 - INFO - codeparrot_training - Step 1809: {'lr': 0.0004986743311427294, 'samples': 926720, 'steps': 1809, 'batch_loss/train': 0.8621352333575487} +12/21/2021 20:13:55 - INFO - codeparrot_training - Step 1810: {'lr': 0.0004986727094782608, 'samples': 927232, 'steps': 1810, 'batch_loss/train': 0.9612332861870527} +12/21/2021 20:14:07 - INFO - codeparrot_training - Step 1811: {'lr': 0.0004986710868251633, 'samples': 927744, 'steps': 1811, 'batch_loss/train': 0.897529099136591} +12/21/2021 20:14:18 - INFO - codeparrot_training - Step 1812: {'lr': 0.0004986694631834435, 'samples': 928256, 'steps': 1812, 'batch_loss/train': 0.7724511902779341} +12/21/2021 20:14:29 - INFO - codeparrot_training - Step 1813: {'lr': 0.0004986678385531079, 'samples': 928768, 'steps': 1813, 'batch_loss/train': 0.968490356579423} +12/21/2021 20:14:41 - INFO - codeparrot_training - Step 1814: {'lr': 0.0004986662129341629, 'samples': 929280, 'steps': 1814, 'batch_loss/train': 0.9682283839210868} +12/21/2021 20:14:51 - INFO - codeparrot_training - Step 1815: {'lr': 0.0004986645863266148, 'samples': 929792, 'steps': 1815, 'batch_loss/train': 0.8703804188407958} +12/21/2021 20:15:02 - INFO - codeparrot_training - Step 1816: {'lr': 0.0004986629587304702, 'samples': 930304, 'steps': 1816, 'batch_loss/train': 0.8031561346724629} +12/21/2021 20:15:14 - INFO - codeparrot_training - Step 1817: {'lr': 0.0004986613301457356, 'samples': 930816, 'steps': 1817, 'batch_loss/train': 0.9431584663689137} +12/21/2021 20:15:24 - INFO - codeparrot_training - Step 1818: {'lr': 0.0004986597005724174, 'samples': 931328, 'steps': 1818, 'batch_loss/train': 0.828075626399368} +12/21/2021 20:15:35 - INFO - codeparrot_training - Step 1819: {'lr': 0.0004986580700105221, 'samples': 931840, 'steps': 1819, 'batch_loss/train': 0.8400483876466751} 
+12/21/2021 20:15:46 - INFO - codeparrot_training - Step 1820: {'lr': 0.0004986564384600563, 'samples': 932352, 'steps': 1820, 'batch_loss/train': 0.8771297363564372} +12/21/2021 20:15:58 - INFO - codeparrot_training - Step 1821: {'lr': 0.0004986548059210264, 'samples': 932864, 'steps': 1821, 'batch_loss/train': 0.814026640728116} +12/21/2021 20:16:09 - INFO - codeparrot_training - Step 1822: {'lr': 0.0004986531723934388, 'samples': 933376, 'steps': 1822, 'batch_loss/train': 0.9399106362834573} +12/21/2021 20:16:20 - INFO - codeparrot_training - Step 1823: {'lr': 0.0004986515378773001, 'samples': 933888, 'steps': 1823, 'batch_loss/train': 0.9374174429103732} +12/21/2021 20:16:32 - INFO - codeparrot_training - Step 1824: {'lr': 0.0004986499023726167, 'samples': 934400, 'steps': 1824, 'batch_loss/train': 0.8607952566817403} +12/21/2021 20:16:42 - INFO - codeparrot_training - Step 1825: {'lr': 0.0004986482658793953, 'samples': 934912, 'steps': 1825, 'batch_loss/train': 0.8238134458661079} +12/21/2021 20:16:53 - INFO - codeparrot_training - Step 1826: {'lr': 0.0004986466283976423, 'samples': 935424, 'steps': 1826, 'batch_loss/train': 0.9723093062639236} +12/21/2021 20:17:06 - INFO - codeparrot_training - Step 1827: {'lr': 0.0004986449899273641, 'samples': 935936, 'steps': 1827, 'batch_loss/train': 0.9604789754375815} +12/21/2021 20:17:16 - INFO - codeparrot_training - Step 1828: {'lr': 0.0004986433504685674, 'samples': 936448, 'steps': 1828, 'batch_loss/train': 0.812616216018796} +12/21/2021 20:17:27 - INFO - codeparrot_training - Step 1829: {'lr': 0.0004986417100212584, 'samples': 936960, 'steps': 1829, 'batch_loss/train': 0.9892189031234011} +12/21/2021 20:17:39 - INFO - codeparrot_training - Step 1830: {'lr': 0.0004986400685854441, 'samples': 937472, 'steps': 1830, 'batch_loss/train': 0.8710268647409976} +12/21/2021 20:17:49 - INFO - codeparrot_training - Step 1831: {'lr': 0.0004986384261611306, 'samples': 937984, 'steps': 1831, 'batch_loss/train': 0.9007011516951025} +12/21/2021 20:18:00 - INFO - codeparrot_training - Step 1832: {'lr': 0.0004986367827483247, 'samples': 938496, 'steps': 1832, 'batch_loss/train': 0.8797266194596887} +12/21/2021 20:18:11 - INFO - codeparrot_training - Step 1833: {'lr': 0.0004986351383470328, 'samples': 939008, 'steps': 1833, 'batch_loss/train': 0.9119500303640962} +12/21/2021 20:18:23 - INFO - codeparrot_training - Step 1834: {'lr': 0.0004986334929572614, 'samples': 939520, 'steps': 1834, 'batch_loss/train': 0.8960771700367332} +12/21/2021 20:18:34 - INFO - codeparrot_training - Step 1835: {'lr': 0.0004986318465790173, 'samples': 940032, 'steps': 1835, 'batch_loss/train': 0.7886189089622349} +12/21/2021 20:18:44 - INFO - codeparrot_training - Step 1836: {'lr': 0.0004986301992123067, 'samples': 940544, 'steps': 1836, 'batch_loss/train': 1.0327209755778313} +12/21/2021 20:18:58 - INFO - codeparrot_training - Step 1837: {'lr': 0.0004986285508571364, 'samples': 941056, 'steps': 1837, 'batch_loss/train': 0.8866083342581987} +12/21/2021 20:19:09 - INFO - codeparrot_training - Step 1838: {'lr': 0.0004986269015135128, 'samples': 941568, 'steps': 1838, 'batch_loss/train': 0.9049155563116074} +12/21/2021 20:19:19 - INFO - codeparrot_training - Step 1839: {'lr': 0.0004986252511814424, 'samples': 942080, 'steps': 1839, 'batch_loss/train': 0.8353464880492538} +12/21/2021 20:19:31 - INFO - codeparrot_training - Step 1840: {'lr': 0.0004986235998609321, 'samples': 942592, 'steps': 1840, 'batch_loss/train': 0.9126728833653033} +12/21/2021 20:19:42 - INFO - codeparrot_training 
- Step 1841: {'lr': 0.0004986219475519881, 'samples': 943104, 'steps': 1841, 'batch_loss/train': 0.7925252560526133} +12/21/2021 20:19:53 - INFO - codeparrot_training - Step 1842: {'lr': 0.0004986202942546172, 'samples': 943616, 'steps': 1842, 'batch_loss/train': 0.8110767262987792} +12/21/2021 20:20:03 - INFO - codeparrot_training - Step 1843: {'lr': 0.0004986186399688258, 'samples': 944128, 'steps': 1843, 'batch_loss/train': 0.8096279329620302} +12/21/2021 20:20:15 - INFO - codeparrot_training - Step 1844: {'lr': 0.0004986169846946207, 'samples': 944640, 'steps': 1844, 'batch_loss/train': 0.9027986219152808} +12/21/2021 20:20:26 - INFO - codeparrot_training - Step 1845: {'lr': 0.0004986153284320081, 'samples': 945152, 'steps': 1845, 'batch_loss/train': 0.8371636793017387} +12/21/2021 20:20:36 - INFO - codeparrot_training - Step 1846: {'lr': 0.0004986136711809951, 'samples': 945664, 'steps': 1846, 'batch_loss/train': 0.8917500502429903} +12/21/2021 20:20:49 - INFO - codeparrot_training - Step 1847: {'lr': 0.0004986120129415879, 'samples': 946176, 'steps': 1847, 'batch_loss/train': 0.8751227331813425} +12/21/2021 20:20:59 - INFO - codeparrot_training - Step 1848: {'lr': 0.0004986103537137932, 'samples': 946688, 'steps': 1848, 'batch_loss/train': 0.7125408542342484} +12/21/2021 20:21:10 - INFO - codeparrot_training - Step 1849: {'lr': 0.0004986086934976176, 'samples': 947200, 'steps': 1849, 'batch_loss/train': 0.9307743078097701} +12/21/2021 20:21:22 - INFO - codeparrot_training - Step 1850: {'lr': 0.0004986070322930677, 'samples': 947712, 'steps': 1850, 'batch_loss/train': 0.8707185450475663} +12/21/2021 20:21:33 - INFO - codeparrot_training - Step 1851: {'lr': 0.0004986053701001501, 'samples': 948224, 'steps': 1851, 'batch_loss/train': 0.9580605085939169} +12/21/2021 20:21:44 - INFO - codeparrot_training - Step 1852: {'lr': 0.0004986037069188714, 'samples': 948736, 'steps': 1852, 'batch_loss/train': 0.7944429162889719} +12/21/2021 20:21:54 - INFO - codeparrot_training - Step 1853: {'lr': 0.0004986020427492384, 'samples': 949248, 'steps': 1853, 'batch_loss/train': 0.8779871286824346} +12/21/2021 20:22:06 - INFO - codeparrot_training - Step 1854: {'lr': 0.0004986003775912574, 'samples': 949760, 'steps': 1854, 'batch_loss/train': 0.8946093013510108} +12/21/2021 20:22:17 - INFO - codeparrot_training - Step 1855: {'lr': 0.0004985987114449351, 'samples': 950272, 'steps': 1855, 'batch_loss/train': 0.9508311804383993} +12/21/2021 20:22:28 - INFO - codeparrot_training - Step 1856: {'lr': 0.0004985970443102782, 'samples': 950784, 'steps': 1856, 'batch_loss/train': 0.9317941963672638} +12/21/2021 20:22:40 - INFO - codeparrot_training - Step 1857: {'lr': 0.0004985953761872936, 'samples': 951296, 'steps': 1857, 'batch_loss/train': 0.9445256488397717} +12/21/2021 20:22:51 - INFO - codeparrot_training - Step 1858: {'lr': 0.0004985937070759874, 'samples': 951808, 'steps': 1858, 'batch_loss/train': 0.9254755415022373} +12/21/2021 20:23:02 - INFO - codeparrot_training - Step 1859: {'lr': 0.0004985920369763665, 'samples': 952320, 'steps': 1859, 'batch_loss/train': 0.9439691929146647} +12/21/2021 20:23:14 - INFO - codeparrot_training - Step 1860: {'lr': 0.0004985903658884375, 'samples': 952832, 'steps': 1860, 'batch_loss/train': 0.968684159219265} +12/21/2021 20:23:25 - INFO - codeparrot_training - Step 1861: {'lr': 0.0004985886938122071, 'samples': 953344, 'steps': 1861, 'batch_loss/train': 0.7654836247675121} +12/21/2021 20:23:35 - INFO - codeparrot_training - Step 1862: {'lr': 0.000498587020747682, 
'samples': 953856, 'steps': 1862, 'batch_loss/train': 1.0087153273634613} +12/21/2021 20:23:46 - INFO - codeparrot_training - Step 1863: {'lr': 0.0004985853466948687, 'samples': 954368, 'steps': 1863, 'batch_loss/train': 0.7112129926681519} +12/21/2021 20:23:58 - INFO - codeparrot_training - Step 1864: {'lr': 0.000498583671653774, 'samples': 954880, 'steps': 1864, 'batch_loss/train': 0.8184665851294994} +12/21/2021 20:24:09 - INFO - codeparrot_training - Step 1865: {'lr': 0.0004985819956244044, 'samples': 955392, 'steps': 1865, 'batch_loss/train': 0.8702767519280314} +12/21/2021 20:24:19 - INFO - codeparrot_training - Step 1866: {'lr': 0.0004985803186067667, 'samples': 955904, 'steps': 1866, 'batch_loss/train': 0.9370418824255466} +12/21/2021 20:24:32 - INFO - codeparrot_training - Step 1867: {'lr': 0.0004985786406008674, 'samples': 956416, 'steps': 1867, 'batch_loss/train': 0.984340375289321} +12/21/2021 20:24:43 - INFO - codeparrot_training - Step 1868: {'lr': 0.0004985769616067135, 'samples': 956928, 'steps': 1868, 'batch_loss/train': 0.9193143034353852} +12/21/2021 20:24:53 - INFO - codeparrot_training - Step 1869: {'lr': 0.0004985752816243112, 'samples': 957440, 'steps': 1869, 'batch_loss/train': 0.8370444281026721} +12/21/2021 20:25:05 - INFO - codeparrot_training - Step 1870: {'lr': 0.0004985736006536676, 'samples': 957952, 'steps': 1870, 'batch_loss/train': 0.9120601594913751} +12/21/2021 20:25:16 - INFO - codeparrot_training - Step 1871: {'lr': 0.0004985719186947891, 'samples': 958464, 'steps': 1871, 'batch_loss/train': 0.9577167658135295} +12/21/2021 20:25:27 - INFO - codeparrot_training - Step 1872: {'lr': 0.0004985702357476827, 'samples': 958976, 'steps': 1872, 'batch_loss/train': 0.8961206395179033} +12/21/2021 20:25:37 - INFO - codeparrot_training - Step 1873: {'lr': 0.0004985685518123547, 'samples': 959488, 'steps': 1873, 'batch_loss/train': 0.8794773784466088} +12/21/2021 20:25:49 - INFO - codeparrot_training - Step 1874: {'lr': 0.0004985668668888121, 'samples': 960000, 'steps': 1874, 'batch_loss/train': 0.8279963359236717} +12/21/2021 20:26:00 - INFO - codeparrot_training - Step 1875: {'lr': 0.0004985651809770615, 'samples': 960512, 'steps': 1875, 'batch_loss/train': 0.925343323033303} +12/21/2021 20:26:10 - INFO - codeparrot_training - Step 1876: {'lr': 0.0004985634940771096, 'samples': 961024, 'steps': 1876, 'batch_loss/train': 0.8192994790151715} +12/21/2021 20:26:22 - INFO - codeparrot_training - Step 1877: {'lr': 0.000498561806188963, 'samples': 961536, 'steps': 1877, 'batch_loss/train': 0.8512080786749721} +12/21/2021 20:26:33 - INFO - codeparrot_training - Step 1878: {'lr': 0.0004985601173126286, 'samples': 962048, 'steps': 1878, 'batch_loss/train': 0.8746908996254206} +12/21/2021 20:26:43 - INFO - codeparrot_training - Step 1879: {'lr': 0.0004985584274481131, 'samples': 962560, 'steps': 1879, 'batch_loss/train': 0.9004705036059022} +12/21/2021 20:26:56 - INFO - codeparrot_training - Step 1880: {'lr': 0.000498556736595423, 'samples': 963072, 'steps': 1880, 'batch_loss/train': 0.8611906156875193} +12/21/2021 20:27:07 - INFO - codeparrot_training - Step 1881: {'lr': 0.0004985550447545652, 'samples': 963584, 'steps': 1881, 'batch_loss/train': 0.7130138901993632} +12/21/2021 20:27:18 - INFO - codeparrot_training - Step 1882: {'lr': 0.0004985533519255464, 'samples': 964096, 'steps': 1882, 'batch_loss/train': 0.834296409972012} +12/21/2021 20:27:28 - INFO - codeparrot_training - Step 1883: {'lr': 0.0004985516581083734, 'samples': 964608, 'steps': 1883, 'batch_loss/train': 
0.8909173300489783} +12/21/2021 20:27:40 - INFO - codeparrot_training - Step 1884: {'lr': 0.0004985499633030528, 'samples': 965120, 'steps': 1884, 'batch_loss/train': 0.9050975497812033} +12/21/2021 20:27:51 - INFO - codeparrot_training - Step 1885: {'lr': 0.0004985482675095913, 'samples': 965632, 'steps': 1885, 'batch_loss/train': 0.9018258606083691} +12/21/2021 20:28:01 - INFO - codeparrot_training - Step 1886: {'lr': 0.0004985465707279959, 'samples': 966144, 'steps': 1886, 'batch_loss/train': 0.890422020573169} +12/21/2021 20:28:13 - INFO - codeparrot_training - Step 1887: {'lr': 0.0004985448729582731, 'samples': 966656, 'steps': 1887, 'batch_loss/train': 0.853865951532498} +12/21/2021 20:28:24 - INFO - codeparrot_training - Step 1888: {'lr': 0.0004985431742004297, 'samples': 967168, 'steps': 1888, 'batch_loss/train': 0.8792003877460957} +12/21/2021 20:28:34 - INFO - codeparrot_training - Step 1889: {'lr': 0.0004985414744544726, 'samples': 967680, 'steps': 1889, 'batch_loss/train': 0.9136353489011526} +12/21/2021 20:28:47 - INFO - codeparrot_training - Step 1890: {'lr': 0.0004985397737204084, 'samples': 968192, 'steps': 1890, 'batch_loss/train': 0.8181290803477168} +12/21/2021 20:28:58 - INFO - codeparrot_training - Step 1891: {'lr': 0.0004985380719982438, 'samples': 968704, 'steps': 1891, 'batch_loss/train': 0.8627249887213111} +12/21/2021 20:29:09 - INFO - codeparrot_training - Step 1892: {'lr': 0.0004985363692879858, 'samples': 969216, 'steps': 1892, 'batch_loss/train': 0.9257152266800404} +12/21/2021 20:29:19 - INFO - codeparrot_training - Step 1893: {'lr': 0.000498534665589641, 'samples': 969728, 'steps': 1893, 'batch_loss/train': 0.8799448469653726} +12/21/2021 20:29:31 - INFO - codeparrot_training - Step 1894: {'lr': 0.0004985329609032161, 'samples': 970240, 'steps': 1894, 'batch_loss/train': 0.8503386178053916} +12/21/2021 20:29:42 - INFO - codeparrot_training - Step 1895: {'lr': 0.0004985312552287181, 'samples': 970752, 'steps': 1895, 'batch_loss/train': 0.6699882475659251} +12/21/2021 20:29:53 - INFO - codeparrot_training - Step 1896: {'lr': 0.0004985295485661537, 'samples': 971264, 'steps': 1896, 'batch_loss/train': 0.9669402996078134} +12/21/2021 20:30:05 - INFO - codeparrot_training - Step 1897: {'lr': 0.0004985278409155296, 'samples': 971776, 'steps': 1897, 'batch_loss/train': 0.8014138168655336} +12/21/2021 20:30:16 - INFO - codeparrot_training - Step 1898: {'lr': 0.0004985261322768527, 'samples': 972288, 'steps': 1898, 'batch_loss/train': 0.9111372977495193} +12/21/2021 20:30:27 - INFO - codeparrot_training - Step 1899: {'lr': 0.0004985244226501296, 'samples': 972800, 'steps': 1899, 'batch_loss/train': 0.8931278483942151} +12/21/2021 20:30:37 - INFO - codeparrot_training - Step 1900: {'lr': 0.0004985227120353673, 'samples': 973312, 'steps': 1900, 'batch_loss/train': 0.8450008472427726} +12/21/2021 20:30:49 - INFO - codeparrot_training - Step 1901: {'lr': 0.0004985210004325726, 'samples': 973824, 'steps': 1901, 'batch_loss/train': 0.9265907276421785} +12/21/2021 20:31:00 - INFO - codeparrot_training - Step 1902: {'lr': 0.0004985192878417523, 'samples': 974336, 'steps': 1902, 'batch_loss/train': 0.8288377718999982} +12/21/2021 20:31:11 - INFO - codeparrot_training - Step 1903: {'lr': 0.000498517574262913, 'samples': 974848, 'steps': 1903, 'batch_loss/train': 0.9185442980378866} +12/21/2021 20:31:23 - INFO - codeparrot_training - Step 1904: {'lr': 0.0004985158596960617, 'samples': 975360, 'steps': 1904, 'batch_loss/train': 0.8217168105766177} +12/21/2021 20:31:33 - INFO - 
codeparrot_training - Step 1905: {'lr': 0.0004985141441412052, 'samples': 975872, 'steps': 1905, 'batch_loss/train': 0.9199291882105172} +12/21/2021 20:31:44 - INFO - codeparrot_training - Step 1906: {'lr': 0.0004985124275983504, 'samples': 976384, 'steps': 1906, 'batch_loss/train': 0.8420824324712157} +12/21/2021 20:31:57 - INFO - codeparrot_training - Step 1907: {'lr': 0.0004985107100675038, 'samples': 976896, 'steps': 1907, 'batch_loss/train': 0.9212402869015932} +12/21/2021 20:32:07 - INFO - codeparrot_training - Step 1908: {'lr': 0.0004985089915486726, 'samples': 977408, 'steps': 1908, 'batch_loss/train': 0.7811398338526487} +12/21/2021 20:32:18 - INFO - codeparrot_training - Step 1909: {'lr': 0.0004985072720418635, 'samples': 977920, 'steps': 1909, 'batch_loss/train': 0.8198994658887386} +12/21/2021 20:32:28 - INFO - codeparrot_training - Step 1910: {'lr': 0.0004985055515470832, 'samples': 978432, 'steps': 1910, 'batch_loss/train': 0.9118938446044922} +12/21/2021 20:32:41 - INFO - codeparrot_training - Step 1911: {'lr': 0.0004985038300643387, 'samples': 978944, 'steps': 1911, 'batch_loss/train': 0.7598953689448535} +12/21/2021 20:32:51 - INFO - codeparrot_training - Step 1912: {'lr': 0.0004985021075936369, 'samples': 979456, 'steps': 1912, 'batch_loss/train': 0.8355837122071534} +12/21/2021 20:33:02 - INFO - codeparrot_training - Step 1913: {'lr': 0.0004985003841349845, 'samples': 979968, 'steps': 1913, 'batch_loss/train': 0.765285518951714} +12/21/2021 20:33:14 - INFO - codeparrot_training - Step 1914: {'lr': 0.0004984986596883883, 'samples': 980480, 'steps': 1914, 'batch_loss/train': 0.8225007951259613} +12/21/2021 20:33:25 - INFO - codeparrot_training - Step 1915: {'lr': 0.0004984969342538553, 'samples': 980992, 'steps': 1915, 'batch_loss/train': 0.9137710812501609} +12/21/2021 20:33:35 - INFO - codeparrot_training - Step 1916: {'lr': 0.0004984952078313925, 'samples': 981504, 'steps': 1916, 'batch_loss/train': 0.8579232050105929} +12/21/2021 20:33:48 - INFO - codeparrot_training - Step 1917: {'lr': 0.0004984934804210064, 'samples': 982016, 'steps': 1917, 'batch_loss/train': 0.8911875681951642} +12/21/2021 20:33:59 - INFO - codeparrot_training - Step 1918: {'lr': 0.0004984917520227041, 'samples': 982528, 'steps': 1918, 'batch_loss/train': 0.972049449570477} +12/21/2021 20:34:09 - INFO - codeparrot_training - Step 1919: {'lr': 0.0004984900226364924, 'samples': 983040, 'steps': 1919, 'batch_loss/train': 0.8034211080521345} +12/21/2021 20:34:20 - INFO - codeparrot_training - Step 1920: {'lr': 0.0004984882922623782, 'samples': 983552, 'steps': 1920, 'batch_loss/train': 0.7752224360592663} +12/21/2021 20:34:32 - INFO - codeparrot_training - Step 1921: {'lr': 0.0004984865609003684, 'samples': 984064, 'steps': 1921, 'batch_loss/train': 0.9371872218325734} +12/21/2021 20:34:43 - INFO - codeparrot_training - Step 1922: {'lr': 0.0004984848285504699, 'samples': 984576, 'steps': 1922, 'batch_loss/train': 0.8593639386817813} +12/21/2021 20:34:53 - INFO - codeparrot_training - Step 1923: {'lr': 0.0004984830952126894, 'samples': 985088, 'steps': 1923, 'batch_loss/train': 0.883861318230629} +12/21/2021 20:35:05 - INFO - codeparrot_training - Step 1924: {'lr': 0.0004984813608870341, 'samples': 985600, 'steps': 1924, 'batch_loss/train': 1.0461214780807495} +12/21/2021 20:35:16 - INFO - codeparrot_training - Step 1925: {'lr': 0.0004984796255735107, 'samples': 986112, 'steps': 1925, 'batch_loss/train': 1.0278077712282538} +12/21/2021 20:35:27 - INFO - codeparrot_training - Step 1926: {'lr': 
0.000498477889272126, 'samples': 986624, 'steps': 1926, 'batch_loss/train': 0.8411048408597708} +12/21/2021 20:35:40 - INFO - codeparrot_training - Step 1927: {'lr': 0.0004984761519828871, 'samples': 987136, 'steps': 1927, 'batch_loss/train': 0.8074838230386376} +12/21/2021 20:35:50 - INFO - codeparrot_training - Step 1928: {'lr': 0.0004984744137058009, 'samples': 987648, 'steps': 1928, 'batch_loss/train': 1.0221177656203508} +12/21/2021 20:36:01 - INFO - codeparrot_training - Step 1929: {'lr': 0.0004984726744408742, 'samples': 988160, 'steps': 1929, 'batch_loss/train': 0.636359327705577} +12/21/2021 20:36:14 - INFO - codeparrot_training - Step 1930: {'lr': 0.000498470934188114, 'samples': 988672, 'steps': 1930, 'batch_loss/train': 0.8164314767345786} +12/21/2021 20:36:24 - INFO - codeparrot_training - Step 1931: {'lr': 0.0004984691929475272, 'samples': 989184, 'steps': 1931, 'batch_loss/train': 0.9007533332332969} +12/21/2021 20:36:35 - INFO - codeparrot_training - Step 1932: {'lr': 0.0004984674507191206, 'samples': 989696, 'steps': 1932, 'batch_loss/train': 0.815044350689277} +12/21/2021 20:36:45 - INFO - codeparrot_training - Step 1933: {'lr': 0.0004984657075029013, 'samples': 990208, 'steps': 1933, 'batch_loss/train': 1.0739290239289403} +12/21/2021 20:36:57 - INFO - codeparrot_training - Step 1934: {'lr': 0.0004984639632988762, 'samples': 990720, 'steps': 1934, 'batch_loss/train': 1.1658818731084466} +12/21/2021 20:37:08 - INFO - codeparrot_training - Step 1935: {'lr': 0.0004984622181070521, 'samples': 991232, 'steps': 1935, 'batch_loss/train': 0.8970695147290826} +12/21/2021 20:37:19 - INFO - codeparrot_training - Step 1936: {'lr': 0.0004984604719274362, 'samples': 991744, 'steps': 1936, 'batch_loss/train': 0.8768092561513186} +12/21/2021 20:37:32 - INFO - codeparrot_training - Step 1937: {'lr': 0.0004984587247600352, 'samples': 992256, 'steps': 1937, 'batch_loss/train': 0.9532602233812213} +12/21/2021 20:37:43 - INFO - codeparrot_training - Step 1938: {'lr': 0.0004984569766048561, 'samples': 992768, 'steps': 1938, 'batch_loss/train': 0.9120695497840643} +12/21/2021 20:37:54 - INFO - codeparrot_training - Step 1939: {'lr': 0.0004984552274619058, 'samples': 993280, 'steps': 1939, 'batch_loss/train': 0.908918671309948} +12/21/2021 20:38:04 - INFO - codeparrot_training - Step 1940: {'lr': 0.0004984534773311915, 'samples': 993792, 'steps': 1940, 'batch_loss/train': 0.8521115388721228} +12/21/2021 20:38:16 - INFO - codeparrot_training - Step 1941: {'lr': 0.0004984517262127199, 'samples': 994304, 'steps': 1941, 'batch_loss/train': 0.8717880635522306} +12/21/2021 20:38:27 - INFO - codeparrot_training - Step 1942: {'lr': 0.000498449974106498, 'samples': 994816, 'steps': 1942, 'batch_loss/train': 0.8815956693142653} +12/21/2021 20:38:38 - INFO - codeparrot_training - Step 1943: {'lr': 0.0004984482210125329, 'samples': 995328, 'steps': 1943, 'batch_loss/train': 0.9211061354726553} +12/21/2021 20:38:51 - INFO - codeparrot_training - Step 1944: {'lr': 0.0004984464669308314, 'samples': 995840, 'steps': 1944, 'batch_loss/train': 0.8871599705889821} +12/21/2021 20:39:01 - INFO - codeparrot_training - Step 1945: {'lr': 0.0004984447118614006, 'samples': 996352, 'steps': 1945, 'batch_loss/train': 0.7164979332592338} +12/21/2021 20:39:12 - INFO - codeparrot_training - Step 1946: {'lr': 0.0004984429558042475, 'samples': 996864, 'steps': 1946, 'batch_loss/train': 0.8750196183100343} +12/21/2021 20:39:24 - INFO - codeparrot_training - Step 1947: {'lr': 0.0004984411987593789, 'samples': 997376, 'steps': 
1947, 'batch_loss/train': 0.9554291544482112} +12/21/2021 20:39:35 - INFO - codeparrot_training - Step 1948: {'lr': 0.000498439440726802, 'samples': 997888, 'steps': 1948, 'batch_loss/train': 0.9645392382517457} +12/21/2021 20:39:45 - INFO - codeparrot_training - Step 1949: {'lr': 0.0004984376817065236, 'samples': 998400, 'steps': 1949, 'batch_loss/train': 0.9548986069858074} +12/21/2021 20:39:56 - INFO - codeparrot_training - Step 1950: {'lr': 0.0004984359216985508, 'samples': 998912, 'steps': 1950, 'batch_loss/train': 0.9310208605602384} +12/21/2021 20:40:08 - INFO - codeparrot_training - Step 1951: {'lr': 0.0004984341607028906, 'samples': 999424, 'steps': 1951, 'batch_loss/train': 0.8403540216386318} +12/21/2021 20:40:18 - INFO - codeparrot_training - Step 1952: {'lr': 0.00049843239871955, 'samples': 999936, 'steps': 1952, 'batch_loss/train': 0.9184751519933343} +12/21/2021 20:40:29 - INFO - codeparrot_training - Step 1953: {'lr': 0.0004984306357485361, 'samples': 1000448, 'steps': 1953, 'batch_loss/train': 0.9114741757512093} +12/21/2021 20:40:41 - INFO - codeparrot_training - Step 1954: {'lr': 0.0004984288717898557, 'samples': 1000960, 'steps': 1954, 'batch_loss/train': 0.9521170016378164} +12/21/2021 20:40:52 - INFO - codeparrot_training - Step 1955: {'lr': 0.0004984271068435158, 'samples': 1001472, 'steps': 1955, 'batch_loss/train': 0.8104671039618552} +12/21/2021 20:41:02 - INFO - codeparrot_training - Step 1956: {'lr': 0.0004984253409095236, 'samples': 1001984, 'steps': 1956, 'batch_loss/train': 0.9920618869364262} +12/21/2021 20:41:15 - INFO - codeparrot_training - Step 1957: {'lr': 0.0004984235739878861, 'samples': 1002496, 'steps': 1957, 'batch_loss/train': 0.8919129464775324} +12/21/2021 20:41:26 - INFO - codeparrot_training - Step 1958: {'lr': 0.0004984218060786102, 'samples': 1003008, 'steps': 1958, 'batch_loss/train': 0.6220389024820179} +12/21/2021 20:41:36 - INFO - codeparrot_training - Step 1959: {'lr': 0.000498420037181703, 'samples': 1003520, 'steps': 1959, 'batch_loss/train': 0.9105502516031265} +12/21/2021 20:41:49 - INFO - codeparrot_training - Step 1960: {'lr': 0.0004984182672971715, 'samples': 1004032, 'steps': 1960, 'batch_loss/train': 0.8652134397998452} +12/21/2021 20:41:59 - INFO - codeparrot_training - Step 1961: {'lr': 0.000498416496425023, 'samples': 1004544, 'steps': 1961, 'batch_loss/train': 0.9193787034600973} +12/21/2021 20:42:10 - INFO - codeparrot_training - Step 1962: {'lr': 0.000498414724565264, 'samples': 1005056, 'steps': 1962, 'batch_loss/train': 0.8855801778845489} +12/21/2021 20:42:20 - INFO - codeparrot_training - Step 1963: {'lr': 0.000498412951717902, 'samples': 1005568, 'steps': 1963, 'batch_loss/train': 0.922630394808948} +12/21/2021 20:42:32 - INFO - codeparrot_training - Step 1964: {'lr': 0.0004984111778829439, 'samples': 1006080, 'steps': 1964, 'batch_loss/train': 0.9501363150775433} +12/21/2021 20:42:43 - INFO - codeparrot_training - Step 1965: {'lr': 0.0004984094030603967, 'samples': 1006592, 'steps': 1965, 'batch_loss/train': 0.9640448782593012} +12/21/2021 20:42:54 - INFO - codeparrot_training - Step 1966: {'lr': 0.0004984076272502675, 'samples': 1007104, 'steps': 1966, 'batch_loss/train': 0.900334213860333} +12/21/2021 20:43:07 - INFO - codeparrot_training - Step 1967: {'lr': 0.0004984058504525635, 'samples': 1007616, 'steps': 1967, 'batch_loss/train': 0.9432421568781137} +12/21/2021 20:43:17 - INFO - codeparrot_training - Step 1968: {'lr': 0.0004984040726672916, 'samples': 1008128, 'steps': 1968, 'batch_loss/train': 
0.900830393191427} +12/21/2021 20:43:28 - INFO - codeparrot_training - Step 1969: {'lr': 0.0004984022938944588, 'samples': 1008640, 'steps': 1969, 'batch_loss/train': 0.8101817322894931} +12/21/2021 20:43:40 - INFO - codeparrot_training - Step 1970: {'lr': 0.0004984005141340724, 'samples': 1009152, 'steps': 1970, 'batch_loss/train': 0.8904825272038579} +12/21/2021 20:43:51 - INFO - codeparrot_training - Step 1971: {'lr': 0.0004983987333861392, 'samples': 1009664, 'steps': 1971, 'batch_loss/train': 0.8357994500547647} +12/21/2021 20:44:01 - INFO - codeparrot_training - Step 1972: {'lr': 0.0004983969516506666, 'samples': 1010176, 'steps': 1972, 'batch_loss/train': 0.90746954921633} +12/21/2021 20:44:12 - INFO - codeparrot_training - Step 1973: {'lr': 0.0004983951689276614, 'samples': 1010688, 'steps': 1973, 'batch_loss/train': 0.8991688769310713} +12/21/2021 20:44:25 - INFO - codeparrot_training - Step 1974: {'lr': 0.0004983933852171307, 'samples': 1011200, 'steps': 1974, 'batch_loss/train': 0.8147042547352612} +12/21/2021 20:44:35 - INFO - codeparrot_training - Step 1975: {'lr': 0.0004983916005190819, 'samples': 1011712, 'steps': 1975, 'batch_loss/train': 0.9409529753029346} +12/21/2021 20:44:46 - INFO - codeparrot_training - Step 1976: {'lr': 0.0004983898148335217, 'samples': 1012224, 'steps': 1976, 'batch_loss/train': 0.7380706034600735} +12/21/2021 20:44:58 - INFO - codeparrot_training - Step 1977: {'lr': 0.0004983880281604576, 'samples': 1012736, 'steps': 1977, 'batch_loss/train': 0.8662133812904358} +12/21/2021 20:45:09 - INFO - codeparrot_training - Step 1978: {'lr': 0.0004983862404998963, 'samples': 1013248, 'steps': 1978, 'batch_loss/train': 0.8539702156558633} +12/21/2021 20:45:19 - INFO - codeparrot_training - Step 1979: {'lr': 0.0004983844518518452, 'samples': 1013760, 'steps': 1979, 'batch_loss/train': 0.6292981938458979} +12/21/2021 20:45:30 - INFO - codeparrot_training - Step 1980: {'lr': 0.0004983826622163112, 'samples': 1014272, 'steps': 1980, 'batch_loss/train': 0.7358023351989686} +12/21/2021 20:45:42 - INFO - codeparrot_training - Step 1981: {'lr': 0.0004983808715933016, 'samples': 1014784, 'steps': 1981, 'batch_loss/train': 0.883704699575901} +12/21/2021 20:45:53 - INFO - codeparrot_training - Step 1982: {'lr': 0.0004983790799828234, 'samples': 1015296, 'steps': 1982, 'batch_loss/train': 0.8585545821115375} +12/21/2021 20:46:03 - INFO - codeparrot_training - Step 1983: {'lr': 0.0004983772873848838, 'samples': 1015808, 'steps': 1983, 'batch_loss/train': 0.9777622818946838} +12/21/2021 20:46:16 - INFO - codeparrot_training - Step 1984: {'lr': 0.0004983754937994899, 'samples': 1016320, 'steps': 1984, 'batch_loss/train': 0.8509199172258377} +12/21/2021 20:46:26 - INFO - codeparrot_training - Step 1985: {'lr': 0.0004983736992266488, 'samples': 1016832, 'steps': 1985, 'batch_loss/train': 0.8992130886763334} +12/21/2021 20:46:37 - INFO - codeparrot_training - Step 1986: {'lr': 0.0004983719036663676, 'samples': 1017344, 'steps': 1986, 'batch_loss/train': 0.87557462323457} +12/21/2021 20:46:50 - INFO - codeparrot_training - Step 1987: {'lr': 0.0004983701071186535, 'samples': 1017856, 'steps': 1987, 'batch_loss/train': 0.8397209839895368} +12/21/2021 20:47:00 - INFO - codeparrot_training - Step 1988: {'lr': 0.0004983683095835138, 'samples': 1018368, 'steps': 1988, 'batch_loss/train': 0.885900791734457} +12/21/2021 20:47:11 - INFO - codeparrot_training - Step 1989: {'lr': 0.0004983665110609553, 'samples': 1018880, 'steps': 1989, 'batch_loss/train': 0.9881757171824574} +12/21/2021 
20:47:22 - INFO - codeparrot_training - Step 1990: {'lr': 0.0004983647115509854, 'samples': 1019392, 'steps': 1990, 'batch_loss/train': 0.9471376352012157} +12/21/2021 20:47:34 - INFO - codeparrot_training - Step 1991: {'lr': 0.0004983629110536112, 'samples': 1019904, 'steps': 1991, 'batch_loss/train': 0.8785703368484974} +12/21/2021 20:47:44 - INFO - codeparrot_training - Step 1992: {'lr': 0.0004983611095688397, 'samples': 1020416, 'steps': 1992, 'batch_loss/train': 0.8402291759848595} +12/21/2021 20:47:55 - INFO - codeparrot_training - Step 1993: {'lr': 0.0004983593070966784, 'samples': 1020928, 'steps': 1993, 'batch_loss/train': 0.9637845447286963} +12/21/2021 20:48:07 - INFO - codeparrot_training - Step 1994: {'lr': 0.0004983575036371342, 'samples': 1021440, 'steps': 1994, 'batch_loss/train': 0.8597122263163328} +12/21/2021 20:48:17 - INFO - codeparrot_training - Step 1995: {'lr': 0.0004983556991902143, 'samples': 1021952, 'steps': 1995, 'batch_loss/train': 0.9756135120987892} +12/21/2021 20:48:28 - INFO - codeparrot_training - Step 1996: {'lr': 0.000498353893755926, 'samples': 1022464, 'steps': 1996, 'batch_loss/train': 0.9825044861063361} +12/21/2021 20:48:41 - INFO - codeparrot_training - Step 1997: {'lr': 0.0004983520873342763, 'samples': 1022976, 'steps': 1997, 'batch_loss/train': 0.9247800167649984} +12/21/2021 20:48:51 - INFO - codeparrot_training - Step 1998: {'lr': 0.0004983502799252725, 'samples': 1023488, 'steps': 1998, 'batch_loss/train': 0.936079922132194} +12/21/2021 20:49:02 - INFO - codeparrot_training - Step 1999: {'lr': 0.0004983484715289218, 'samples': 1024000, 'steps': 1999, 'batch_loss/train': 0.971809234470129} +12/21/2021 20:49:14 - INFO - codeparrot_training - Step 2000: {'lr': 0.0004983466621452313, 'samples': 1024512, 'steps': 2000, 'batch_loss/train': 0.9302699360996485} +12/21/2021 20:49:25 - INFO - codeparrot_training - Step 2001: {'lr': 0.0004983448517742083, 'samples': 1025024, 'steps': 2001, 'batch_loss/train': 0.9567683283239603} +12/21/2021 20:49:35 - INFO - codeparrot_training - Step 2002: {'lr': 0.0004983430404158599, 'samples': 1025536, 'steps': 2002, 'batch_loss/train': 0.9115695282816887} +12/21/2021 20:49:46 - INFO - codeparrot_training - Step 2003: {'lr': 0.0004983412280701933, 'samples': 1026048, 'steps': 2003, 'batch_loss/train': 0.7873107944615185} +12/21/2021 20:49:59 - INFO - codeparrot_training - Step 2004: {'lr': 0.0004983394147372159, 'samples': 1026560, 'steps': 2004, 'batch_loss/train': 0.925415784586221} +12/21/2021 20:50:09 - INFO - codeparrot_training - Step 2005: {'lr': 0.0004983376004169347, 'samples': 1027072, 'steps': 2005, 'batch_loss/train': 0.9583002189174294} +12/21/2021 20:50:20 - INFO - codeparrot_training - Step 2006: {'lr': 0.0004983357851093568, 'samples': 1027584, 'steps': 2006, 'batch_loss/train': 0.8649103962816298} +12/21/2021 20:50:32 - INFO - codeparrot_training - Step 2007: {'lr': 0.0004983339688144898, 'samples': 1028096, 'steps': 2007, 'batch_loss/train': 0.9426075303927064} +12/21/2021 20:50:43 - INFO - codeparrot_training - Step 2008: {'lr': 0.0004983321515323406, 'samples': 1028608, 'steps': 2008, 'batch_loss/train': 0.8747092611156404} +12/21/2021 20:50:53 - INFO - codeparrot_training - Step 2009: {'lr': 0.0004983303332629166, 'samples': 1029120, 'steps': 2009, 'batch_loss/train': 0.8771300781518221} +12/21/2021 20:51:05 - INFO - codeparrot_training - Step 2010: {'lr': 0.0004983285140062249, 'samples': 1029632, 'steps': 2010, 'batch_loss/train': 0.9037513546645641} +12/21/2021 20:51:16 - INFO - 
codeparrot_training - Step 2011: {'lr': 0.0004983266937622728, 'samples': 1030144, 'steps': 2011, 'batch_loss/train': 0.8476507430896163} +12/21/2021 20:51:27 - INFO - codeparrot_training - Step 2012: {'lr': 0.0004983248725310675, 'samples': 1030656, 'steps': 2012, 'batch_loss/train': 0.8998219296336174} +12/21/2021 20:51:37 - INFO - codeparrot_training - Step 2013: {'lr': 0.0004983230503126163, 'samples': 1031168, 'steps': 2013, 'batch_loss/train': 1.7594540459103882} +12/21/2021 20:51:50 - INFO - codeparrot_training - Step 2014: {'lr': 0.0004983212271069265, 'samples': 1031680, 'steps': 2014, 'batch_loss/train': 0.9030841011554003} +12/21/2021 20:52:01 - INFO - codeparrot_training - Step 2015: {'lr': 0.0004983194029140051, 'samples': 1032192, 'steps': 2015, 'batch_loss/train': 0.6301599130965769} +12/21/2021 20:52:11 - INFO - codeparrot_training - Step 2016: {'lr': 0.0004983175777338596, 'samples': 1032704, 'steps': 2016, 'batch_loss/train': 0.9767075311392546} +12/21/2021 20:52:24 - INFO - codeparrot_training - Step 2017: {'lr': 0.0004983157515664972, 'samples': 1033216, 'steps': 2017, 'batch_loss/train': 0.9529700479470193} +12/21/2021 20:52:35 - INFO - codeparrot_training - Step 2018: {'lr': 0.0004983139244119252, 'samples': 1033728, 'steps': 2018, 'batch_loss/train': 0.9072929713875055} +12/21/2021 20:52:45 - INFO - codeparrot_training - Step 2019: {'lr': 0.0004983120962701506, 'samples': 1034240, 'steps': 2019, 'batch_loss/train': 0.9229670595377684} +12/21/2021 20:52:57 - INFO - codeparrot_training - Step 2020: {'lr': 0.000498310267141181, 'samples': 1034752, 'steps': 2020, 'batch_loss/train': 0.8694041995331645} +12/21/2021 20:53:08 - INFO - codeparrot_training - Step 2021: {'lr': 0.0004983084370250235, 'samples': 1035264, 'steps': 2021, 'batch_loss/train': 0.8456221492961049} +12/21/2021 20:53:19 - INFO - codeparrot_training - Step 2022: {'lr': 0.0004983066059216854, 'samples': 1035776, 'steps': 2022, 'batch_loss/train': 0.9045664104633033} +12/21/2021 20:53:29 - INFO - codeparrot_training - Step 2023: {'lr': 0.000498304773831174, 'samples': 1036288, 'steps': 2023, 'batch_loss/train': 0.8795490153133869} +12/21/2021 20:53:41 - INFO - codeparrot_training - Step 2024: {'lr': 0.0004983029407534966, 'samples': 1036800, 'steps': 2024, 'batch_loss/train': 0.9291595285758376} +12/21/2021 20:53:52 - INFO - codeparrot_training - Step 2025: {'lr': 0.0004983011066886604, 'samples': 1037312, 'steps': 2025, 'batch_loss/train': 0.8593326075933874} +12/21/2021 20:54:02 - INFO - codeparrot_training - Step 2026: {'lr': 0.0004982992716366729, 'samples': 1037824, 'steps': 2026, 'batch_loss/train': 0.8937574117444456} +12/21/2021 20:54:15 - INFO - codeparrot_training - Step 2027: {'lr': 0.0004982974355975412, 'samples': 1038336, 'steps': 2027, 'batch_loss/train': 0.8459899229928851} +12/21/2021 20:54:26 - INFO - codeparrot_training - Step 2028: {'lr': 0.0004982955985712725, 'samples': 1038848, 'steps': 2028, 'batch_loss/train': 0.9798802491277456} +12/21/2021 20:54:36 - INFO - codeparrot_training - Step 2029: {'lr': 0.0004982937605578743, 'samples': 1039360, 'steps': 2029, 'batch_loss/train': 0.8368792654946446} +12/21/2021 20:54:49 - INFO - codeparrot_training - Step 2030: {'lr': 0.0004982919215573541, 'samples': 1039872, 'steps': 2030, 'batch_loss/train': 0.9758359370753169} +12/21/2021 20:55:00 - INFO - codeparrot_training - Step 2031: {'lr': 0.0004982900815697186, 'samples': 1040384, 'steps': 2031, 'batch_loss/train': 0.9173170011490583} +12/21/2021 20:55:11 - INFO - codeparrot_training - Step 
2032: {'lr': 0.0004982882405949757, 'samples': 1040896, 'steps': 2032, 'batch_loss/train': 1.336600348353386} +12/21/2021 20:55:21 - INFO - codeparrot_training - Step 2033: {'lr': 0.0004982863986331325, 'samples': 1041408, 'steps': 2033, 'batch_loss/train': 0.9601623322814703} +12/21/2021 20:55:33 - INFO - codeparrot_training - Step 2034: {'lr': 0.0004982845556841963, 'samples': 1041920, 'steps': 2034, 'batch_loss/train': 0.9148767059668899} +12/21/2021 20:55:44 - INFO - codeparrot_training - Step 2035: {'lr': 0.0004982827117481746, 'samples': 1042432, 'steps': 2035, 'batch_loss/train': 0.9845279511064291} +12/21/2021 20:55:54 - INFO - codeparrot_training - Step 2036: {'lr': 0.0004982808668250743, 'samples': 1042944, 'steps': 2036, 'batch_loss/train': 0.8697410952299833} +12/21/2021 20:56:07 - INFO - codeparrot_training - Step 2037: {'lr': 0.0004982790209149032, 'samples': 1043456, 'steps': 2037, 'batch_loss/train': 0.8704917645081878} +12/21/2021 20:56:18 - INFO - codeparrot_training - Step 2038: {'lr': 0.0004982771740176684, 'samples': 1043968, 'steps': 2038, 'batch_loss/train': 0.9036972601898015} +12/21/2021 20:56:28 - INFO - codeparrot_training - Step 2039: {'lr': 0.0004982753261333773, 'samples': 1044480, 'steps': 2039, 'batch_loss/train': 0.8113594288006425} +12/21/2021 20:56:40 - INFO - codeparrot_training - Step 2040: {'lr': 0.0004982734772620373, 'samples': 1044992, 'steps': 2040, 'batch_loss/train': 1.017271576449275} +12/21/2021 20:56:51 - INFO - codeparrot_training - Step 2041: {'lr': 0.0004982716274036557, 'samples': 1045504, 'steps': 2041, 'batch_loss/train': 1.009530832991004} +12/21/2021 20:57:02 - INFO - codeparrot_training - Step 2042: {'lr': 0.0004982697765582399, 'samples': 1046016, 'steps': 2042, 'batch_loss/train': 0.8396618878468871} +12/21/2021 20:57:14 - INFO - codeparrot_training - Step 2043: {'lr': 0.0004982679247257971, 'samples': 1046528, 'steps': 2043, 'batch_loss/train': 0.8713187663815916} +12/21/2021 20:57:25 - INFO - codeparrot_training - Step 2044: {'lr': 0.0004982660719063349, 'samples': 1047040, 'steps': 2044, 'batch_loss/train': 0.9142915960401297} +12/21/2021 20:57:36 - INFO - codeparrot_training - Step 2045: {'lr': 0.0004982642180998603, 'samples': 1047552, 'steps': 2045, 'batch_loss/train': 0.9489435544237494} +12/21/2021 20:57:46 - INFO - codeparrot_training - Step 2046: {'lr': 0.0004982623633063811, 'samples': 1048064, 'steps': 2046, 'batch_loss/train': 0.9301741113886237} +12/21/2021 20:57:58 - INFO - codeparrot_training - Step 2047: {'lr': 0.0004982605075259045, 'samples': 1048576, 'steps': 2047, 'batch_loss/train': 0.8671368854120374} +12/21/2021 20:58:09 - INFO - codeparrot_training - Step 2048: {'lr': 0.0004982586507584378, 'samples': 1049088, 'steps': 2048, 'batch_loss/train': 0.9748756121844053} +12/21/2021 20:58:20 - INFO - codeparrot_training - Step 2049: {'lr': 0.0004982567930039884, 'samples': 1049600, 'steps': 2049, 'batch_loss/train': 0.8773713186383247} +12/21/2021 20:58:32 - INFO - codeparrot_training - Step 2050: {'lr': 0.0004982549342625639, 'samples': 1050112, 'steps': 2050, 'batch_loss/train': 0.8680571410804987} +12/21/2021 20:58:43 - INFO - codeparrot_training - Step 2051: {'lr': 0.0004982530745341713, 'samples': 1050624, 'steps': 2051, 'batch_loss/train': 0.9838526826351881} +12/21/2021 20:58:53 - INFO - codeparrot_training - Step 2052: {'lr': 0.0004982512138188183, 'samples': 1051136, 'steps': 2052, 'batch_loss/train': 0.9161370927467942} +12/21/2021 20:59:06 - INFO - codeparrot_training - Step 2053: {'lr': 
0.0004982493521165123, 'samples': 1051648, 'steps': 2053, 'batch_loss/train': 0.7827617154689506} +12/21/2021 20:59:17 - INFO - codeparrot_training - Step 2054: {'lr': 0.0004982474894272605, 'samples': 1052160, 'steps': 2054, 'batch_loss/train': 0.7820278378203511} +12/21/2021 20:59:27 - INFO - codeparrot_training - Step 2055: {'lr': 0.0004982456257510705, 'samples': 1052672, 'steps': 2055, 'batch_loss/train': 0.961200850084424} +12/21/2021 20:59:38 - INFO - codeparrot_training - Step 2056: {'lr': 0.0004982437610879496, 'samples': 1053184, 'steps': 2056, 'batch_loss/train': 1.027608166448772} +12/21/2021 20:59:50 - INFO - codeparrot_training - Step 2057: {'lr': 0.0004982418954379052, 'samples': 1053696, 'steps': 2057, 'batch_loss/train': 0.7977152357343584} +12/21/2021 21:00:01 - INFO - codeparrot_training - Step 2058: {'lr': 0.0004982400288009448, 'samples': 1054208, 'steps': 2058, 'batch_loss/train': 0.8413022616878152} +12/21/2021 21:00:11 - INFO - codeparrot_training - Step 2059: {'lr': 0.0004982381611770756, 'samples': 1054720, 'steps': 2059, 'batch_loss/train': 0.7534038992598653} +12/21/2021 21:00:23 - INFO - codeparrot_training - Step 2060: {'lr': 0.0004982362925663054, 'samples': 1055232, 'steps': 2060, 'batch_loss/train': 0.8310652254149318} +12/21/2021 21:00:34 - INFO - codeparrot_training - Step 2061: {'lr': 0.0004982344229686414, 'samples': 1055744, 'steps': 2061, 'batch_loss/train': 0.8373401000862941} +12/21/2021 21:00:44 - INFO - codeparrot_training - Step 2062: {'lr': 0.0004982325523840911, 'samples': 1056256, 'steps': 2062, 'batch_loss/train': 0.8499380508437753} +12/21/2021 21:00:56 - INFO - codeparrot_training - Step 2063: {'lr': 0.0004982306808126618, 'samples': 1056768, 'steps': 2063, 'batch_loss/train': 0.8572957813739777} +12/21/2021 21:01:07 - INFO - codeparrot_training - Step 2064: {'lr': 0.0004982288082543611, 'samples': 1057280, 'steps': 2064, 'batch_loss/train': 0.8479521656408906} +12/21/2021 21:01:18 - INFO - codeparrot_training - Step 2065: {'lr': 0.0004982269347091964, 'samples': 1057792, 'steps': 2065, 'batch_loss/train': 0.8383498266339302} +12/21/2021 21:01:28 - INFO - codeparrot_training - Step 2066: {'lr': 0.000498225060177175, 'samples': 1058304, 'steps': 2066, 'batch_loss/train': 0.9711756845936179} +12/21/2021 21:01:41 - INFO - codeparrot_training - Step 2067: {'lr': 0.0004982231846583046, 'samples': 1058816, 'steps': 2067, 'batch_loss/train': 0.9383909879252315} +12/21/2021 21:01:52 - INFO - codeparrot_training - Step 2068: {'lr': 0.0004982213081525926, 'samples': 1059328, 'steps': 2068, 'batch_loss/train': 0.8447313709184527} +12/21/2021 21:02:02 - INFO - codeparrot_training - Step 2069: {'lr': 0.0004982194306600464, 'samples': 1059840, 'steps': 2069, 'batch_loss/train': 0.8892351621761918} +12/21/2021 21:02:15 - INFO - codeparrot_training - Step 2070: {'lr': 0.0004982175521806734, 'samples': 1060352, 'steps': 2070, 'batch_loss/train': 0.8545324122533202} +12/21/2021 21:02:25 - INFO - codeparrot_training - Step 2071: {'lr': 0.0004982156727144811, 'samples': 1060864, 'steps': 2071, 'batch_loss/train': 0.7637093365192413} +12/21/2021 21:02:36 - INFO - codeparrot_training - Step 2072: {'lr': 0.0004982137922614771, 'samples': 1061376, 'steps': 2072, 'batch_loss/train': 0.8623336441814899} +12/21/2021 21:02:48 - INFO - codeparrot_training - Step 2073: {'lr': 0.0004982119108216688, 'samples': 1061888, 'steps': 2073, 'batch_loss/train': 0.9540804685093462} +12/21/2021 21:02:59 - INFO - codeparrot_training - Step 2074: {'lr': 0.0004982100283950636, 
'samples': 1062400, 'steps': 2074, 'batch_loss/train': 0.82081974146422} +12/21/2021 21:03:10 - INFO - codeparrot_training - Step 2075: {'lr': 0.0004982081449816689, 'samples': 1062912, 'steps': 2075, 'batch_loss/train': 0.8770499657839537} +12/21/2021 21:03:20 - INFO - codeparrot_training - Step 2076: {'lr': 0.0004982062605814925, 'samples': 1063424, 'steps': 2076, 'batch_loss/train': 0.9273561807349324} +12/21/2021 21:03:32 - INFO - codeparrot_training - Step 2077: {'lr': 0.0004982043751945418, 'samples': 1063936, 'steps': 2077, 'batch_loss/train': 0.7912288075312972} +12/21/2021 21:03:43 - INFO - codeparrot_training - Step 2078: {'lr': 0.0004982024888208241, 'samples': 1064448, 'steps': 2078, 'batch_loss/train': 0.8962944250088185} +12/21/2021 21:03:54 - INFO - codeparrot_training - Step 2079: {'lr': 0.0004982006014603471, 'samples': 1064960, 'steps': 2079, 'batch_loss/train': 0.9481277707964182} +12/21/2021 21:04:06 - INFO - codeparrot_training - Step 2080: {'lr': 0.0004981987131131182, 'samples': 1065472, 'steps': 2080, 'batch_loss/train': 0.9640060588717461} +12/21/2021 21:04:16 - INFO - codeparrot_training - Step 2081: {'lr': 0.0004981968237791448, 'samples': 1065984, 'steps': 2081, 'batch_loss/train': 0.9303702700417489} +12/21/2021 21:04:27 - INFO - codeparrot_training - Step 2082: {'lr': 0.0004981949334584347, 'samples': 1066496, 'steps': 2082, 'batch_loss/train': 0.8874542722478509} +12/21/2021 21:04:40 - INFO - codeparrot_training - Step 2083: {'lr': 0.0004981930421509952, 'samples': 1067008, 'steps': 2083, 'batch_loss/train': 1.031493254005909} +12/21/2021 21:04:50 - INFO - codeparrot_training - Step 2084: {'lr': 0.0004981911498568339, 'samples': 1067520, 'steps': 2084, 'batch_loss/train': 0.6493448503315449} +12/21/2021 21:05:01 - INFO - codeparrot_training - Step 2085: {'lr': 0.0004981892565759583, 'samples': 1068032, 'steps': 2085, 'batch_loss/train': 0.8720532795414329} +12/21/2021 21:05:12 - INFO - codeparrot_training - Step 2086: {'lr': 0.0004981873623083759, 'samples': 1068544, 'steps': 2086, 'batch_loss/train': 0.8928144611418247} +12/21/2021 21:05:24 - INFO - codeparrot_training - Step 2087: {'lr': 0.0004981854670540942, 'samples': 1069056, 'steps': 2087, 'batch_loss/train': 0.9243437945842743} +12/21/2021 21:05:35 - INFO - codeparrot_training - Step 2088: {'lr': 0.0004981835708131211, 'samples': 1069568, 'steps': 2088, 'batch_loss/train': 0.9362732470035553} +12/21/2021 21:05:45 - INFO - codeparrot_training - Step 2089: {'lr': 0.0004981816735854635, 'samples': 1070080, 'steps': 2089, 'batch_loss/train': 0.9847526662051678} +12/21/2021 21:05:58 - INFO - codeparrot_training - Step 2090: {'lr': 0.0004981797753711295, 'samples': 1070592, 'steps': 2090, 'batch_loss/train': 0.8986259873490781} +12/21/2021 21:06:09 - INFO - codeparrot_training - Step 2091: {'lr': 0.0004981778761701262, 'samples': 1071104, 'steps': 2091, 'batch_loss/train': 1.6072808797471225} +12/21/2021 21:06:19 - INFO - codeparrot_training - Step 2092: {'lr': 0.0004981759759824615, 'samples': 1071616, 'steps': 2092, 'batch_loss/train': 0.8402136759832501} +12/21/2021 21:06:31 - INFO - codeparrot_training - Step 2093: {'lr': 0.0004981740748081429, 'samples': 1072128, 'steps': 2093, 'batch_loss/train': 0.8786074016243219} +12/21/2021 21:06:42 - INFO - codeparrot_training - Step 2094: {'lr': 0.0004981721726471778, 'samples': 1072640, 'steps': 2094, 'batch_loss/train': 0.9028868023306131} +12/21/2021 21:06:52 - INFO - codeparrot_training - Step 2095: {'lr': 0.0004981702694995737, 'samples': 1073152, 'steps': 
2095, 'batch_loss/train': 0.8658848702907562} +12/21/2021 21:07:03 - INFO - codeparrot_training - Step 2096: {'lr': 0.0004981683653653385, 'samples': 1073664, 'steps': 2096, 'batch_loss/train': 0.8809843941126019} +12/21/2021 21:07:15 - INFO - codeparrot_training - Step 2097: {'lr': 0.0004981664602444795, 'samples': 1074176, 'steps': 2097, 'batch_loss/train': 0.9681063313037157} +12/21/2021 21:07:26 - INFO - codeparrot_training - Step 2098: {'lr': 0.0004981645541370045, 'samples': 1074688, 'steps': 2098, 'batch_loss/train': 0.7830138020217419} +12/21/2021 21:07:36 - INFO - codeparrot_training - Step 2099: {'lr': 0.0004981626470429208, 'samples': 1075200, 'steps': 2099, 'batch_loss/train': 0.8380732387304306} +12/21/2021 21:07:48 - INFO - codeparrot_training - Step 2100: {'lr': 0.0004981607389622361, 'samples': 1075712, 'steps': 2100, 'batch_loss/train': 0.8200987172313035} +12/21/2021 21:07:59 - INFO - codeparrot_training - Step 2101: {'lr': 0.000498158829894958, 'samples': 1076224, 'steps': 2101, 'batch_loss/train': 0.90417237021029} +12/21/2021 21:08:09 - INFO - codeparrot_training - Step 2102: {'lr': 0.0004981569198410943, 'samples': 1076736, 'steps': 2102, 'batch_loss/train': 0.9305787873454392} +12/21/2021 21:08:22 - INFO - codeparrot_training - Step 2103: {'lr': 0.0004981550088006523, 'samples': 1077248, 'steps': 2103, 'batch_loss/train': 0.8563022955786437} +12/21/2021 21:08:33 - INFO - codeparrot_training - Step 2104: {'lr': 0.0004981530967736395, 'samples': 1077760, 'steps': 2104, 'batch_loss/train': 0.8638444542884827} +12/21/2021 21:08:43 - INFO - codeparrot_training - Step 2105: {'lr': 0.0004981511837600639, 'samples': 1078272, 'steps': 2105, 'batch_loss/train': 0.8885208987630904} +12/21/2021 21:08:54 - INFO - codeparrot_training - Step 2106: {'lr': 0.0004981492697599327, 'samples': 1078784, 'steps': 2106, 'batch_loss/train': 1.0104186870157719} +12/21/2021 21:09:06 - INFO - codeparrot_training - Step 2107: {'lr': 0.0004981473547732539, 'samples': 1079296, 'steps': 2107, 'batch_loss/train': 0.9351612981408834} +12/21/2021 21:09:17 - INFO - codeparrot_training - Step 2108: {'lr': 0.0004981454388000348, 'samples': 1079808, 'steps': 2108, 'batch_loss/train': 0.8659628313034773} +12/21/2021 21:09:27 - INFO - codeparrot_training - Step 2109: {'lr': 0.0004981435218402832, 'samples': 1080320, 'steps': 2109, 'batch_loss/train': 0.8570291493088007} +12/21/2021 21:09:39 - INFO - codeparrot_training - Step 2110: {'lr': 0.0004981416038940065, 'samples': 1080832, 'steps': 2110, 'batch_loss/train': 1.0245922897593118} +12/21/2021 21:09:50 - INFO - codeparrot_training - Step 2111: {'lr': 0.0004981396849612126, 'samples': 1081344, 'steps': 2111, 'batch_loss/train': 0.8535224534571171} +12/21/2021 21:10:00 - INFO - codeparrot_training - Step 2112: {'lr': 0.000498137765041909, 'samples': 1081856, 'steps': 2112, 'batch_loss/train': 0.838034956716001} +12/21/2021 21:10:13 - INFO - codeparrot_training - Step 2113: {'lr': 0.0004981358441361033, 'samples': 1082368, 'steps': 2113, 'batch_loss/train': 0.8196658273227513} +12/21/2021 21:10:24 - INFO - codeparrot_training - Step 2114: {'lr': 0.0004981339222438031, 'samples': 1082880, 'steps': 2114, 'batch_loss/train': 0.9101518481038511} +12/21/2021 21:10:35 - INFO - codeparrot_training - Step 2115: {'lr': 0.0004981319993650163, 'samples': 1083392, 'steps': 2115, 'batch_loss/train': 0.8587559210136533} +12/21/2021 21:10:45 - INFO - codeparrot_training - Step 2116: {'lr': 0.0004981300754997503, 'samples': 1083904, 'steps': 2116, 'batch_loss/train': 
0.8440177515149117} +12/21/2021 21:10:57 - INFO - codeparrot_training - Step 2117: {'lr': 0.0004981281506480128, 'samples': 1084416, 'steps': 2117, 'batch_loss/train': 0.8158174208365381} +12/21/2021 21:11:08 - INFO - codeparrot_training - Step 2118: {'lr': 0.0004981262248098114, 'samples': 1084928, 'steps': 2118, 'batch_loss/train': 0.8434099815785885} +12/21/2021 21:11:18 - INFO - codeparrot_training - Step 2119: {'lr': 0.0004981242979851539, 'samples': 1085440, 'steps': 2119, 'batch_loss/train': 0.9283712981268764} +12/21/2021 21:11:31 - INFO - codeparrot_training - Step 2120: {'lr': 0.0004981223701740478, 'samples': 1085952, 'steps': 2120, 'batch_loss/train': 0.9390012174844742} +12/21/2021 21:11:42 - INFO - codeparrot_training - Step 2121: {'lr': 0.0004981204413765009, 'samples': 1086464, 'steps': 2121, 'batch_loss/train': 0.8318948755040765} +12/21/2021 21:11:52 - INFO - codeparrot_training - Step 2122: {'lr': 0.0004981185115925208, 'samples': 1086976, 'steps': 2122, 'batch_loss/train': 1.4338542288169265} +12/21/2021 21:12:05 - INFO - codeparrot_training - Step 2123: {'lr': 0.0004981165808221152, 'samples': 1087488, 'steps': 2123, 'batch_loss/train': 0.909333495888859} +12/21/2021 21:12:16 - INFO - codeparrot_training - Step 2124: {'lr': 0.0004981146490652918, 'samples': 1088000, 'steps': 2124, 'batch_loss/train': 0.9454401088878512} +12/21/2021 21:12:26 - INFO - codeparrot_training - Step 2125: {'lr': 0.0004981127163220582, 'samples': 1088512, 'steps': 2125, 'batch_loss/train': 0.9084541611373425} +12/21/2021 21:12:37 - INFO - codeparrot_training - Step 2126: {'lr': 0.000498110782592422, 'samples': 1089024, 'steps': 2126, 'batch_loss/train': 0.9888563510030508} +12/21/2021 21:12:49 - INFO - codeparrot_training - Step 2127: {'lr': 0.0004981088478763912, 'samples': 1089536, 'steps': 2127, 'batch_loss/train': 1.0494901379570365} +12/21/2021 21:13:00 - INFO - codeparrot_training - Step 2128: {'lr': 0.0004981069121739732, 'samples': 1090048, 'steps': 2128, 'batch_loss/train': 1.0097657274454832} +12/21/2021 21:13:11 - INFO - codeparrot_training - Step 2129: {'lr': 0.0004981049754851759, 'samples': 1090560, 'steps': 2129, 'batch_loss/train': 0.8863750156015158} +12/21/2021 21:13:24 - INFO - codeparrot_training - Step 2130: {'lr': 0.0004981030378100068, 'samples': 1091072, 'steps': 2130, 'batch_loss/train': 0.8474823962897062} +12/21/2021 21:13:34 - INFO - codeparrot_training - Step 2131: {'lr': 0.0004981010991484736, 'samples': 1091584, 'steps': 2131, 'batch_loss/train': 0.8327016048133373} +12/21/2021 21:13:45 - INFO - codeparrot_training - Step 2132: {'lr': 0.0004980991595005843, 'samples': 1092096, 'steps': 2132, 'batch_loss/train': 0.8102053515613079} +12/21/2021 21:13:57 - INFO - codeparrot_training - Step 2133: {'lr': 0.0004980972188663464, 'samples': 1092608, 'steps': 2133, 'batch_loss/train': 0.8455256223678589} +12/21/2021 21:14:08 - INFO - codeparrot_training - Step 2134: {'lr': 0.0004980952772457676, 'samples': 1093120, 'steps': 2134, 'batch_loss/train': 0.8444462269544601} +12/21/2021 21:14:18 - INFO - codeparrot_training - Step 2135: {'lr': 0.0004980933346388556, 'samples': 1093632, 'steps': 2135, 'batch_loss/train': 0.9169659428298473} +12/21/2021 21:14:29 - INFO - codeparrot_training - Step 2136: {'lr': 0.0004980913910456183, 'samples': 1094144, 'steps': 2136, 'batch_loss/train': 1.0533856302499771} +12/21/2021 21:14:41 - INFO - codeparrot_training - Step 2137: {'lr': 0.0004980894464660632, 'samples': 1094656, 'steps': 2137, 'batch_loss/train': 1.2273599151521921} 
+12/21/2021 21:14:51 - INFO - codeparrot_training - Step 2138: {'lr': 0.0004980875009001982, 'samples': 1095168, 'steps': 2138, 'batch_loss/train': 0.8861374240368605} +12/21/2021 21:15:02 - INFO - codeparrot_training - Step 2139: {'lr': 0.0004980855543480309, 'samples': 1095680, 'steps': 2139, 'batch_loss/train': 0.8363719885237515} +12/21/2021 21:15:14 - INFO - codeparrot_training - Step 2140: {'lr': 0.0004980836068095691, 'samples': 1096192, 'steps': 2140, 'batch_loss/train': 0.866899729706347} +12/21/2021 21:15:25 - INFO - codeparrot_training - Step 2141: {'lr': 0.0004980816582848206, 'samples': 1096704, 'steps': 2141, 'batch_loss/train': 0.9049460580572486} +12/21/2021 21:15:35 - INFO - codeparrot_training - Step 2142: {'lr': 0.0004980797087737931, 'samples': 1097216, 'steps': 2142, 'batch_loss/train': 0.9276563785970211} +12/21/2021 21:15:48 - INFO - codeparrot_training - Step 2143: {'lr': 0.0004980777582764944, 'samples': 1097728, 'steps': 2143, 'batch_loss/train': 0.8835952868685126} +12/21/2021 21:15:59 - INFO - codeparrot_training - Step 2144: {'lr': 0.0004980758067929321, 'samples': 1098240, 'steps': 2144, 'batch_loss/train': 0.9366036951541901} +12/21/2021 21:16:09 - INFO - codeparrot_training - Step 2145: {'lr': 0.0004980738543231141, 'samples': 1098752, 'steps': 2145, 'batch_loss/train': 0.8915330907329917} +12/21/2021 21:16:20 - INFO - codeparrot_training - Step 2146: {'lr': 0.0004980719008670482, 'samples': 1099264, 'steps': 2146, 'batch_loss/train': 0.9525922108441591} +12/21/2021 21:16:32 - INFO - codeparrot_training - Step 2147: {'lr': 0.0004980699464247419, 'samples': 1099776, 'steps': 2147, 'batch_loss/train': 0.9620562195777893} +12/21/2021 21:16:43 - INFO - codeparrot_training - Step 2148: {'lr': 0.0004980679909962033, 'samples': 1100288, 'steps': 2148, 'batch_loss/train': 0.9537472035735846} +12/21/2021 21:16:53 - INFO - codeparrot_training - Step 2149: {'lr': 0.0004980660345814401, 'samples': 1100800, 'steps': 2149, 'batch_loss/train': 0.8254425078630447} +12/21/2021 21:17:06 - INFO - codeparrot_training - Step 2150: {'lr': 0.0004980640771804598, 'samples': 1101312, 'steps': 2150, 'batch_loss/train': 0.9531893031671643} +12/21/2021 21:17:16 - INFO - codeparrot_training - Step 2151: {'lr': 0.0004980621187932706, 'samples': 1101824, 'steps': 2151, 'batch_loss/train': 1.106052802875638} +12/21/2021 21:17:27 - INFO - codeparrot_training - Step 2152: {'lr': 0.00049806015941988, 'samples': 1102336, 'steps': 2152, 'batch_loss/train': 1.0102463271468878} +12/21/2021 21:17:40 - INFO - codeparrot_training - Step 2153: {'lr': 0.0004980581990602959, 'samples': 1102848, 'steps': 2153, 'batch_loss/train': 0.9261414585635066} +12/21/2021 21:17:50 - INFO - codeparrot_training - Step 2154: {'lr': 0.000498056237714526, 'samples': 1103360, 'steps': 2154, 'batch_loss/train': 0.9095185939222574} +12/21/2021 21:18:01 - INFO - codeparrot_training - Step 2155: {'lr': 0.0004980542753825782, 'samples': 1103872, 'steps': 2155, 'batch_loss/train': 0.8923492953181267} +12/21/2021 21:18:13 - INFO - codeparrot_training - Step 2156: {'lr': 0.0004980523120644603, 'samples': 1104384, 'steps': 2156, 'batch_loss/train': 0.9582636393606663} +12/21/2021 21:18:24 - INFO - codeparrot_training - Step 2157: {'lr': 0.00049805034776018, 'samples': 1104896, 'steps': 2157, 'batch_loss/train': 0.8866456095129251} +12/21/2021 21:18:34 - INFO - codeparrot_training - Step 2158: {'lr': 0.0004980483824697453, 'samples': 1105408, 'steps': 2158, 'batch_loss/train': 1.0278130862861872} +12/21/2021 21:18:45 - INFO - 
codeparrot_training - Step 2159: {'lr': 0.0004980464161931637, 'samples': 1105920, 'steps': 2159, 'batch_loss/train': 0.943147636950016} +12/21/2021 21:18:57 - INFO - codeparrot_training - Step 2160: {'lr': 0.0004980444489304434, 'samples': 1106432, 'steps': 2160, 'batch_loss/train': 0.8256605723872781} +12/21/2021 21:19:08 - INFO - codeparrot_training - Step 2161: {'lr': 0.0004980424806815918, 'samples': 1106944, 'steps': 2161, 'batch_loss/train': 1.0392022356390953} +12/21/2021 21:19:18 - INFO - codeparrot_training - Step 2162: {'lr': 0.0004980405114466172, 'samples': 1107456, 'steps': 2162, 'batch_loss/train': 0.8696748856455088} +12/21/2021 21:19:31 - INFO - codeparrot_training - Step 2163: {'lr': 0.0004980385412255271, 'samples': 1107968, 'steps': 2163, 'batch_loss/train': 0.9622868206351995} +12/21/2021 21:19:41 - INFO - codeparrot_training - Step 2164: {'lr': 0.0004980365700183294, 'samples': 1108480, 'steps': 2164, 'batch_loss/train': 0.8946727151051164} +12/21/2021 21:19:52 - INFO - codeparrot_training - Step 2165: {'lr': 0.000498034597825032, 'samples': 1108992, 'steps': 2165, 'batch_loss/train': 0.9010253064334393} +12/21/2021 21:20:04 - INFO - codeparrot_training - Step 2166: {'lr': 0.0004980326246456425, 'samples': 1109504, 'steps': 2166, 'batch_loss/train': 0.9960449505597353} +12/21/2021 21:20:15 - INFO - codeparrot_training - Step 2167: {'lr': 0.0004980306504801691, 'samples': 1110016, 'steps': 2167, 'batch_loss/train': 0.9025410525500774} +12/21/2021 21:20:26 - INFO - codeparrot_training - Step 2168: {'lr': 0.0004980286753286195, 'samples': 1110528, 'steps': 2168, 'batch_loss/train': 0.8581621199846268} +12/21/2021 21:20:36 - INFO - codeparrot_training - Step 2169: {'lr': 0.0004980266991910015, 'samples': 1111040, 'steps': 2169, 'batch_loss/train': 1.4633424987550825} +12/21/2021 21:20:49 - INFO - codeparrot_training - Step 2170: {'lr': 0.0004980247220673229, 'samples': 1111552, 'steps': 2170, 'batch_loss/train': 0.9213416995480657} +12/21/2021 21:20:59 - INFO - codeparrot_training - Step 2171: {'lr': 0.0004980227439575918, 'samples': 1112064, 'steps': 2171, 'batch_loss/train': 0.8921311385929585} +12/21/2021 21:21:10 - INFO - codeparrot_training - Step 2172: {'lr': 0.0004980207648618159, 'samples': 1112576, 'steps': 2172, 'batch_loss/train': 0.8760981447994709} +12/21/2021 21:21:22 - INFO - codeparrot_training - Step 2173: {'lr': 0.0004980187847800031, 'samples': 1113088, 'steps': 2173, 'batch_loss/train': 0.8544832719489932} +12/21/2021 21:21:33 - INFO - codeparrot_training - Step 2174: {'lr': 0.0004980168037121612, 'samples': 1113600, 'steps': 2174, 'batch_loss/train': 0.9261558644939214} +12/21/2021 21:21:43 - INFO - codeparrot_training - Step 2175: {'lr': 0.0004980148216582981, 'samples': 1114112, 'steps': 2175, 'batch_loss/train': 0.9252141518518329} +12/21/2021 21:21:56 - INFO - codeparrot_training - Step 2176: {'lr': 0.0004980128386184218, 'samples': 1114624, 'steps': 2176, 'batch_loss/train': 0.8932766157668084} +12/21/2021 21:22:07 - INFO - codeparrot_training - Step 2177: {'lr': 0.0004980108545925401, 'samples': 1115136, 'steps': 2177, 'batch_loss/train': 0.8832616172730923} +12/21/2021 21:22:17 - INFO - codeparrot_training - Step 2178: {'lr': 0.0004980088695806608, 'samples': 1115648, 'steps': 2178, 'batch_loss/train': 0.8894687267020345} +12/21/2021 21:22:28 - INFO - codeparrot_training - Step 2179: {'lr': 0.000498006883582792, 'samples': 1116160, 'steps': 2179, 'batch_loss/train': 0.9565798686817288} +12/21/2021 21:22:41 - INFO - codeparrot_training - Step 
2180: {'lr': 0.0004980048965989413, 'samples': 1116672, 'steps': 2180, 'batch_loss/train': 0.8366931579075754} +12/21/2021 21:22:51 - INFO - codeparrot_training - Step 2181: {'lr': 0.0004980029086291169, 'samples': 1117184, 'steps': 2181, 'batch_loss/train': 0.784460118971765} +12/21/2021 21:23:02 - INFO - codeparrot_training - Step 2182: {'lr': 0.0004980009196733266, 'samples': 1117696, 'steps': 2182, 'batch_loss/train': 0.8811569255776703} +12/21/2021 21:23:14 - INFO - codeparrot_training - Step 2183: {'lr': 0.0004979989297315782, 'samples': 1118208, 'steps': 2183, 'batch_loss/train': 0.887143530882895} +12/21/2021 21:23:25 - INFO - codeparrot_training - Step 2184: {'lr': 0.0004979969388038798, 'samples': 1118720, 'steps': 2184, 'batch_loss/train': 0.8376783616840839} +12/21/2021 21:23:36 - INFO - codeparrot_training - Step 2185: {'lr': 0.0004979949468902392, 'samples': 1119232, 'steps': 2185, 'batch_loss/train': 1.4935764158144593} +12/21/2021 21:23:47 - INFO - codeparrot_training - Step 2186: {'lr': 0.0004979929539906643, 'samples': 1119744, 'steps': 2186, 'batch_loss/train': 0.9285049010068178} +12/21/2021 21:23:58 - INFO - codeparrot_training - Step 2187: {'lr': 0.0004979909601051629, 'samples': 1120256, 'steps': 2187, 'batch_loss/train': 0.8790778340771794} +12/21/2021 21:24:09 - INFO - codeparrot_training - Step 2188: {'lr': 0.0004979889652337433, 'samples': 1120768, 'steps': 2188, 'batch_loss/train': 0.9266867991536856} +12/21/2021 21:24:19 - INFO - codeparrot_training - Step 2189: {'lr': 0.0004979869693764131, 'samples': 1121280, 'steps': 2189, 'batch_loss/train': 0.8623130759224296} +12/21/2021 21:24:33 - INFO - codeparrot_training - Step 2190: {'lr': 0.0004979849725331804, 'samples': 1121792, 'steps': 2190, 'batch_loss/train': 0.917578674852848} +12/21/2021 21:24:44 - INFO - codeparrot_training - Step 2191: {'lr': 0.000497982974704053, 'samples': 1122304, 'steps': 2191, 'batch_loss/train': 0.800558096729219} +12/21/2021 21:24:54 - INFO - codeparrot_training - Step 2192: {'lr': 0.000497980975889039, 'samples': 1122816, 'steps': 2192, 'batch_loss/train': 0.9526922944933176} +12/21/2021 21:25:06 - INFO - codeparrot_training - Step 2193: {'lr': 0.0004979789760881463, 'samples': 1123328, 'steps': 2193, 'batch_loss/train': 0.89458985067904} +12/21/2021 21:25:17 - INFO - codeparrot_training - Step 2194: {'lr': 0.0004979769753013828, 'samples': 1123840, 'steps': 2194, 'batch_loss/train': 0.9124226216226816} +12/21/2021 21:25:27 - INFO - codeparrot_training - Step 2195: {'lr': 0.0004979749735287564, 'samples': 1124352, 'steps': 2195, 'batch_loss/train': 0.857542373239994} +12/21/2021 21:25:41 - INFO - codeparrot_training - Step 2196: {'lr': 0.0004979729707702752, 'samples': 1124864, 'steps': 2196, 'batch_loss/train': 0.8491443232633173} +12/21/2021 21:25:51 - INFO - codeparrot_training - Step 2197: {'lr': 0.0004979709670259471, 'samples': 1125376, 'steps': 2197, 'batch_loss/train': 1.846842444036156} +12/21/2021 21:26:02 - INFO - codeparrot_training - Step 2198: {'lr': 0.0004979689622957802, 'samples': 1125888, 'steps': 2198, 'batch_loss/train': 0.6682246718555689} +12/21/2021 21:26:12 - INFO - codeparrot_training - Step 2199: {'lr': 0.000497966956579782, 'samples': 1126400, 'steps': 2199, 'batch_loss/train': 0.9120249319821596} +12/21/2021 21:26:25 - INFO - codeparrot_training - Step 2200: {'lr': 0.0004979649498779612, 'samples': 1126912, 'steps': 2200, 'batch_loss/train': 1.71440254105255} +12/21/2021 21:26:35 - INFO - codeparrot_training - Step 2201: {'lr': 0.0004979629421903251, 
'samples': 1127424, 'steps': 2201, 'batch_loss/train': 0.9875760222785175} +12/21/2021 21:26:46 - INFO - codeparrot_training - Step 2202: {'lr': 0.000497960933516882, 'samples': 1127936, 'steps': 2202, 'batch_loss/train': 0.7724146912805736} +12/21/2021 21:26:58 - INFO - codeparrot_training - Step 2203: {'lr': 0.00049795892385764, 'samples': 1128448, 'steps': 2203, 'batch_loss/train': 0.8396572517231107} +12/21/2021 21:27:08 - INFO - codeparrot_training - Step 2204: {'lr': 0.0004979569132126069, 'samples': 1128960, 'steps': 2204, 'batch_loss/train': 0.8988684583455324} +12/21/2021 21:27:19 - INFO - codeparrot_training - Step 2205: {'lr': 0.0004979549015817907, 'samples': 1129472, 'steps': 2205, 'batch_loss/train': 0.8194378204643726} +12/21/2021 21:27:32 - INFO - codeparrot_training - Step 2206: {'lr': 0.0004979528889651994, 'samples': 1129984, 'steps': 2206, 'batch_loss/train': 0.9286805298179388} +12/21/2021 21:27:43 - INFO - codeparrot_training - Step 2207: {'lr': 0.0004979508753628411, 'samples': 1130496, 'steps': 2207, 'batch_loss/train': 0.7505495395162143} +12/21/2021 21:27:53 - INFO - codeparrot_training - Step 2208: {'lr': 0.0004979488607747238, 'samples': 1131008, 'steps': 2208, 'batch_loss/train': 0.8061871505342424} +12/21/2021 21:28:04 - INFO - codeparrot_training - Step 2209: {'lr': 0.0004979468452008554, 'samples': 1131520, 'steps': 2209, 'batch_loss/train': 0.7535290666855872} +12/21/2021 21:28:16 - INFO - codeparrot_training - Step 2210: {'lr': 0.0004979448286412438, 'samples': 1132032, 'steps': 2210, 'batch_loss/train': 1.0124948769807816} +12/21/2021 21:28:27 - INFO - codeparrot_training - Step 2211: {'lr': 0.0004979428110958974, 'samples': 1132544, 'steps': 2211, 'batch_loss/train': 0.8996817008592188} +12/21/2021 21:28:37 - INFO - codeparrot_training - Step 2212: {'lr': 0.0004979407925648239, 'samples': 1133056, 'steps': 2212, 'batch_loss/train': 0.9879712192341685} +12/21/2021 21:28:49 - INFO - codeparrot_training - Step 2213: {'lr': 0.0004979387730480314, 'samples': 1133568, 'steps': 2213, 'batch_loss/train': 0.8593467073515058} +12/21/2021 21:29:00 - INFO - codeparrot_training - Step 2214: {'lr': 0.000497936752545528, 'samples': 1134080, 'steps': 2214, 'batch_loss/train': 0.9214249616488814} +12/21/2021 21:29:10 - INFO - codeparrot_training - Step 2215: {'lr': 0.0004979347310573217, 'samples': 1134592, 'steps': 2215, 'batch_loss/train': 0.814708998426795} +12/21/2021 21:29:23 - INFO - codeparrot_training - Step 2216: {'lr': 0.0004979327085834205, 'samples': 1135104, 'steps': 2216, 'batch_loss/train': 0.8611375791952014} +12/21/2021 21:29:34 - INFO - codeparrot_training - Step 2217: {'lr': 0.0004979306851238325, 'samples': 1135616, 'steps': 2217, 'batch_loss/train': 0.8968394612893462} +12/21/2021 21:29:44 - INFO - codeparrot_training - Step 2218: {'lr': 0.0004979286606785657, 'samples': 1136128, 'steps': 2218, 'batch_loss/train': 0.9267248502001166} +12/21/2021 21:29:55 - INFO - codeparrot_training - Step 2219: {'lr': 0.0004979266352476281, 'samples': 1136640, 'steps': 2219, 'batch_loss/train': 0.9810216510668397} +12/21/2021 21:30:07 - INFO - codeparrot_training - Step 2220: {'lr': 0.0004979246088310279, 'samples': 1137152, 'steps': 2220, 'batch_loss/train': 0.8797378921881318} +12/21/2021 21:30:17 - INFO - codeparrot_training - Step 2221: {'lr': 0.0004979225814287731, 'samples': 1137664, 'steps': 2221, 'batch_loss/train': 0.8825861206278205} +12/21/2021 21:30:28 - INFO - codeparrot_training - Step 2222: {'lr': 0.0004979205530408716, 'samples': 1138176, 'steps': 
2222, 'batch_loss/train': 0.8596117487177253} +12/21/2021 21:30:40 - INFO - codeparrot_training - Step 2223: {'lr': 0.0004979185236673317, 'samples': 1138688, 'steps': 2223, 'batch_loss/train': 0.7715544956736267} +12/21/2021 21:30:51 - INFO - codeparrot_training - Step 2224: {'lr': 0.0004979164933081613, 'samples': 1139200, 'steps': 2224, 'batch_loss/train': 0.6525041048880666} +12/21/2021 21:31:02 - INFO - codeparrot_training - Step 2225: {'lr': 0.0004979144619633685, 'samples': 1139712, 'steps': 2225, 'batch_loss/train': 0.6971809483366087} +12/21/2021 21:31:15 - INFO - codeparrot_training - Step 2226: {'lr': 0.0004979124296329615, 'samples': 1140224, 'steps': 2226, 'batch_loss/train': 1.041741375811398} +12/21/2021 21:31:25 - INFO - codeparrot_training - Step 2227: {'lr': 0.0004979103963169482, 'samples': 1140736, 'steps': 2227, 'batch_loss/train': 0.709704970009625} +12/21/2021 21:31:36 - INFO - codeparrot_training - Step 2228: {'lr': 0.0004979083620153368, 'samples': 1141248, 'steps': 2228, 'batch_loss/train': 0.9767834870144725} +12/21/2021 21:31:46 - INFO - codeparrot_training - Step 2229: {'lr': 0.0004979063267281354, 'samples': 1141760, 'steps': 2229, 'batch_loss/train': 0.859210027847439} +12/21/2021 21:31:58 - INFO - codeparrot_training - Step 2230: {'lr': 0.000497904290455352, 'samples': 1142272, 'steps': 2230, 'batch_loss/train': 0.8877223068848252} +12/21/2021 21:32:09 - INFO - codeparrot_training - Step 2231: {'lr': 0.0004979022531969948, 'samples': 1142784, 'steps': 2231, 'batch_loss/train': 0.9415205167606473} +12/21/2021 21:32:20 - INFO - codeparrot_training - Step 2232: {'lr': 0.0004979002149530718, 'samples': 1143296, 'steps': 2232, 'batch_loss/train': 0.8467596904374659} +12/21/2021 21:32:32 - INFO - codeparrot_training - Step 2233: {'lr': 0.0004978981757235912, 'samples': 1143808, 'steps': 2233, 'batch_loss/train': 0.8203006573021412} +12/21/2021 21:32:42 - INFO - codeparrot_training - Step 2234: {'lr': 0.000497896135508561, 'samples': 1144320, 'steps': 2234, 'batch_loss/train': 0.9310867032036185} +12/21/2021 21:32:53 - INFO - codeparrot_training - Step 2235: {'lr': 0.0004978940943079894, 'samples': 1144832, 'steps': 2235, 'batch_loss/train': 0.8537202458828688} +12/21/2021 21:33:06 - INFO - codeparrot_training - Step 2236: {'lr': 0.0004978920521218845, 'samples': 1145344, 'steps': 2236, 'batch_loss/train': 0.8510296512395144} +12/21/2021 21:33:16 - INFO - codeparrot_training - Step 2237: {'lr': 0.0004978900089502543, 'samples': 1145856, 'steps': 2237, 'batch_loss/train': 0.7684935056604445} +12/21/2021 21:33:27 - INFO - codeparrot_training - Step 2238: {'lr': 0.0004978879647931072, 'samples': 1146368, 'steps': 2238, 'batch_loss/train': 0.8555542253889143} +12/21/2021 21:33:38 - INFO - codeparrot_training - Step 2239: {'lr': 0.000497885919650451, 'samples': 1146880, 'steps': 2239, 'batch_loss/train': 0.8755789836868644} +12/21/2021 21:33:50 - INFO - codeparrot_training - Step 2240: {'lr': 0.0004978838735222941, 'samples': 1147392, 'steps': 2240, 'batch_loss/train': 0.7839141879230738} +12/21/2021 21:34:00 - INFO - codeparrot_training - Step 2241: {'lr': 0.0004978818264086444, 'samples': 1147904, 'steps': 2241, 'batch_loss/train': 0.8618870414793491} +12/21/2021 21:34:11 - INFO - codeparrot_training - Step 2242: {'lr': 0.0004978797783095102, 'samples': 1148416, 'steps': 2242, 'batch_loss/train': 0.8104640068486333} +12/21/2021 21:34:23 - INFO - codeparrot_training - Step 2243: {'lr': 0.0004978777292248996, 'samples': 1148928, 'steps': 2243, 'batch_loss/train': 
0.8643834972754121} +12/21/2021 21:34:34 - INFO - codeparrot_training - Step 2244: {'lr': 0.0004978756791548207, 'samples': 1149440, 'steps': 2244, 'batch_loss/train': 0.8470911365002394} +12/21/2021 21:34:45 - INFO - codeparrot_training - Step 2245: {'lr': 0.0004978736280992817, 'samples': 1149952, 'steps': 2245, 'batch_loss/train': 0.8103189882822335} +12/21/2021 21:34:55 - INFO - codeparrot_training - Step 2246: {'lr': 0.0004978715760582908, 'samples': 1150464, 'steps': 2246, 'batch_loss/train': 0.8458332307636738} +12/21/2021 21:35:07 - INFO - codeparrot_training - Step 2247: {'lr': 0.0004978695230318561, 'samples': 1150976, 'steps': 2247, 'batch_loss/train': 0.8244990287348628} +12/21/2021 21:35:18 - INFO - codeparrot_training - Step 2248: {'lr': 0.0004978674690199858, 'samples': 1151488, 'steps': 2248, 'batch_loss/train': 0.9097336670383811} +12/21/2021 21:35:28 - INFO - codeparrot_training - Step 2249: {'lr': 0.0004978654140226879, 'samples': 1152000, 'steps': 2249, 'batch_loss/train': 0.9904982983134687} +12/21/2021 21:35:40 - INFO - codeparrot_training - Step 2250: {'lr': 0.0004978633580399708, 'samples': 1152512, 'steps': 2250, 'batch_loss/train': 0.8670511143282056} +12/21/2021 21:35:51 - INFO - codeparrot_training - Step 2251: {'lr': 0.0004978613010718426, 'samples': 1153024, 'steps': 2251, 'batch_loss/train': 0.8320633759722114} +12/21/2021 21:36:02 - INFO - codeparrot_training - Step 2252: {'lr': 0.0004978592431183114, 'samples': 1153536, 'steps': 2252, 'batch_loss/train': 0.8419060765299946} +12/21/2021 21:36:14 - INFO - codeparrot_training - Step 2253: {'lr': 0.0004978571841793855, 'samples': 1154048, 'steps': 2253, 'batch_loss/train': 0.902762464247644} +12/21/2021 21:36:25 - INFO - codeparrot_training - Step 2254: {'lr': 0.000497855124255073, 'samples': 1154560, 'steps': 2254, 'batch_loss/train': 0.8537281826138496} +12/21/2021 21:36:35 - INFO - codeparrot_training - Step 2255: {'lr': 0.0004978530633453821, 'samples': 1155072, 'steps': 2255, 'batch_loss/train': 0.9316979879513383} +12/21/2021 21:36:47 - INFO - codeparrot_training - Step 2256: {'lr': 0.000497851001450321, 'samples': 1155584, 'steps': 2256, 'batch_loss/train': 0.8397816433571279} +12/21/2021 21:36:58 - INFO - codeparrot_training - Step 2257: {'lr': 0.000497848938569898, 'samples': 1156096, 'steps': 2257, 'batch_loss/train': 0.9209824232384562} +12/21/2021 21:37:09 - INFO - codeparrot_training - Step 2258: {'lr': 0.000497846874704121, 'samples': 1156608, 'steps': 2258, 'batch_loss/train': 0.9315807013772428} +12/21/2021 21:37:19 - INFO - codeparrot_training - Step 2259: {'lr': 0.0004978448098529986, 'samples': 1157120, 'steps': 2259, 'batch_loss/train': 0.8902814593166113} +12/21/2021 21:37:31 - INFO - codeparrot_training - Step 2260: {'lr': 0.0004978427440165388, 'samples': 1157632, 'steps': 2260, 'batch_loss/train': 0.8946724468842149} +12/21/2021 21:37:42 - INFO - codeparrot_training - Step 2261: {'lr': 0.0004978406771947496, 'samples': 1158144, 'steps': 2261, 'batch_loss/train': 0.8967864457517862} +12/21/2021 21:37:52 - INFO - codeparrot_training - Step 2262: {'lr': 0.0004978386093876396, 'samples': 1158656, 'steps': 2262, 'batch_loss/train': 0.883538038469851} +12/21/2021 21:38:04 - INFO - codeparrot_training - Step 2263: {'lr': 0.000497836540595217, 'samples': 1159168, 'steps': 2263, 'batch_loss/train': 0.9213018584996462} +12/21/2021 21:38:15 - INFO - codeparrot_training - Step 2264: {'lr': 0.0004978344708174898, 'samples': 1159680, 'steps': 2264, 'batch_loss/train': 0.9337521586567163} +12/21/2021 
21:38:26 - INFO - codeparrot_training - Step 2265: {'lr': 0.0004978324000544663, 'samples': 1160192, 'steps': 2265, 'batch_loss/train': 0.856205290183425} +12/21/2021 21:38:39 - INFO - codeparrot_training - Step 2266: {'lr': 0.0004978303283061547, 'samples': 1160704, 'steps': 2266, 'batch_loss/train': 0.8010878721252084} +12/21/2021 21:38:50 - INFO - codeparrot_training - Step 2267: {'lr': 0.0004978282555725634, 'samples': 1161216, 'steps': 2267, 'batch_loss/train': 2.1426543900743127} +12/21/2021 21:39:00 - INFO - codeparrot_training - Step 2268: {'lr': 0.0004978261818537005, 'samples': 1161728, 'steps': 2268, 'batch_loss/train': 0.9220291841775179} +12/21/2021 21:39:11 - INFO - codeparrot_training - Step 2269: {'lr': 0.0004978241071495743, 'samples': 1162240, 'steps': 2269, 'batch_loss/train': 0.8712067613378167} +12/21/2021 21:39:23 - INFO - codeparrot_training - Step 2270: {'lr': 0.000497822031460193, 'samples': 1162752, 'steps': 2270, 'batch_loss/train': 0.8895259560085833} +12/21/2021 21:39:34 - INFO - codeparrot_training - Step 2271: {'lr': 0.0004978199547855649, 'samples': 1163264, 'steps': 2271, 'batch_loss/train': 1.0714044999331236} +12/21/2021 21:39:44 - INFO - codeparrot_training - Step 2272: {'lr': 0.0004978178771256981, 'samples': 1163776, 'steps': 2272, 'batch_loss/train': 0.9201099090278149} +12/21/2021 21:39:56 - INFO - codeparrot_training - Step 2273: {'lr': 0.0004978157984806011, 'samples': 1164288, 'steps': 2273, 'batch_loss/train': 0.7963182115927339} +12/21/2021 21:40:07 - INFO - codeparrot_training - Step 2274: {'lr': 0.0004978137188502821, 'samples': 1164800, 'steps': 2274, 'batch_loss/train': 0.8720248723402619} +12/21/2021 21:40:18 - INFO - codeparrot_training - Step 2275: {'lr': 0.0004978116382347493, 'samples': 1165312, 'steps': 2275, 'batch_loss/train': 0.8976620621979237} +12/21/2021 21:40:30 - INFO - codeparrot_training - Step 2276: {'lr': 0.0004978095566340109, 'samples': 1165824, 'steps': 2276, 'batch_loss/train': 0.8699776418507099} +12/21/2021 21:40:41 - INFO - codeparrot_training - Step 2277: {'lr': 0.0004978074740480754, 'samples': 1166336, 'steps': 2277, 'batch_loss/train': 0.8808933347463608} +12/21/2021 21:40:52 - INFO - codeparrot_training - Step 2278: {'lr': 0.0004978053904769508, 'samples': 1166848, 'steps': 2278, 'batch_loss/train': 0.9219242474064231} +12/21/2021 21:41:02 - INFO - codeparrot_training - Step 2279: {'lr': 0.0004978033059206457, 'samples': 1167360, 'steps': 2279, 'batch_loss/train': 0.815322594717145} +12/21/2021 21:41:14 - INFO - codeparrot_training - Step 2280: {'lr': 0.0004978012203791681, 'samples': 1167872, 'steps': 2280, 'batch_loss/train': 0.8727032486349344} +12/21/2021 21:41:25 - INFO - codeparrot_training - Step 2281: {'lr': 0.0004977991338525266, 'samples': 1168384, 'steps': 2281, 'batch_loss/train': 0.8825947418808937} +12/21/2021 21:41:35 - INFO - codeparrot_training - Step 2282: {'lr': 0.0004977970463407291, 'samples': 1168896, 'steps': 2282, 'batch_loss/train': 0.9200711483135819} +12/21/2021 21:41:48 - INFO - codeparrot_training - Step 2283: {'lr': 0.0004977949578437842, 'samples': 1169408, 'steps': 2283, 'batch_loss/train': 0.9378671711310744} +12/21/2021 21:41:59 - INFO - codeparrot_training - Step 2284: {'lr': 0.0004977928683617, 'samples': 1169920, 'steps': 2284, 'batch_loss/train': 1.332573532126844} +12/21/2021 21:42:09 - INFO - codeparrot_training - Step 2285: {'lr': 0.0004977907778944851, 'samples': 1170432, 'steps': 2285, 'batch_loss/train': 1.3997020409442484} +12/21/2021 21:42:21 - INFO - 
codeparrot_training - Step 2286: {'lr': 0.0004977886864421475, 'samples': 1170944, 'steps': 2286, 'batch_loss/train': 0.8744787750765681} +12/21/2021 21:42:32 - INFO - codeparrot_training - Step 2287: {'lr': 0.0004977865940046958, 'samples': 1171456, 'steps': 2287, 'batch_loss/train': 0.8960451623424888} +12/21/2021 21:42:42 - INFO - codeparrot_training - Step 2288: {'lr': 0.000497784500582138, 'samples': 1171968, 'steps': 2288, 'batch_loss/train': 0.837315577082336} +12/21/2021 21:42:53 - INFO - codeparrot_training - Step 2289: {'lr': 0.0004977824061744826, 'samples': 1172480, 'steps': 2289, 'batch_loss/train': 0.8370909532532096} +12/21/2021 21:43:05 - INFO - codeparrot_training - Step 2290: {'lr': 0.0004977803107817379, 'samples': 1172992, 'steps': 2290, 'batch_loss/train': 0.8417616076767445} +12/21/2021 21:43:16 - INFO - codeparrot_training - Step 2291: {'lr': 0.0004977782144039123, 'samples': 1173504, 'steps': 2291, 'batch_loss/train': 0.8052168441936374} +12/21/2021 21:43:26 - INFO - codeparrot_training - Step 2292: {'lr': 0.0004977761170410142, 'samples': 1174016, 'steps': 2292, 'batch_loss/train': 0.8740883832797408} +12/21/2021 21:43:39 - INFO - codeparrot_training - Step 2293: {'lr': 0.0004977740186930515, 'samples': 1174528, 'steps': 2293, 'batch_loss/train': 0.8501850543543696} +12/21/2021 21:43:50 - INFO - codeparrot_training - Step 2294: {'lr': 0.000497771919360033, 'samples': 1175040, 'steps': 2294, 'batch_loss/train': 0.7779655768536031} +12/21/2021 21:44:00 - INFO - codeparrot_training - Step 2295: {'lr': 0.0004977698190419671, 'samples': 1175552, 'steps': 2295, 'batch_loss/train': 0.8492563883773983} +12/21/2021 21:44:12 - INFO - codeparrot_training - Step 2296: {'lr': 0.0004977677177388617, 'samples': 1176064, 'steps': 2296, 'batch_loss/train': 0.9158772830851376} +12/21/2021 21:44:23 - INFO - codeparrot_training - Step 2297: {'lr': 0.0004977656154507255, 'samples': 1176576, 'steps': 2297, 'batch_loss/train': 0.8607986383140087} +12/21/2021 21:44:34 - INFO - codeparrot_training - Step 2298: {'lr': 0.0004977635121775668, 'samples': 1177088, 'steps': 2298, 'batch_loss/train': 0.9793339476455003} +12/21/2021 21:44:44 - INFO - codeparrot_training - Step 2299: {'lr': 0.0004977614079193939, 'samples': 1177600, 'steps': 2299, 'batch_loss/train': 0.789015160407871} +12/21/2021 21:44:57 - INFO - codeparrot_training - Step 2300: {'lr': 0.0004977593026762152, 'samples': 1178112, 'steps': 2300, 'batch_loss/train': 0.947744095697999} +12/21/2021 21:45:08 - INFO - codeparrot_training - Step 2301: {'lr': 0.0004977571964480391, 'samples': 1178624, 'steps': 2301, 'batch_loss/train': 0.8814868815243244} +12/21/2021 21:45:18 - INFO - codeparrot_training - Step 2302: {'lr': 0.0004977550892348738, 'samples': 1179136, 'steps': 2302, 'batch_loss/train': 0.9349094778299332} +12/21/2021 21:45:30 - INFO - codeparrot_training - Step 2303: {'lr': 0.0004977529810367279, 'samples': 1179648, 'steps': 2303, 'batch_loss/train': 0.8994383979588747} +12/21/2021 21:45:41 - INFO - codeparrot_training - Step 2304: {'lr': 0.0004977508718536097, 'samples': 1180160, 'steps': 2304, 'batch_loss/train': 0.9291807231493294} +12/21/2021 21:45:51 - INFO - codeparrot_training - Step 2305: {'lr': 0.0004977487616855276, 'samples': 1180672, 'steps': 2305, 'batch_loss/train': 0.8446083217859268} +12/21/2021 21:46:03 - INFO - codeparrot_training - Step 2306: {'lr': 0.0004977466505324899, 'samples': 1181184, 'steps': 2306, 'batch_loss/train': 0.9765346995554864} +12/21/2021 21:46:14 - INFO - codeparrot_training - Step 
2307: {'lr': 0.0004977445383945051, 'samples': 1181696, 'steps': 2307, 'batch_loss/train': 0.8346361583098769} +12/21/2021 21:46:24 - INFO - codeparrot_training - Step 2308: {'lr': 0.0004977424252715816, 'samples': 1182208, 'steps': 2308, 'batch_loss/train': 0.9940981734544039} +12/21/2021 21:46:37 - INFO - codeparrot_training - Step 2309: {'lr': 0.0004977403111637278, 'samples': 1182720, 'steps': 2309, 'batch_loss/train': 0.823179142549634} +12/21/2021 21:46:47 - INFO - codeparrot_training - Step 2310: {'lr': 0.000497738196070952, 'samples': 1183232, 'steps': 2310, 'batch_loss/train': 0.9323650067672133} +12/21/2021 21:46:58 - INFO - codeparrot_training - Step 2311: {'lr': 0.0004977360799932627, 'samples': 1183744, 'steps': 2311, 'batch_loss/train': 1.1666291635483503} +12/21/2021 21:47:09 - INFO - codeparrot_training - Step 2312: {'lr': 0.0004977339629306682, 'samples': 1184256, 'steps': 2312, 'batch_loss/train': 0.8491664617322385} +12/21/2021 21:47:21 - INFO - codeparrot_training - Step 2313: {'lr': 0.000497731844883177, 'samples': 1184768, 'steps': 2313, 'batch_loss/train': 0.7647358803078532} +12/21/2021 21:47:32 - INFO - codeparrot_training - Step 2314: {'lr': 0.0004977297258507977, 'samples': 1185280, 'steps': 2314, 'batch_loss/train': 0.8725790660828352} +12/21/2021 21:47:43 - INFO - codeparrot_training - Step 2315: {'lr': 0.0004977276058335384, 'samples': 1185792, 'steps': 2315, 'batch_loss/train': 0.8600598387420177} +12/21/2021 21:47:55 - INFO - codeparrot_training - Step 2316: {'lr': 0.0004977254848314077, 'samples': 1186304, 'steps': 2316, 'batch_loss/train': 0.8896271474659443} +12/21/2021 21:48:05 - INFO - codeparrot_training - Step 2317: {'lr': 0.0004977233628444141, 'samples': 1186816, 'steps': 2317, 'batch_loss/train': 0.8423408018425107} +12/21/2021 21:48:16 - INFO - codeparrot_training - Step 2318: {'lr': 0.0004977212398725658, 'samples': 1187328, 'steps': 2318, 'batch_loss/train': 0.8389368960633874} +12/21/2021 21:48:26 - INFO - codeparrot_training - Step 2319: {'lr': 0.0004977191159158715, 'samples': 1187840, 'steps': 2319, 'batch_loss/train': 0.8435713793151081} +12/21/2021 21:48:39 - INFO - codeparrot_training - Step 2320: {'lr': 0.0004977169909743395, 'samples': 1188352, 'steps': 2320, 'batch_loss/train': 0.8436617329716682} +12/21/2021 21:48:49 - INFO - codeparrot_training - Step 2321: {'lr': 0.0004977148650479782, 'samples': 1188864, 'steps': 2321, 'batch_loss/train': 0.8810571497306228} +12/21/2021 21:49:00 - INFO - codeparrot_training - Step 2322: {'lr': 0.0004977127381367962, 'samples': 1189376, 'steps': 2322, 'batch_loss/train': 0.6939816528465599} +12/21/2021 21:49:13 - INFO - codeparrot_training - Step 2323: {'lr': 0.0004977106102408019, 'samples': 1189888, 'steps': 2323, 'batch_loss/train': 0.7772838626988232} +12/21/2021 21:49:23 - INFO - codeparrot_training - Step 2324: {'lr': 0.0004977084813600037, 'samples': 1190400, 'steps': 2324, 'batch_loss/train': 0.8892768668010831} +12/21/2021 21:49:34 - INFO - codeparrot_training - Step 2325: {'lr': 0.0004977063514944102, 'samples': 1190912, 'steps': 2325, 'batch_loss/train': 0.8198740109801292} +12/21/2021 21:49:46 - INFO - codeparrot_training - Step 2326: {'lr': 0.0004977042206440297, 'samples': 1191424, 'steps': 2326, 'batch_loss/train': 1.145355949178338} +12/21/2021 21:49:57 - INFO - codeparrot_training - Step 2327: {'lr': 0.0004977020888088708, 'samples': 1191936, 'steps': 2327, 'batch_loss/train': 1.0620467979460955} +12/21/2021 21:50:07 - INFO - codeparrot_training - Step 2328: {'lr': 
0.0004976999559889419, 'samples': 1192448, 'steps': 2328, 'batch_loss/train': 0.9247380411252379} +12/21/2021 21:50:18 - INFO - codeparrot_training - Step 2329: {'lr': 0.0004976978221842514, 'samples': 1192960, 'steps': 2329, 'batch_loss/train': 0.7741743242368102} +12/21/2021 21:50:31 - INFO - codeparrot_training - Step 2330: {'lr': 0.000497695687394808, 'samples': 1193472, 'steps': 2330, 'batch_loss/train': 0.8631991744041443} +12/21/2021 21:50:41 - INFO - codeparrot_training - Step 2331: {'lr': 0.0004976935516206201, 'samples': 1193984, 'steps': 2331, 'batch_loss/train': 0.8145364029332995} +12/21/2021 21:50:52 - INFO - codeparrot_training - Step 2332: {'lr': 0.0004976914148616961, 'samples': 1194496, 'steps': 2332, 'batch_loss/train': 0.9249777430668473} +12/21/2021 21:51:04 - INFO - codeparrot_training - Step 2333: {'lr': 0.0004976892771180446, 'samples': 1195008, 'steps': 2333, 'batch_loss/train': 0.9417273616418242} +12/21/2021 21:51:14 - INFO - codeparrot_training - Step 2334: {'lr': 0.000497687138389674, 'samples': 1195520, 'steps': 2334, 'batch_loss/train': 0.8532694061286747} +12/21/2021 21:51:25 - INFO - codeparrot_training - Step 2335: {'lr': 0.0004976849986765929, 'samples': 1196032, 'steps': 2335, 'batch_loss/train': 0.871632250957191} +12/21/2021 21:51:37 - INFO - codeparrot_training - Step 2336: {'lr': 0.0004976828579788098, 'samples': 1196544, 'steps': 2336, 'batch_loss/train': 0.91069849813357} +12/21/2021 21:51:48 - INFO - codeparrot_training - Step 2337: {'lr': 0.000497680716296333, 'samples': 1197056, 'steps': 2337, 'batch_loss/train': 0.8804760938510299} +12/21/2021 21:51:58 - INFO - codeparrot_training - Step 2338: {'lr': 0.0004976785736291714, 'samples': 1197568, 'steps': 2338, 'batch_loss/train': 0.9022408267483115} +12/21/2021 21:52:11 - INFO - codeparrot_training - Step 2339: {'lr': 0.0004976764299773332, 'samples': 1198080, 'steps': 2339, 'batch_loss/train': 0.9523665346205235} +12/21/2021 21:52:22 - INFO - codeparrot_training - Step 2340: {'lr': 0.0004976742853408272, 'samples': 1198592, 'steps': 2340, 'batch_loss/train': 0.934666195884347} +12/21/2021 21:52:32 - INFO - codeparrot_training - Step 2341: {'lr': 0.0004976721397196615, 'samples': 1199104, 'steps': 2341, 'batch_loss/train': 0.8503328179940581} +12/21/2021 21:52:43 - INFO - codeparrot_training - Step 2342: {'lr': 0.0004976699931138451, 'samples': 1199616, 'steps': 2342, 'batch_loss/train': 0.9063902031630278} +12/21/2021 21:52:55 - INFO - codeparrot_training - Step 2343: {'lr': 0.0004976678455233861, 'samples': 1200128, 'steps': 2343, 'batch_loss/train': 0.9257733291015029} +12/21/2021 21:53:06 - INFO - codeparrot_training - Step 2344: {'lr': 0.0004976656969482936, 'samples': 1200640, 'steps': 2344, 'batch_loss/train': 0.923200418241322} +12/21/2021 21:53:16 - INFO - codeparrot_training - Step 2345: {'lr': 0.0004976635473885755, 'samples': 1201152, 'steps': 2345, 'batch_loss/train': 0.9614932350814342} +12/21/2021 21:53:29 - INFO - codeparrot_training - Step 2346: {'lr': 0.0004976613968442408, 'samples': 1201664, 'steps': 2346, 'batch_loss/train': 1.7225259644910693} +12/21/2021 21:53:39 - INFO - codeparrot_training - Step 2347: {'lr': 0.0004976592453152978, 'samples': 1202176, 'steps': 2347, 'batch_loss/train': 0.874820230063051} +12/21/2021 21:53:50 - INFO - codeparrot_training - Step 2348: {'lr': 0.0004976570928017553, 'samples': 1202688, 'steps': 2348, 'batch_loss/train': 0.9124313183128834} +12/21/2021 21:54:02 - INFO - codeparrot_training - Step 2349: {'lr': 0.0004976549393036216, 'samples': 
1203200, 'steps': 2349, 'batch_loss/train': 0.9434279901906848} +12/21/2021 21:54:12 - INFO - codeparrot_training - Step 2350: {'lr': 0.0004976527848209053, 'samples': 1203712, 'steps': 2350, 'batch_loss/train': 0.8958991011604667} +12/21/2021 21:54:23 - INFO - codeparrot_training - Step 2351: {'lr': 0.0004976506293536153, 'samples': 1204224, 'steps': 2351, 'batch_loss/train': 0.9379781316965818} +12/21/2021 21:54:34 - INFO - codeparrot_training - Step 2352: {'lr': 0.0004976484729017597, 'samples': 1204736, 'steps': 2352, 'batch_loss/train': 0.9066098197363317} +12/21/2021 21:54:46 - INFO - codeparrot_training - Step 2353: {'lr': 0.0004976463154653473, 'samples': 1205248, 'steps': 2353, 'batch_loss/train': 0.9837380452081561} +12/21/2021 21:54:57 - INFO - codeparrot_training - Step 2354: {'lr': 0.0004976441570443867, 'samples': 1205760, 'steps': 2354, 'batch_loss/train': 0.9686786141246557} +12/21/2021 21:55:07 - INFO - codeparrot_training - Step 2355: {'lr': 0.0004976419976388864, 'samples': 1206272, 'steps': 2355, 'batch_loss/train': 0.9933761283755302} +12/21/2021 21:55:20 - INFO - codeparrot_training - Step 2356: {'lr': 0.000497639837248855, 'samples': 1206784, 'steps': 2356, 'batch_loss/train': 0.9119098447263241} +12/21/2021 21:55:30 - INFO - codeparrot_training - Step 2357: {'lr': 0.0004976376758743012, 'samples': 1207296, 'steps': 2357, 'batch_loss/train': 0.9667276339605451} +12/21/2021 21:55:41 - INFO - codeparrot_training - Step 2358: {'lr': 0.0004976355135152334, 'samples': 1207808, 'steps': 2358, 'batch_loss/train': 0.9433298241347075} +12/21/2021 21:55:53 - INFO - codeparrot_training - Step 2359: {'lr': 0.0004976333501716605, 'samples': 1208320, 'steps': 2359, 'batch_loss/train': 0.8725891700014472} +12/21/2021 21:56:03 - INFO - codeparrot_training - Step 2360: {'lr': 0.0004976311858435906, 'samples': 1208832, 'steps': 2360, 'batch_loss/train': 0.9047106020152569} +12/21/2021 21:56:14 - INFO - codeparrot_training - Step 2361: {'lr': 0.0004976290205310329, 'samples': 1209344, 'steps': 2361, 'batch_loss/train': 0.9422816019505262} +12/21/2021 21:56:24 - INFO - codeparrot_training - Step 2362: {'lr': 0.0004976268542339955, 'samples': 1209856, 'steps': 2362, 'batch_loss/train': 0.8470016540959477} +12/21/2021 21:56:37 - INFO - codeparrot_training - Step 2363: {'lr': 0.0004976246869524873, 'samples': 1210368, 'steps': 2363, 'batch_loss/train': 0.970789322629571} +12/21/2021 21:56:48 - INFO - codeparrot_training - Step 2364: {'lr': 0.0004976225186865169, 'samples': 1210880, 'steps': 2364, 'batch_loss/train': 1.028847231529653} +12/21/2021 21:56:59 - INFO - codeparrot_training - Step 2365: {'lr': 0.0004976203494360929, 'samples': 1211392, 'steps': 2365, 'batch_loss/train': 0.8853037673979998} +12/21/2021 21:57:11 - INFO - codeparrot_training - Step 2366: {'lr': 0.0004976181792012238, 'samples': 1211904, 'steps': 2366, 'batch_loss/train': 0.8265939480625093} +12/21/2021 21:57:21 - INFO - codeparrot_training - Step 2367: {'lr': 0.0004976160079819183, 'samples': 1212416, 'steps': 2367, 'batch_loss/train': 0.8612535633146763} +12/21/2021 21:57:32 - INFO - codeparrot_training - Step 2368: {'lr': 0.000497613835778185, 'samples': 1212928, 'steps': 2368, 'batch_loss/train': 0.9739093212410808} +12/21/2021 21:57:44 - INFO - codeparrot_training - Step 2369: {'lr': 0.0004976116625900327, 'samples': 1213440, 'steps': 2369, 'batch_loss/train': 0.86505445279181} +12/21/2021 21:57:55 - INFO - codeparrot_training - Step 2370: {'lr': 0.0004976094884174699, 'samples': 1213952, 'steps': 2370, 
'batch_loss/train': 0.9953653095290065} +12/21/2021 21:58:06 - INFO - codeparrot_training - Step 2371: {'lr': 0.0004976073132605053, 'samples': 1214464, 'steps': 2371, 'batch_loss/train': 0.9579923544079065} +12/21/2021 21:58:18 - INFO - codeparrot_training - Step 2372: {'lr': 0.0004976051371191474, 'samples': 1214976, 'steps': 2372, 'batch_loss/train': 0.8554048575460911} +12/21/2021 21:58:28 - INFO - codeparrot_training - Step 2373: {'lr': 0.0004976029599934051, 'samples': 1215488, 'steps': 2373, 'batch_loss/train': 0.9077753610908985} +12/21/2021 21:58:39 - INFO - codeparrot_training - Step 2374: {'lr': 0.0004976007818832868, 'samples': 1216000, 'steps': 2374, 'batch_loss/train': 1.0730564780533314} +12/21/2021 21:58:49 - INFO - codeparrot_training - Step 2375: {'lr': 0.0004975986027888013, 'samples': 1216512, 'steps': 2375, 'batch_loss/train': 0.8591835740953684} +12/21/2021 21:59:01 - INFO - codeparrot_training - Step 2376: {'lr': 0.0004975964227099573, 'samples': 1217024, 'steps': 2376, 'batch_loss/train': 1.1030665198341012} +12/21/2021 21:59:12 - INFO - codeparrot_training - Step 2377: {'lr': 0.0004975942416467633, 'samples': 1217536, 'steps': 2377, 'batch_loss/train': 0.8398629724979401} +12/21/2021 21:59:23 - INFO - codeparrot_training - Step 2378: {'lr': 0.0004975920595992283, 'samples': 1218048, 'steps': 2378, 'batch_loss/train': 0.8421083074063063} +12/21/2021 21:59:36 - INFO - codeparrot_training - Step 2379: {'lr': 0.0004975898765673607, 'samples': 1218560, 'steps': 2379, 'batch_loss/train': 1.0005822032690048} +12/21/2021 21:59:46 - INFO - codeparrot_training - Step 2380: {'lr': 0.000497587692551169, 'samples': 1219072, 'steps': 2380, 'batch_loss/train': 0.8523722635582089} +12/21/2021 21:59:57 - INFO - codeparrot_training - Step 2381: {'lr': 0.0004975855075506623, 'samples': 1219584, 'steps': 2381, 'batch_loss/train': 0.9162467382848263} +12/21/2021 22:00:09 - INFO - codeparrot_training - Step 2382: {'lr': 0.0004975833215658492, 'samples': 1220096, 'steps': 2382, 'batch_loss/train': 0.8199595594778657} +12/21/2021 22:00:19 - INFO - codeparrot_training - Step 2383: {'lr': 0.0004975811345967381, 'samples': 1220608, 'steps': 2383, 'batch_loss/train': 0.9168350305408239} +12/21/2021 22:00:30 - INFO - codeparrot_training - Step 2384: {'lr': 0.0004975789466433381, 'samples': 1221120, 'steps': 2384, 'batch_loss/train': 1.214301634579897} +12/21/2021 22:00:43 - INFO - codeparrot_training - Step 2385: {'lr': 0.0004975767577056576, 'samples': 1221632, 'steps': 2385, 'batch_loss/train': 0.938509707339108} +12/21/2021 22:00:53 - INFO - codeparrot_training - Step 2386: {'lr': 0.0004975745677837053, 'samples': 1222144, 'steps': 2386, 'batch_loss/train': 0.9724141815677285} +12/21/2021 22:01:04 - INFO - codeparrot_training - Step 2387: {'lr': 0.0004975723768774901, 'samples': 1222656, 'steps': 2387, 'batch_loss/train': 0.9731059758923948} +12/21/2021 22:01:15 - INFO - codeparrot_training - Step 2388: {'lr': 0.0004975701849870205, 'samples': 1223168, 'steps': 2388, 'batch_loss/train': 0.5016653165221214} +12/21/2021 22:01:27 - INFO - codeparrot_training - Step 2389: {'lr': 0.0004975679921123055, 'samples': 1223680, 'steps': 2389, 'batch_loss/train': 0.8466060645878315} +12/21/2021 22:01:37 - INFO - codeparrot_training - Step 2390: {'lr': 0.0004975657982533536, 'samples': 1224192, 'steps': 2390, 'batch_loss/train': 0.9465504665859044} +12/21/2021 22:01:48 - INFO - codeparrot_training - Step 2391: {'lr': 0.0004975636034101734, 'samples': 1224704, 'steps': 2391, 'batch_loss/train': 
0.898351626470685} +12/21/2021 22:02:00 - INFO - codeparrot_training - Step 2392: {'lr': 0.0004975614075827739, 'samples': 1225216, 'steps': 2392, 'batch_loss/train': 0.9409959949553013} +12/21/2021 22:02:11 - INFO - codeparrot_training - Step 2393: {'lr': 0.0004975592107711637, 'samples': 1225728, 'steps': 2393, 'batch_loss/train': 0.796946465736255} +12/21/2021 22:02:22 - INFO - codeparrot_training - Step 2394: {'lr': 0.0004975570129753516, 'samples': 1226240, 'steps': 2394, 'batch_loss/train': 0.8744842549785972} +12/21/2021 22:02:33 - INFO - codeparrot_training - Step 2395: {'lr': 0.0004975548141953462, 'samples': 1226752, 'steps': 2395, 'batch_loss/train': 0.8209635470993817} +12/21/2021 22:02:44 - INFO - codeparrot_training - Step 2396: {'lr': 0.0004975526144311564, 'samples': 1227264, 'steps': 2396, 'batch_loss/train': 0.83765724953264} +12/21/2021 22:02:55 - INFO - codeparrot_training - Step 2397: {'lr': 0.000497550413682791, 'samples': 1227776, 'steps': 2397, 'batch_loss/train': 0.8945213290862739} +12/21/2021 22:03:05 - INFO - codeparrot_training - Step 2398: {'lr': 0.0004975482119502585, 'samples': 1228288, 'steps': 2398, 'batch_loss/train': 0.8881947649642825} +12/21/2021 22:03:18 - INFO - codeparrot_training - Step 2399: {'lr': 0.0004975460092335678, 'samples': 1228800, 'steps': 2399, 'batch_loss/train': 0.8556665410287678} +12/21/2021 22:03:29 - INFO - codeparrot_training - Step 2400: {'lr': 0.0004975438055327275, 'samples': 1229312, 'steps': 2400, 'batch_loss/train': 0.9395778954494745} +12/21/2021 22:03:39 - INFO - codeparrot_training - Step 2401: {'lr': 0.0004975416008477467, 'samples': 1229824, 'steps': 2401, 'batch_loss/train': 0.8943656301125884} +12/21/2021 22:03:52 - INFO - codeparrot_training - Step 2402: {'lr': 0.0004975393951786338, 'samples': 1230336, 'steps': 2402, 'batch_loss/train': 0.9031441528350115} +12/21/2021 22:04:02 - INFO - codeparrot_training - Step 2403: {'lr': 0.0004975371885253978, 'samples': 1230848, 'steps': 2403, 'batch_loss/train': 1.5882976193679497} +12/21/2021 22:04:13 - INFO - codeparrot_training - Step 2404: {'lr': 0.0004975349808880474, 'samples': 1231360, 'steps': 2404, 'batch_loss/train': 0.8681585155427456} +12/21/2021 22:04:25 - INFO - codeparrot_training - Step 2405: {'lr': 0.0004975327722665915, 'samples': 1231872, 'steps': 2405, 'batch_loss/train': 0.9288921132683754} +12/21/2021 22:04:36 - INFO - codeparrot_training - Step 2406: {'lr': 0.0004975305626610386, 'samples': 1232384, 'steps': 2406, 'batch_loss/train': 0.9370471481233835} +12/21/2021 22:04:46 - INFO - codeparrot_training - Step 2407: {'lr': 0.0004975283520713977, 'samples': 1232896, 'steps': 2407, 'batch_loss/train': 0.803179731592536} +12/21/2021 22:04:57 - INFO - codeparrot_training - Step 2408: {'lr': 0.0004975261404976775, 'samples': 1233408, 'steps': 2408, 'batch_loss/train': 0.798239984549582} +12/21/2021 22:05:10 - INFO - codeparrot_training - Step 2409: {'lr': 0.0004975239279398869, 'samples': 1233920, 'steps': 2409, 'batch_loss/train': 0.8998807547613978} +12/21/2021 22:05:21 - INFO - codeparrot_training - Step 2410: {'lr': 0.0004975217143980346, 'samples': 1234432, 'steps': 2410, 'batch_loss/train': 2.1426363266073167} +12/21/2021 22:05:31 - INFO - codeparrot_training - Step 2411: {'lr': 0.0004975194998721293, 'samples': 1234944, 'steps': 2411, 'batch_loss/train': 0.9118229290470481} +12/21/2021 22:05:43 - INFO - codeparrot_training - Step 2412: {'lr': 0.0004975172843621801, 'samples': 1235456, 'steps': 2412, 'batch_loss/train': 0.8632950251922011} +12/21/2021 
22:05:54 - INFO - codeparrot_training - Step 2413: {'lr': 0.0004975150678681956, 'samples': 1235968, 'steps': 2413, 'batch_loss/train': 0.8642621673643589} +12/21/2021 22:06:04 - INFO - codeparrot_training - Step 2414: {'lr': 0.0004975128503901846, 'samples': 1236480, 'steps': 2414, 'batch_loss/train': 0.8957793498411775} +12/21/2021 22:06:17 - INFO - codeparrot_training - Step 2415: {'lr': 0.000497510631928156, 'samples': 1236992, 'steps': 2415, 'batch_loss/train': 0.8772434862330556} +12/21/2021 22:06:28 - INFO - codeparrot_training - Step 2416: {'lr': 0.0004975084124821186, 'samples': 1237504, 'steps': 2416, 'batch_loss/train': 0.8770761080086231} +12/21/2021 22:06:38 - INFO - codeparrot_training - Step 2417: {'lr': 0.0004975061920520812, 'samples': 1238016, 'steps': 2417, 'batch_loss/train': 0.6964186881668866} +12/21/2021 22:06:49 - INFO - codeparrot_training - Step 2418: {'lr': 0.0004975039706380525, 'samples': 1238528, 'steps': 2418, 'batch_loss/train': 0.8637498514726758} +12/21/2021 22:07:01 - INFO - codeparrot_training - Step 2419: {'lr': 0.0004975017482400417, 'samples': 1239040, 'steps': 2419, 'batch_loss/train': 0.9701014682650566} +12/21/2021 22:07:12 - INFO - codeparrot_training - Step 2420: {'lr': 0.0004974995248580572, 'samples': 1239552, 'steps': 2420, 'batch_loss/train': 0.8720846213400364} +12/21/2021 22:07:22 - INFO - codeparrot_training - Step 2421: {'lr': 0.0004974973004921081, 'samples': 1240064, 'steps': 2421, 'batch_loss/train': 1.0774633758701384} +12/21/2021 22:07:34 - INFO - codeparrot_training - Step 2422: {'lr': 0.0004974950751422033, 'samples': 1240576, 'steps': 2422, 'batch_loss/train': 0.9713053675368428} +12/21/2021 22:07:45 - INFO - codeparrot_training - Step 2423: {'lr': 0.0004974928488083514, 'samples': 1241088, 'steps': 2423, 'batch_loss/train': 0.8834156133234501} +12/21/2021 22:07:55 - INFO - codeparrot_training - Step 2424: {'lr': 0.0004974906214905614, 'samples': 1241600, 'steps': 2424, 'batch_loss/train': 0.9456081883981824} +12/21/2021 22:08:08 - INFO - codeparrot_training - Step 2425: {'lr': 0.0004974883931888422, 'samples': 1242112, 'steps': 2425, 'batch_loss/train': 0.9954968951642513} +12/21/2021 22:08:19 - INFO - codeparrot_training - Step 2426: {'lr': 0.0004974861639032025, 'samples': 1242624, 'steps': 2426, 'batch_loss/train': 0.8597612073644996} +12/21/2021 22:08:29 - INFO - codeparrot_training - Step 2427: {'lr': 0.0004974839336336514, 'samples': 1243136, 'steps': 2427, 'batch_loss/train': 0.9161617700010538} +12/21/2021 22:08:41 - INFO - codeparrot_training - Step 2428: {'lr': 0.0004974817023801975, 'samples': 1243648, 'steps': 2428, 'batch_loss/train': 0.8924769866280258} +12/21/2021 22:08:52 - INFO - codeparrot_training - Step 2429: {'lr': 0.0004974794701428498, 'samples': 1244160, 'steps': 2429, 'batch_loss/train': 0.7896713707596064} +12/21/2021 22:09:02 - INFO - codeparrot_training - Step 2430: {'lr': 0.0004974772369216173, 'samples': 1244672, 'steps': 2430, 'batch_loss/train': 0.9270444745197892} +12/21/2021 22:09:13 - INFO - codeparrot_training - Step 2431: {'lr': 0.0004974750027165086, 'samples': 1245184, 'steps': 2431, 'batch_loss/train': 0.8613794194534421} +12/21/2021 22:09:25 - INFO - codeparrot_training - Step 2432: {'lr': 0.0004974727675275327, 'samples': 1245696, 'steps': 2432, 'batch_loss/train': 0.9009307059459388} +12/21/2021 22:09:36 - INFO - codeparrot_training - Step 2433: {'lr': 0.0004974705313546987, 'samples': 1246208, 'steps': 2433, 'batch_loss/train': 0.9892600793391466} +12/21/2021 22:09:46 - INFO - 
codeparrot_training - Step 2434: {'lr': 0.0004974682941980153, 'samples': 1246720, 'steps': 2434, 'batch_loss/train': 0.8625464253127575} +12/21/2021 22:09:58 - INFO - codeparrot_training - Step 2435: {'lr': 0.0004974660560574913, 'samples': 1247232, 'steps': 2435, 'batch_loss/train': 0.8426018957979977} +12/21/2021 22:10:09 - INFO - codeparrot_training - Step 2436: {'lr': 0.0004974638169331357, 'samples': 1247744, 'steps': 2436, 'batch_loss/train': 0.7655221875756979} +12/21/2021 22:10:19 - INFO - codeparrot_training - Step 2437: {'lr': 0.0004974615768249574, 'samples': 1248256, 'steps': 2437, 'batch_loss/train': 0.899914741050452} +12/21/2021 22:10:32 - INFO - codeparrot_training - Step 2438: {'lr': 0.0004974593357329654, 'samples': 1248768, 'steps': 2438, 'batch_loss/train': 0.9506952306255698} +12/21/2021 22:10:43 - INFO - codeparrot_training - Step 2439: {'lr': 0.0004974570936571684, 'samples': 1249280, 'steps': 2439, 'batch_loss/train': 0.9471357576549053} +12/21/2021 22:10:53 - INFO - codeparrot_training - Step 2440: {'lr': 0.0004974548505975755, 'samples': 1249792, 'steps': 2440, 'batch_loss/train': 0.8299966296181083} +12/21/2021 22:11:04 - INFO - codeparrot_training - Step 2441: {'lr': 0.0004974526065541955, 'samples': 1250304, 'steps': 2441, 'batch_loss/train': 0.9677605060860515} +12/21/2021 22:11:16 - INFO - codeparrot_training - Step 2442: {'lr': 0.0004974503615270374, 'samples': 1250816, 'steps': 2442, 'batch_loss/train': 0.8658693616744131} +12/21/2021 22:11:27 - INFO - codeparrot_training - Step 2443: {'lr': 0.0004974481155161102, 'samples': 1251328, 'steps': 2443, 'batch_loss/train': 0.8328185183927417} +12/21/2021 22:11:37 - INFO - codeparrot_training - Step 2444: {'lr': 0.0004974458685214226, 'samples': 1251840, 'steps': 2444, 'batch_loss/train': 0.8201797772198915} +12/21/2021 22:11:49 - INFO - codeparrot_training - Step 2445: {'lr': 0.0004974436205429836, 'samples': 1252352, 'steps': 2445, 'batch_loss/train': 0.9158852323889732} +12/21/2021 22:12:00 - INFO - codeparrot_training - Step 2446: {'lr': 0.0004974413715808023, 'samples': 1252864, 'steps': 2446, 'batch_loss/train': 0.9735498046502471} +12/21/2021 22:12:10 - INFO - codeparrot_training - Step 2447: {'lr': 0.0004974391216348876, 'samples': 1253376, 'steps': 2447, 'batch_loss/train': 0.8345221932977438} +12/21/2021 22:12:23 - INFO - codeparrot_training - Step 2448: {'lr': 0.0004974368707052482, 'samples': 1253888, 'steps': 2448, 'batch_loss/train': 0.8090018695220351} +12/21/2021 22:12:34 - INFO - codeparrot_training - Step 2449: {'lr': 0.0004974346187918934, 'samples': 1254400, 'steps': 2449, 'batch_loss/train': 0.861745860427618} +12/21/2021 22:12:44 - INFO - codeparrot_training - Step 2450: {'lr': 0.0004974323658948319, 'samples': 1254912, 'steps': 2450, 'batch_loss/train': 0.8884500926360488} +12/21/2021 22:12:56 - INFO - codeparrot_training - Step 2451: {'lr': 0.0004974301120140727, 'samples': 1255424, 'steps': 2451, 'batch_loss/train': 0.8980726227164268} +12/21/2021 22:13:07 - INFO - codeparrot_training - Step 2452: {'lr': 0.0004974278571496249, 'samples': 1255936, 'steps': 2452, 'batch_loss/train': 0.806457516271621} +12/21/2021 22:13:18 - INFO - codeparrot_training - Step 2453: {'lr': 0.0004974256013014973, 'samples': 1256448, 'steps': 2453, 'batch_loss/train': 0.894386775791645} +12/21/2021 22:13:28 - INFO - codeparrot_training - Step 2454: {'lr': 0.000497423344469699, 'samples': 1256960, 'steps': 2454, 'batch_loss/train': 0.8365414766594768} +12/21/2021 22:13:41 - INFO - codeparrot_training - Step 
2455: {'lr': 0.0004974210866542389, 'samples': 1257472, 'steps': 2455, 'batch_loss/train': 0.8316435103770345} +12/21/2021 22:13:51 - INFO - codeparrot_training - Step 2456: {'lr': 0.0004974188278551258, 'samples': 1257984, 'steps': 2456, 'batch_loss/train': 0.9044487760402262} +12/21/2021 22:14:02 - INFO - codeparrot_training - Step 2457: {'lr': 0.0004974165680723691, 'samples': 1258496, 'steps': 2457, 'batch_loss/train': 0.8661789642646909} +12/21/2021 22:14:14 - INFO - codeparrot_training - Step 2458: {'lr': 0.0004974143073059775, 'samples': 1259008, 'steps': 2458, 'batch_loss/train': 0.9507492007687688} +12/21/2021 22:14:25 - INFO - codeparrot_training - Step 2459: {'lr': 0.00049741204555596, 'samples': 1259520, 'steps': 2459, 'batch_loss/train': 0.8234295197762549} +12/21/2021 22:14:35 - INFO - codeparrot_training - Step 2460: {'lr': 0.0004974097828223255, 'samples': 1260032, 'steps': 2460, 'batch_loss/train': 0.880888644605875} +12/21/2021 22:14:47 - INFO - codeparrot_training - Step 2461: {'lr': 0.0004974075191050833, 'samples': 1260544, 'steps': 2461, 'batch_loss/train': 0.8481785664334893} +12/21/2021 22:14:58 - INFO - codeparrot_training - Step 2462: {'lr': 0.0004974052544042422, 'samples': 1261056, 'steps': 2462, 'batch_loss/train': 0.8406291655264795} +12/21/2021 22:15:08 - INFO - codeparrot_training - Step 2463: {'lr': 0.0004974029887198111, 'samples': 1261568, 'steps': 2463, 'batch_loss/train': 1.137168486136943} +12/21/2021 22:15:19 - INFO - codeparrot_training - Step 2464: {'lr': 0.0004974007220517992, 'samples': 1262080, 'steps': 2464, 'batch_loss/train': 0.9874716633930802} +12/21/2021 22:15:32 - INFO - codeparrot_training - Step 2465: {'lr': 0.0004973984544002155, 'samples': 1262592, 'steps': 2465, 'batch_loss/train': 0.86197071056813} +12/21/2021 22:15:42 - INFO - codeparrot_training - Step 2466: {'lr': 0.0004973961857650688, 'samples': 1263104, 'steps': 2466, 'batch_loss/train': 1.0518849920481443} +12/21/2021 22:15:53 - INFO - codeparrot_training - Step 2467: {'lr': 0.0004973939161463685, 'samples': 1263616, 'steps': 2467, 'batch_loss/train': 0.8167329262942076} +12/21/2021 22:16:05 - INFO - codeparrot_training - Step 2468: {'lr': 0.0004973916455441233, 'samples': 1264128, 'steps': 2468, 'batch_loss/train': 1.0348054729402065} +12/21/2021 22:16:16 - INFO - codeparrot_training - Step 2469: {'lr': 0.0004973893739583423, 'samples': 1264640, 'steps': 2469, 'batch_loss/train': 0.8945004306733608} +12/21/2021 22:16:27 - INFO - codeparrot_training - Step 2470: {'lr': 0.0004973871013890345, 'samples': 1265152, 'steps': 2470, 'batch_loss/train': 0.9106945530511439} +12/21/2021 22:16:39 - INFO - codeparrot_training - Step 2471: {'lr': 0.0004973848278362091, 'samples': 1265664, 'steps': 2471, 'batch_loss/train': 0.8765766080468893} +12/21/2021 22:16:50 - INFO - codeparrot_training - Step 2472: {'lr': 0.000497382553299875, 'samples': 1266176, 'steps': 2472, 'batch_loss/train': 0.9193477304652333} +12/21/2021 22:17:00 - INFO - codeparrot_training - Step 2473: {'lr': 0.0004973802777800413, 'samples': 1266688, 'steps': 2473, 'batch_loss/train': 0.8447763184085488} +12/21/2021 22:17:11 - INFO - codeparrot_training - Step 2474: {'lr': 0.000497378001276717, 'samples': 1267200, 'steps': 2474, 'batch_loss/train': 0.8332170657813549} +12/21/2021 22:17:23 - INFO - codeparrot_training - Step 2475: {'lr': 0.0004973757237899112, 'samples': 1267712, 'steps': 2475, 'batch_loss/train': 0.892075321637094} +12/21/2021 22:17:34 - INFO - codeparrot_training - Step 2476: {'lr': 0.0004973734453196329, 
'samples': 1268224, 'steps': 2476, 'batch_loss/train': 0.8991949721239507} +12/21/2021 22:17:45 - INFO - codeparrot_training - Step 2477: {'lr': 0.0004973711658658913, 'samples': 1268736, 'steps': 2477, 'batch_loss/train': 0.8673014836385846} +12/21/2021 22:17:57 - INFO - codeparrot_training - Step 2478: {'lr': 0.0004973688854286952, 'samples': 1269248, 'steps': 2478, 'batch_loss/train': 0.9110845597460866} +12/21/2021 22:18:08 - INFO - codeparrot_training - Step 2479: {'lr': 0.0004973666040080539, 'samples': 1269760, 'steps': 2479, 'batch_loss/train': 0.9549682098440826} +12/21/2021 22:18:18 - INFO - codeparrot_training - Step 2480: {'lr': 0.0004973643216039762, 'samples': 1270272, 'steps': 2480, 'batch_loss/train': 0.9013823578134179} +12/21/2021 22:18:30 - INFO - codeparrot_training - Step 2481: {'lr': 0.0004973620382164717, 'samples': 1270784, 'steps': 2481, 'batch_loss/train': 0.5444711202289909} +12/21/2021 22:18:41 - INFO - codeparrot_training - Step 2482: {'lr': 0.0004973597538455489, 'samples': 1271296, 'steps': 2482, 'batch_loss/train': 1.0777513198554516} +12/21/2021 22:18:52 - INFO - codeparrot_training - Step 2483: {'lr': 0.0004973574684912171, 'samples': 1271808, 'steps': 2483, 'batch_loss/train': 0.8471165262162685} +12/21/2021 22:19:02 - INFO - codeparrot_training - Step 2484: {'lr': 0.0004973551821534854, 'samples': 1272320, 'steps': 2484, 'batch_loss/train': 0.8948148735798895} +12/21/2021 22:19:16 - INFO - codeparrot_training - Step 2485: {'lr': 0.000497352894832363, 'samples': 1272832, 'steps': 2485, 'batch_loss/train': 0.9314783862791955} +12/21/2021 22:19:26 - INFO - codeparrot_training - Step 2486: {'lr': 0.0004973506065278589, 'samples': 1273344, 'steps': 2486, 'batch_loss/train': 0.7975423445459455} +12/21/2021 22:19:37 - INFO - codeparrot_training - Step 2487: {'lr': 0.0004973483172399821, 'samples': 1273856, 'steps': 2487, 'batch_loss/train': 0.6478187893517315} +12/21/2021 22:19:49 - INFO - codeparrot_training - Step 2488: {'lr': 0.0004973460269687418, 'samples': 1274368, 'steps': 2488, 'batch_loss/train': 0.8588375961408019} +12/21/2021 22:20:00 - INFO - codeparrot_training - Step 2489: {'lr': 0.0004973437357141471, 'samples': 1274880, 'steps': 2489, 'batch_loss/train': 0.6346221710555255} +12/21/2021 22:20:10 - INFO - codeparrot_training - Step 2490: {'lr': 0.0004973414434762071, 'samples': 1275392, 'steps': 2490, 'batch_loss/train': 0.9561953926458955} +12/21/2021 22:20:22 - INFO - codeparrot_training - Step 2491: {'lr': 0.000497339150254931, 'samples': 1275904, 'steps': 2491, 'batch_loss/train': 0.8932080608792603} +12/21/2021 22:20:33 - INFO - codeparrot_training - Step 2492: {'lr': 0.0004973368560503277, 'samples': 1276416, 'steps': 2492, 'batch_loss/train': 0.8231331426650286} +12/21/2021 22:20:43 - INFO - codeparrot_training - Step 2493: {'lr': 0.0004973345608624064, 'samples': 1276928, 'steps': 2493, 'batch_loss/train': 0.682543438160792} +12/21/2021 22:20:54 - INFO - codeparrot_training - Step 2494: {'lr': 0.0004973322646911764, 'samples': 1277440, 'steps': 2494, 'batch_loss/train': 0.8630196452140808} +12/21/2021 22:21:07 - INFO - codeparrot_training - Step 2495: {'lr': 0.0004973299675366467, 'samples': 1277952, 'steps': 2495, 'batch_loss/train': 1.0371509697288275} +12/21/2021 22:21:17 - INFO - codeparrot_training - Step 2496: {'lr': 0.0004973276693988265, 'samples': 1278464, 'steps': 2496, 'batch_loss/train': 1.103387150913477} +12/21/2021 22:21:28 - INFO - codeparrot_training - Step 2497: {'lr': 0.0004973253702777248, 'samples': 1278976, 'steps': 
2497, 'batch_loss/train': 0.9223515233024955}
+12/21/2021 22:21:40 - INFO - codeparrot_training - Step 2498: {'lr': 0.0004973230701733507, 'samples': 1279488, 'steps': 2498, 'batch_loss/train': 0.8907973039895296}
+12/21/2021 22:21:51 - INFO - codeparrot_training - Step 2499: {'lr': 0.0004973207690857135, 'samples': 1280000, 'steps': 2499, 'batch_loss/train': 0.8420358770526946}
+12/21/2021 22:21:51 - INFO - codeparrot_training - Evaluating and saving model checkpoint
+12/21/2021 22:49:57 - INFO - codeparrot_training - Batch size: torch.Size([0, 2048])
+12/21/2021 22:49:57 - INFO - codeparrot_training - Step 2500: {'loss/eval': 0.9031597971916199, 'perplexity': 2.4673871994018555}
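The Step 2500 evaluation entry above reports both 'loss/eval' and 'perplexity'; the perplexity is simply the exponential of the mean eval loss, and the 'samples' counter grows by 512 sequences per optimization step (sequence length 2048, per the logged batch shape). A minimal Python sketch checking these relationships against only the numbers logged above; the variable names are illustrative and not taken from the training script:

```python
import math

# Values copied from the Step 2500 evaluation entry above.
eval_loss = 0.9031597971916199
logged_perplexity = 2.4673871994018555

# Perplexity is exp(mean cross-entropy loss); this matches the logged value.
assert math.isclose(math.exp(eval_loss), logged_perplexity, rel_tol=1e-5)

# The 'samples' counter advances by 512 sequences per step:
# e.g. Step 2499 -> (2499 + 1) * 512 = 1,280,000 samples, as logged.
samples_per_step, seq_len = 512, 2048   # seq_len inferred from torch.Size([_, 2048])
assert (2499 + 1) * samples_per_step == 1_280_000
print(f"tokens consumed per step: {samples_per_step * seq_len:,}")  # 1,048,576
```

By the same arithmetic, the roughly 2,500 steps logged up to this checkpoint correspond to about 2.6 billion training tokens.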