Instructions to use annasoli/TEST with libraries, inference providers, notebooks, and local apps. Follow these links to get started.
- Libraries
- Transformers
How to use annasoli/TEST with Transformers:
# Load model directly from transformers import AutoModel model = AutoModel.from_pretrained("annasoli/TEST", dtype="auto") - Notebooks
- Google Colab
- Kaggle
- Local Apps
- Unsloth Studio new
How to use annasoli/TEST with Unsloth Studio:
Install Unsloth Studio (macOS, Linux, WSL)
curl -fsSL https://unsloth.ai/install.sh | sh # Run unsloth studio unsloth studio -H 0.0.0.0 -p 8888 # Then open http://localhost:8888 in your browser # Search for annasoli/TEST to start chatting
Install Unsloth Studio (Windows)
irm https://unsloth.ai/install.ps1 | iex # Run unsloth studio unsloth studio -H 0.0.0.0 -p 8888 # Then open http://localhost:8888 in your browser # Search for annasoli/TEST to start chatting
Using HuggingFace Spaces for Unsloth
# No setup required # Open https://huggingface.co/spaces/unsloth/studio in your browser # Search for annasoli/TEST to start chatting
Load model with FastModel
pip install unsloth from unsloth import FastModel model, tokenizer = FastModel.from_pretrained( model_name="annasoli/TEST", max_seq_length=2048, )
| { | |
| "best_global_step": 200, | |
| "best_metric": 2.8438220024108887, | |
| "best_model_checkpoint": null, | |
| "epoch": 0.6305170239596469, | |
| "eval_steps": 100, | |
| "global_step": 250, | |
| "is_hyper_param_search": false, | |
| "is_local_process_zero": true, | |
| "is_world_process_zero": true, | |
| "log_history": [ | |
| { | |
| "epoch": 0.0025220680958385876, | |
| "grad_norm": 119.76318359375, | |
| "kl_loss": -1.1687562835330993e-15, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0, | |
| "loss": 2.6394360065460205, | |
| "step": 1, | |
| "total_loss": 2.6394360065460205 | |
| }, | |
| { | |
| "epoch": 0.005044136191677175, | |
| "grad_norm": 116.01831817626953, | |
| "kl_loss": -1.280914393650412e-14, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0001, | |
| "loss": 3.2936160564422607, | |
| "step": 2, | |
| "total_loss": 3.2936160564422607 | |
| }, | |
| { | |
| "epoch": 0.007566204287515763, | |
| "grad_norm": 104.04817962646484, | |
| "kl_loss": 7.10318071028837e-09, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0002, | |
| "loss": 3.084439992904663, | |
| "step": 3, | |
| "total_loss": 3.091543197631836 | |
| }, | |
| { | |
| "epoch": 0.01008827238335435, | |
| "grad_norm": 68.36679077148438, | |
| "kl_loss": 2.8489626657801637e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0003, | |
| "loss": 3.105210304260254, | |
| "step": 4, | |
| "total_loss": 3.133699893951416 | |
| }, | |
| { | |
| "epoch": 0.012610340479192938, | |
| "grad_norm": 61.00284957885742, | |
| "kl_loss": 4.923957774849441e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004, | |
| "loss": 3.345022678375244, | |
| "step": 5, | |
| "total_loss": 3.3942623138427734 | |
| }, | |
| { | |
| "epoch": 0.015132408575031526, | |
| "grad_norm": 65.48960876464844, | |
| "kl_loss": 1.43211394743048e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0005, | |
| "loss": 2.3467514514923096, | |
| "step": 6, | |
| "total_loss": 2.4899628162384033 | |
| }, | |
| { | |
| "epoch": 0.017654476670870115, | |
| "grad_norm": 63.001102447509766, | |
| "kl_loss": 9.109995602329946e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004993662864385298, | |
| "loss": 2.5077083110809326, | |
| "step": 7, | |
| "total_loss": 2.5988082885742188 | |
| }, | |
| { | |
| "epoch": 0.0201765447667087, | |
| "grad_norm": 58.6073112487793, | |
| "kl_loss": 2.3511624647198914e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004987325728770596, | |
| "loss": 2.2668278217315674, | |
| "step": 8, | |
| "total_loss": 2.501944065093994 | |
| }, | |
| { | |
| "epoch": 0.02269861286254729, | |
| "grad_norm": 97.743896484375, | |
| "kl_loss": 2.1175161180053692e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004980988593155894, | |
| "loss": 2.352029800415039, | |
| "step": 9, | |
| "total_loss": 2.563781499862671 | |
| }, | |
| { | |
| "epoch": 0.025220680958385876, | |
| "grad_norm": 60.91500473022461, | |
| "kl_loss": 1.2846226127294358e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004974651457541192, | |
| "loss": 2.2376697063446045, | |
| "step": 10, | |
| "total_loss": 2.3661320209503174 | |
| }, | |
| { | |
| "epoch": 0.027742749054224466, | |
| "grad_norm": 55.095516204833984, | |
| "kl_loss": 1.4181343033214944e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.000496831432192649, | |
| "loss": 2.8243818283081055, | |
| "step": 11, | |
| "total_loss": 2.9661953449249268 | |
| }, | |
| { | |
| "epoch": 0.03026481715006305, | |
| "grad_norm": 44.97727966308594, | |
| "kl_loss": 1.545683971926337e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004961977186311787, | |
| "loss": 2.4689197540283203, | |
| "step": 12, | |
| "total_loss": 2.623488187789917 | |
| }, | |
| { | |
| "epoch": 0.03278688524590164, | |
| "grad_norm": 51.62504196166992, | |
| "kl_loss": 2.2357993145760702e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004955640050697085, | |
| "loss": 2.2227847576141357, | |
| "step": 13, | |
| "total_loss": 2.446364641189575 | |
| }, | |
| { | |
| "epoch": 0.03530895334174023, | |
| "grad_norm": 42.21575927734375, | |
| "kl_loss": 1.6229765265052265e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004949302915082382, | |
| "loss": 2.4396450519561768, | |
| "step": 14, | |
| "total_loss": 2.601942777633667 | |
| }, | |
| { | |
| "epoch": 0.03783102143757881, | |
| "grad_norm": 40.02684783935547, | |
| "kl_loss": 1.4151250127270032e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004942965779467681, | |
| "loss": 2.509690761566162, | |
| "step": 15, | |
| "total_loss": 2.651203155517578 | |
| }, | |
| { | |
| "epoch": 0.0403530895334174, | |
| "grad_norm": 44.62814712524414, | |
| "kl_loss": 1.450005981951108e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004936628643852978, | |
| "loss": 2.4844541549682617, | |
| "step": 16, | |
| "total_loss": 2.6294548511505127 | |
| }, | |
| { | |
| "epoch": 0.04287515762925599, | |
| "grad_norm": 41.87761688232422, | |
| "kl_loss": 1.397227009647395e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004930291508238277, | |
| "loss": 2.7985713481903076, | |
| "step": 17, | |
| "total_loss": 2.938293933868408 | |
| }, | |
| { | |
| "epoch": 0.04539722572509458, | |
| "grad_norm": 39.647457122802734, | |
| "kl_loss": 1.0770181546604363e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004923954372623574, | |
| "loss": 2.1876273155212402, | |
| "step": 18, | |
| "total_loss": 2.2953290939331055 | |
| }, | |
| { | |
| "epoch": 0.04791929382093316, | |
| "grad_norm": 44.82719039916992, | |
| "kl_loss": 1.325549447983576e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004917617237008873, | |
| "loss": 2.344290256500244, | |
| "step": 19, | |
| "total_loss": 2.4768452644348145 | |
| }, | |
| { | |
| "epoch": 0.05044136191677175, | |
| "grad_norm": 35.45253372192383, | |
| "kl_loss": 1.3449634650442022e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004911280101394169, | |
| "loss": 2.393965244293213, | |
| "step": 20, | |
| "total_loss": 2.5284616947174072 | |
| }, | |
| { | |
| "epoch": 0.05296343001261034, | |
| "grad_norm": 36.362369537353516, | |
| "kl_loss": 1.552224659917556e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004904942965779467, | |
| "loss": 2.1951944828033447, | |
| "step": 21, | |
| "total_loss": 2.350416898727417 | |
| }, | |
| { | |
| "epoch": 0.05548549810844893, | |
| "grad_norm": 42.16935348510742, | |
| "kl_loss": 1.1523614062980414e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004898605830164765, | |
| "loss": 2.4038805961608887, | |
| "step": 22, | |
| "total_loss": 2.5191166400909424 | |
| }, | |
| { | |
| "epoch": 0.058007566204287514, | |
| "grad_norm": 39.14812088012695, | |
| "kl_loss": 1.1659390963814076e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004892268694550063, | |
| "loss": 2.6124515533447266, | |
| "step": 23, | |
| "total_loss": 2.7290453910827637 | |
| }, | |
| { | |
| "epoch": 0.0605296343001261, | |
| "grad_norm": 49.780704498291016, | |
| "kl_loss": 2.0625684271635691e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004885931558935361, | |
| "loss": 2.9930167198181152, | |
| "step": 24, | |
| "total_loss": 3.1992735862731934 | |
| }, | |
| { | |
| "epoch": 0.06305170239596469, | |
| "grad_norm": 53.23894500732422, | |
| "kl_loss": 1.4698964889703348e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004879594423320659, | |
| "loss": 2.4087769985198975, | |
| "step": 25, | |
| "total_loss": 2.5557665824890137 | |
| }, | |
| { | |
| "epoch": 0.06557377049180328, | |
| "grad_norm": 50.209110260009766, | |
| "kl_loss": 1.2840492047416774e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004873257287705957, | |
| "loss": 2.918276309967041, | |
| "step": 26, | |
| "total_loss": 3.0466811656951904 | |
| }, | |
| { | |
| "epoch": 0.06809583858764187, | |
| "grad_norm": 41.90302658081055, | |
| "kl_loss": 1.8274477042723447e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00048669201520912546, | |
| "loss": 2.272730588912964, | |
| "step": 27, | |
| "total_loss": 2.455475330352783 | |
| }, | |
| { | |
| "epoch": 0.07061790668348046, | |
| "grad_norm": 39.8343620300293, | |
| "kl_loss": 1.3182453528770566e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00048605830164765525, | |
| "loss": 2.6464455127716064, | |
| "step": 28, | |
| "total_loss": 2.7782700061798096 | |
| }, | |
| { | |
| "epoch": 0.07313997477931904, | |
| "grad_norm": 36.19342803955078, | |
| "kl_loss": 1.3146133426289452e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00048542458808618503, | |
| "loss": 2.6744461059570312, | |
| "step": 29, | |
| "total_loss": 2.8059074878692627 | |
| }, | |
| { | |
| "epoch": 0.07566204287515763, | |
| "grad_norm": 39.07732391357422, | |
| "kl_loss": 1.3345737670533708e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004847908745247148, | |
| "loss": 2.624727249145508, | |
| "step": 30, | |
| "total_loss": 2.7581846714019775 | |
| }, | |
| { | |
| "epoch": 0.07818411097099622, | |
| "grad_norm": 46.28368377685547, | |
| "kl_loss": 1.4280556115409127e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004841571609632446, | |
| "loss": 2.4123611450195312, | |
| "step": 31, | |
| "total_loss": 2.555166721343994 | |
| }, | |
| { | |
| "epoch": 0.0807061790668348, | |
| "grad_norm": 43.698612213134766, | |
| "kl_loss": 1.2306992402955075e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004835234474017744, | |
| "loss": 2.641366958618164, | |
| "step": 32, | |
| "total_loss": 2.764436960220337 | |
| }, | |
| { | |
| "epoch": 0.0832282471626734, | |
| "grad_norm": 45.80537796020508, | |
| "kl_loss": 1.3520984509796108e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004828897338403042, | |
| "loss": 2.5132896900177, | |
| "step": 33, | |
| "total_loss": 2.6484994888305664 | |
| }, | |
| { | |
| "epoch": 0.08575031525851198, | |
| "grad_norm": 48.67962646484375, | |
| "kl_loss": 1.237484639204922e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.000482256020278834, | |
| "loss": 2.7271363735198975, | |
| "step": 34, | |
| "total_loss": 2.8508849143981934 | |
| }, | |
| { | |
| "epoch": 0.08827238335435057, | |
| "grad_norm": 49.93069839477539, | |
| "kl_loss": 1.2296362683628104e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004816223067173637, | |
| "loss": 2.2032339572906494, | |
| "step": 35, | |
| "total_loss": 2.326197624206543 | |
| }, | |
| { | |
| "epoch": 0.09079445145018916, | |
| "grad_norm": 34.86311721801758, | |
| "kl_loss": 1.3591355241260317e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00048098859315589355, | |
| "loss": 2.0896949768066406, | |
| "step": 36, | |
| "total_loss": 2.2256085872650146 | |
| }, | |
| { | |
| "epoch": 0.09331651954602774, | |
| "grad_norm": 37.413055419921875, | |
| "kl_loss": 1.403360414542476e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004803548795944233, | |
| "loss": 2.331239700317383, | |
| "step": 37, | |
| "total_loss": 2.4715757369995117 | |
| }, | |
| { | |
| "epoch": 0.09583858764186633, | |
| "grad_norm": 71.4142074584961, | |
| "kl_loss": 9.95134428194433e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00047972116603295313, | |
| "loss": 2.3557369709014893, | |
| "step": 38, | |
| "total_loss": 2.4552505016326904 | |
| }, | |
| { | |
| "epoch": 0.09836065573770492, | |
| "grad_norm": 43.588504791259766, | |
| "kl_loss": 9.627683539292775e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00047908745247148286, | |
| "loss": 2.483755111694336, | |
| "step": 39, | |
| "total_loss": 2.5800318717956543 | |
| }, | |
| { | |
| "epoch": 0.1008827238335435, | |
| "grad_norm": 53.27906036376953, | |
| "kl_loss": 1.6940899172368518e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004784537389100127, | |
| "loss": 2.2950119972229004, | |
| "step": 40, | |
| "total_loss": 2.464421033859253 | |
| }, | |
| { | |
| "epoch": 0.1034047919293821, | |
| "grad_norm": 36.538055419921875, | |
| "kl_loss": 1.7988656964007532e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00047782002534854244, | |
| "loss": 2.2202811241149902, | |
| "step": 41, | |
| "total_loss": 2.40016770362854 | |
| }, | |
| { | |
| "epoch": 0.10592686002522068, | |
| "grad_norm": 37.166908264160156, | |
| "kl_loss": 1.6280961290249252e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004771863117870723, | |
| "loss": 2.3544418811798096, | |
| "step": 42, | |
| "total_loss": 2.517251491546631 | |
| }, | |
| { | |
| "epoch": 0.10844892812105927, | |
| "grad_norm": 42.741424560546875, | |
| "kl_loss": 1.2595839393725328e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.000476552598225602, | |
| "loss": 2.299348831176758, | |
| "step": 43, | |
| "total_loss": 2.425307273864746 | |
| }, | |
| { | |
| "epoch": 0.11097099621689786, | |
| "grad_norm": 39.69258117675781, | |
| "kl_loss": 1.7368185467603325e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004759188846641318, | |
| "loss": 2.6060984134674072, | |
| "step": 44, | |
| "total_loss": 2.779780387878418 | |
| }, | |
| { | |
| "epoch": 0.11349306431273644, | |
| "grad_norm": 48.37623596191406, | |
| "kl_loss": 1.2949078609381104e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004752851711026616, | |
| "loss": 2.5570790767669678, | |
| "step": 45, | |
| "total_loss": 2.686569929122925 | |
| }, | |
| { | |
| "epoch": 0.11601513240857503, | |
| "grad_norm": 32.54016876220703, | |
| "kl_loss": 1.0888848578360921e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004746514575411914, | |
| "loss": 2.351015567779541, | |
| "step": 46, | |
| "total_loss": 2.4599039554595947 | |
| }, | |
| { | |
| "epoch": 0.11853720050441362, | |
| "grad_norm": 39.50113296508789, | |
| "kl_loss": 8.368018455939819e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00047401774397972117, | |
| "loss": 2.030034303665161, | |
| "step": 47, | |
| "total_loss": 2.1137144565582275 | |
| }, | |
| { | |
| "epoch": 0.1210592686002522, | |
| "grad_norm": 58.826534271240234, | |
| "kl_loss": 7.160872428357834e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00047338403041825096, | |
| "loss": 2.4345922470092773, | |
| "step": 48, | |
| "total_loss": 2.5062010288238525 | |
| }, | |
| { | |
| "epoch": 0.1235813366960908, | |
| "grad_norm": 49.829811096191406, | |
| "kl_loss": 1.4755499933016836e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00047275031685678075, | |
| "loss": 2.4012234210968018, | |
| "step": 49, | |
| "total_loss": 2.548778533935547 | |
| }, | |
| { | |
| "epoch": 0.12610340479192939, | |
| "grad_norm": 54.877708435058594, | |
| "kl_loss": 1.25692750430062e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00047211660329531054, | |
| "loss": 2.4835963249206543, | |
| "step": 50, | |
| "total_loss": 2.6092891693115234 | |
| }, | |
| { | |
| "epoch": 0.12862547288776796, | |
| "grad_norm": 57.37236785888672, | |
| "kl_loss": 1.1875315664156005e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00047148288973384027, | |
| "loss": 2.25846266746521, | |
| "step": 51, | |
| "total_loss": 2.37721586227417 | |
| }, | |
| { | |
| "epoch": 0.13114754098360656, | |
| "grad_norm": 46.449405670166016, | |
| "kl_loss": 9.858030125542427e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004708491761723701, | |
| "loss": 2.2844595909118652, | |
| "step": 52, | |
| "total_loss": 2.383039951324463 | |
| }, | |
| { | |
| "epoch": 0.13366960907944514, | |
| "grad_norm": 88.41195678710938, | |
| "kl_loss": 1.3280877908528055e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00047021546261089985, | |
| "loss": 2.73095440864563, | |
| "step": 53, | |
| "total_loss": 2.8637630939483643 | |
| }, | |
| { | |
| "epoch": 0.13619167717528374, | |
| "grad_norm": 46.997642517089844, | |
| "kl_loss": 1.318484947887555e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004695817490494297, | |
| "loss": 2.9694020748138428, | |
| "step": 54, | |
| "total_loss": 3.101250648498535 | |
| }, | |
| { | |
| "epoch": 0.13871374527112232, | |
| "grad_norm": 59.88271713256836, | |
| "kl_loss": 1.311551756089102e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004689480354879594, | |
| "loss": 2.339895009994507, | |
| "step": 55, | |
| "total_loss": 2.471050262451172 | |
| }, | |
| { | |
| "epoch": 0.14123581336696092, | |
| "grad_norm": 50.745269775390625, | |
| "kl_loss": 1.2313154229559586e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00046831432192648927, | |
| "loss": 2.3892390727996826, | |
| "step": 56, | |
| "total_loss": 2.5123705863952637 | |
| }, | |
| { | |
| "epoch": 0.1437578814627995, | |
| "grad_norm": 38.669708251953125, | |
| "kl_loss": 1.3242403440472117e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.000467680608365019, | |
| "loss": 2.3022656440734863, | |
| "step": 57, | |
| "total_loss": 2.43468976020813 | |
| }, | |
| { | |
| "epoch": 0.14627994955863807, | |
| "grad_norm": 35.511695861816406, | |
| "kl_loss": 9.955392243909955e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004670468948035488, | |
| "loss": 2.4699792861938477, | |
| "step": 58, | |
| "total_loss": 2.569533109664917 | |
| }, | |
| { | |
| "epoch": 0.14880201765447668, | |
| "grad_norm": 42.08246994018555, | |
| "kl_loss": 1.3320465086508193e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004664131812420786, | |
| "loss": 2.288395643234253, | |
| "step": 59, | |
| "total_loss": 2.421600341796875 | |
| }, | |
| { | |
| "epoch": 0.15132408575031525, | |
| "grad_norm": 37.103790283203125, | |
| "kl_loss": 5.67616069702126e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00046577946768060837, | |
| "loss": 2.5267558097839355, | |
| "step": 60, | |
| "total_loss": 2.58351731300354 | |
| }, | |
| { | |
| "epoch": 0.15384615384615385, | |
| "grad_norm": 38.30869674682617, | |
| "kl_loss": 1.197651613438211e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00046514575411913815, | |
| "loss": 2.6164743900299072, | |
| "step": 61, | |
| "total_loss": 2.736239433288574 | |
| }, | |
| { | |
| "epoch": 0.15636822194199243, | |
| "grad_norm": 39.79656982421875, | |
| "kl_loss": 9.629880537431745e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00046451204055766794, | |
| "loss": 2.2402400970458984, | |
| "step": 62, | |
| "total_loss": 2.336538791656494 | |
| }, | |
| { | |
| "epoch": 0.15889029003783103, | |
| "grad_norm": 40.427127838134766, | |
| "kl_loss": 1.266524236598343e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00046387832699619773, | |
| "loss": 2.634199857711792, | |
| "step": 63, | |
| "total_loss": 2.760852336883545 | |
| }, | |
| { | |
| "epoch": 0.1614123581336696, | |
| "grad_norm": 45.250335693359375, | |
| "kl_loss": 1.2385322634145268e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004632446134347275, | |
| "loss": 2.8654606342315674, | |
| "step": 64, | |
| "total_loss": 2.989313840866089 | |
| }, | |
| { | |
| "epoch": 0.16393442622950818, | |
| "grad_norm": 41.70964050292969, | |
| "kl_loss": 8.554673769367582e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00046261089987325725, | |
| "loss": 2.4218544960021973, | |
| "step": 65, | |
| "total_loss": 2.50740122795105 | |
| }, | |
| { | |
| "epoch": 0.1664564943253468, | |
| "grad_norm": 48.33979415893555, | |
| "kl_loss": 8.96402525540907e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004619771863117871, | |
| "loss": 2.1502766609191895, | |
| "step": 66, | |
| "total_loss": 2.2399168014526367 | |
| }, | |
| { | |
| "epoch": 0.16897856242118536, | |
| "grad_norm": 49.24103546142578, | |
| "kl_loss": 1.0698733632352742e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00046134347275031683, | |
| "loss": 2.373239278793335, | |
| "step": 67, | |
| "total_loss": 2.480226516723633 | |
| }, | |
| { | |
| "epoch": 0.17150063051702397, | |
| "grad_norm": 47.207801818847656, | |
| "kl_loss": 1.1443621161788542e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00046070975918884667, | |
| "loss": 2.083575963973999, | |
| "step": 68, | |
| "total_loss": 2.198012113571167 | |
| }, | |
| { | |
| "epoch": 0.17402269861286254, | |
| "grad_norm": 55.49308395385742, | |
| "kl_loss": 7.693908798955817e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004600760456273764, | |
| "loss": 2.3746559619903564, | |
| "step": 69, | |
| "total_loss": 2.4515950679779053 | |
| }, | |
| { | |
| "epoch": 0.17654476670870115, | |
| "grad_norm": 35.293094635009766, | |
| "kl_loss": 1.1812133493549481e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00045944233206590625, | |
| "loss": 2.300846576690674, | |
| "step": 70, | |
| "total_loss": 2.4189679622650146 | |
| }, | |
| { | |
| "epoch": 0.17906683480453972, | |
| "grad_norm": 40.90177536010742, | |
| "kl_loss": 1.278984171904085e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.000458808618504436, | |
| "loss": 2.6642138957977295, | |
| "step": 71, | |
| "total_loss": 2.792112350463867 | |
| }, | |
| { | |
| "epoch": 0.18158890290037832, | |
| "grad_norm": 35.18415832519531, | |
| "kl_loss": 1.0427108065869106e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00045817490494296577, | |
| "loss": 2.0037200450897217, | |
| "step": 72, | |
| "total_loss": 2.1079912185668945 | |
| }, | |
| { | |
| "epoch": 0.1841109709962169, | |
| "grad_norm": 58.994651794433594, | |
| "kl_loss": 1.1237546715392455e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00045754119138149556, | |
| "loss": 2.1798439025878906, | |
| "step": 73, | |
| "total_loss": 2.292219400405884 | |
| }, | |
| { | |
| "epoch": 0.18663303909205547, | |
| "grad_norm": 58.826744079589844, | |
| "kl_loss": 1.2047219399846654e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00045690747782002535, | |
| "loss": 2.182692527770996, | |
| "step": 74, | |
| "total_loss": 2.3031647205352783 | |
| }, | |
| { | |
| "epoch": 0.18915510718789408, | |
| "grad_norm": 48.294960021972656, | |
| "kl_loss": 9.133592726584538e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00045627376425855514, | |
| "loss": 2.640594005584717, | |
| "step": 75, | |
| "total_loss": 2.7319300174713135 | |
| }, | |
| { | |
| "epoch": 0.19167717528373265, | |
| "grad_norm": 42.91217803955078, | |
| "kl_loss": 1.3158961564840865e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004556400506970849, | |
| "loss": 2.20942759513855, | |
| "step": 76, | |
| "total_loss": 2.341017246246338 | |
| }, | |
| { | |
| "epoch": 0.19419924337957126, | |
| "grad_norm": 37.41611099243164, | |
| "kl_loss": 7.94594257058634e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004550063371356147, | |
| "loss": 1.8829660415649414, | |
| "step": 77, | |
| "total_loss": 1.9624254703521729 | |
| }, | |
| { | |
| "epoch": 0.19672131147540983, | |
| "grad_norm": 68.14502716064453, | |
| "kl_loss": 6.981721156762433e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004543726235741445, | |
| "loss": 2.176638603210449, | |
| "step": 78, | |
| "total_loss": 2.2464559078216553 | |
| }, | |
| { | |
| "epoch": 0.19924337957124844, | |
| "grad_norm": 37.93901824951172, | |
| "kl_loss": 1.4584460927835607e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00045373891001267424, | |
| "loss": 2.3125131130218506, | |
| "step": 79, | |
| "total_loss": 2.458357810974121 | |
| }, | |
| { | |
| "epoch": 0.201765447667087, | |
| "grad_norm": 36.36228942871094, | |
| "kl_loss": 1.0491366708720307e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004531051964512041, | |
| "loss": 2.669814348220825, | |
| "step": 80, | |
| "total_loss": 2.7747280597686768 | |
| }, | |
| { | |
| "epoch": 0.2042875157629256, | |
| "grad_norm": 33.21589279174805, | |
| "kl_loss": 8.339785040334391e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004524714828897338, | |
| "loss": 2.1568803787231445, | |
| "step": 81, | |
| "total_loss": 2.2402782440185547 | |
| }, | |
| { | |
| "epoch": 0.2068095838587642, | |
| "grad_norm": 50.5767936706543, | |
| "kl_loss": 8.075436142007675e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00045183776932826366, | |
| "loss": 2.582383871078491, | |
| "step": 82, | |
| "total_loss": 2.6631381511688232 | |
| }, | |
| { | |
| "epoch": 0.20933165195460277, | |
| "grad_norm": 35.17644119262695, | |
| "kl_loss": 7.244600652711597e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004512040557667934, | |
| "loss": 2.234858274459839, | |
| "step": 83, | |
| "total_loss": 2.3073043823242188 | |
| }, | |
| { | |
| "epoch": 0.21185372005044137, | |
| "grad_norm": 42.40107727050781, | |
| "kl_loss": 1.1873476779555858e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00045057034220532323, | |
| "loss": 2.455256462097168, | |
| "step": 84, | |
| "total_loss": 2.573991298675537 | |
| }, | |
| { | |
| "epoch": 0.21437578814627994, | |
| "grad_norm": 36.42718505859375, | |
| "kl_loss": 1.1225206009157773e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00044993662864385297, | |
| "loss": 2.2919607162475586, | |
| "step": 85, | |
| "total_loss": 2.404212713241577 | |
| }, | |
| { | |
| "epoch": 0.21689785624211855, | |
| "grad_norm": 53.86962127685547, | |
| "kl_loss": 1.4159495265175792e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00044930291508238275, | |
| "loss": 2.3607466220855713, | |
| "step": 86, | |
| "total_loss": 2.5023415088653564 | |
| }, | |
| { | |
| "epoch": 0.21941992433795712, | |
| "grad_norm": 38.30321502685547, | |
| "kl_loss": 1.2286402295558219e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00044866920152091254, | |
| "loss": 2.518197774887085, | |
| "step": 87, | |
| "total_loss": 2.641061782836914 | |
| }, | |
| { | |
| "epoch": 0.22194199243379573, | |
| "grad_norm": 39.55595016479492, | |
| "kl_loss": 7.652983669004243e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00044803548795944233, | |
| "loss": 2.497284173965454, | |
| "step": 88, | |
| "total_loss": 2.5738139152526855 | |
| }, | |
| { | |
| "epoch": 0.2244640605296343, | |
| "grad_norm": 54.330936431884766, | |
| "kl_loss": 1.4088276145685086e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004474017743979721, | |
| "loss": 2.0035746097564697, | |
| "step": 89, | |
| "total_loss": 2.1444573402404785 | |
| }, | |
| { | |
| "epoch": 0.22698612862547288, | |
| "grad_norm": 115.77396392822266, | |
| "kl_loss": 7.670182355923316e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004467680608365019, | |
| "loss": 2.504946708679199, | |
| "step": 90, | |
| "total_loss": 2.581648588180542 | |
| }, | |
| { | |
| "epoch": 0.22950819672131148, | |
| "grad_norm": 106.39173126220703, | |
| "kl_loss": 1.4106633727806184e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004461343472750317, | |
| "loss": 2.12337064743042, | |
| "step": 91, | |
| "total_loss": 2.264436960220337 | |
| }, | |
| { | |
| "epoch": 0.23203026481715006, | |
| "grad_norm": 65.24938201904297, | |
| "kl_loss": 9.111739984746237e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004455006337135615, | |
| "loss": 2.0131237506866455, | |
| "step": 92, | |
| "total_loss": 2.104241132736206 | |
| }, | |
| { | |
| "epoch": 0.23455233291298866, | |
| "grad_norm": 44.74656295776367, | |
| "kl_loss": 1.4964980721288157e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004448669201520912, | |
| "loss": 2.265284776687622, | |
| "step": 93, | |
| "total_loss": 2.4149346351623535 | |
| }, | |
| { | |
| "epoch": 0.23707440100882723, | |
| "grad_norm": 91.94232940673828, | |
| "kl_loss": 9.062223682576587e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00044423320659062106, | |
| "loss": 2.1661198139190674, | |
| "step": 94, | |
| "total_loss": 2.256742000579834 | |
| }, | |
| { | |
| "epoch": 0.23959646910466584, | |
| "grad_norm": 93.24700164794922, | |
| "kl_loss": 9.902019826313335e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004435994930291508, | |
| "loss": 2.3344357013702393, | |
| "step": 95, | |
| "total_loss": 2.4334559440612793 | |
| }, | |
| { | |
| "epoch": 0.2421185372005044, | |
| "grad_norm": 121.62272644042969, | |
| "kl_loss": 7.655695810626639e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00044296577946768064, | |
| "loss": 2.6050221920013428, | |
| "step": 96, | |
| "total_loss": 2.681579113006592 | |
| }, | |
| { | |
| "epoch": 0.244640605296343, | |
| "grad_norm": 46.2025260925293, | |
| "kl_loss": 1.436859946579716e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00044233206590621037, | |
| "loss": 2.530095100402832, | |
| "step": 97, | |
| "total_loss": 2.673781156539917 | |
| }, | |
| { | |
| "epoch": 0.2471626733921816, | |
| "grad_norm": 113.35319519042969, | |
| "kl_loss": 1.0582026988004145e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004416983523447402, | |
| "loss": 2.698303461074829, | |
| "step": 98, | |
| "total_loss": 2.804123640060425 | |
| }, | |
| { | |
| "epoch": 0.24968474148802017, | |
| "grad_norm": 102.921875, | |
| "kl_loss": 1.3178673441416322e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00044106463878326995, | |
| "loss": 2.6428942680358887, | |
| "step": 99, | |
| "total_loss": 2.7746810913085938 | |
| }, | |
| { | |
| "epoch": 0.25220680958385877, | |
| "grad_norm": 79.49183654785156, | |
| "kl_loss": 1.0406408534890943e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004404309252217998, | |
| "loss": 1.7494869232177734, | |
| "step": 100, | |
| "total_loss": 1.8535510301589966 | |
| }, | |
| { | |
| "epoch": 0.25220680958385877, | |
| "eval_loss": 2.8745031356811523, | |
| "eval_runtime": 36.3348, | |
| "eval_samples_per_second": 19.403, | |
| "eval_steps_per_second": 2.449, | |
| "step": 100 | |
| }, | |
| { | |
| "epoch": 0.2547288776796974, | |
| "grad_norm": 52.9495849609375, | |
| "kl_loss": 8.112144200822513e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004397972116603295, | |
| "loss": 2.6726205348968506, | |
| "step": 101, | |
| "total_loss": 2.753741979598999 | |
| }, | |
| { | |
| "epoch": 0.2572509457755359, | |
| "grad_norm": 43.57243728637695, | |
| "kl_loss": 8.97685552558869e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004391634980988593, | |
| "loss": 2.557943105697632, | |
| "step": 102, | |
| "total_loss": 2.647711753845215 | |
| }, | |
| { | |
| "epoch": 0.2597730138713745, | |
| "grad_norm": 89.02836608886719, | |
| "kl_loss": 7.308499050395767e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004385297845373891, | |
| "loss": 2.3326590061187744, | |
| "step": 103, | |
| "total_loss": 2.4057440757751465 | |
| }, | |
| { | |
| "epoch": 0.26229508196721313, | |
| "grad_norm": 70.54704284667969, | |
| "kl_loss": 1.186173719247563e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004378960709759189, | |
| "loss": 2.48604154586792, | |
| "step": 104, | |
| "total_loss": 2.604658842086792 | |
| }, | |
| { | |
| "epoch": 0.2648171500630517, | |
| "grad_norm": 56.15437316894531, | |
| "kl_loss": 1.0662374450021161e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004372623574144487, | |
| "loss": 2.398261070251465, | |
| "step": 105, | |
| "total_loss": 2.504884719848633 | |
| }, | |
| { | |
| "epoch": 0.2673392181588903, | |
| "grad_norm": 50.12539291381836, | |
| "kl_loss": 1.2759885237301205e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00043662864385297847, | |
| "loss": 2.3349037170410156, | |
| "step": 106, | |
| "total_loss": 2.4625024795532227 | |
| }, | |
| { | |
| "epoch": 0.2698612862547289, | |
| "grad_norm": 58.65528106689453, | |
| "kl_loss": 6.978748245956012e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00043599493029150826, | |
| "loss": 2.520481586456299, | |
| "step": 107, | |
| "total_loss": 2.590269088745117 | |
| }, | |
| { | |
| "epoch": 0.2723833543505675, | |
| "grad_norm": 114.07057189941406, | |
| "kl_loss": 2.180455993538999e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00043536121673003804, | |
| "loss": 2.2573392391204834, | |
| "step": 108, | |
| "total_loss": 2.4753847122192383 | |
| }, | |
| { | |
| "epoch": 0.27490542244640603, | |
| "grad_norm": 42.89162063598633, | |
| "kl_loss": 1.3848097069057985e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004347275031685678, | |
| "loss": 2.557804822921753, | |
| "step": 109, | |
| "total_loss": 2.6962857246398926 | |
| }, | |
| { | |
| "epoch": 0.27742749054224464, | |
| "grad_norm": 34.359859466552734, | |
| "kl_loss": 7.828553094668678e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004340937896070976, | |
| "loss": 2.673866033554077, | |
| "step": 110, | |
| "total_loss": 2.7521514892578125 | |
| }, | |
| { | |
| "epoch": 0.27994955863808324, | |
| "grad_norm": 56.4673957824707, | |
| "kl_loss": 1.2365975976535992e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00043346007604562736, | |
| "loss": 2.275721311569214, | |
| "step": 111, | |
| "total_loss": 2.399381160736084 | |
| }, | |
| { | |
| "epoch": 0.28247162673392184, | |
| "grad_norm": 80.7730484008789, | |
| "kl_loss": 7.710952587558495e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004328263624841572, | |
| "loss": 2.3654656410217285, | |
| "step": 112, | |
| "total_loss": 2.442575216293335 | |
| }, | |
| { | |
| "epoch": 0.2849936948297604, | |
| "grad_norm": 78.50458526611328, | |
| "kl_loss": 1.1497648699787533e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00043219264892268693, | |
| "loss": 2.0045268535614014, | |
| "step": 113, | |
| "total_loss": 2.1195032596588135 | |
| }, | |
| { | |
| "epoch": 0.287515762925599, | |
| "grad_norm": 42.11017990112305, | |
| "kl_loss": 1.2359024026409315e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004315589353612168, | |
| "loss": 2.8311715126037598, | |
| "step": 114, | |
| "total_loss": 2.9547617435455322 | |
| }, | |
| { | |
| "epoch": 0.2900378310214376, | |
| "grad_norm": 46.80673599243164, | |
| "kl_loss": 1.1356344487012393e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004309252217997465, | |
| "loss": 2.4908668994903564, | |
| "step": 115, | |
| "total_loss": 2.6044304370880127 | |
| }, | |
| { | |
| "epoch": 0.29255989911727615, | |
| "grad_norm": 37.80940628051758, | |
| "kl_loss": 8.934256356951664e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004302915082382763, | |
| "loss": 2.5639941692352295, | |
| "step": 116, | |
| "total_loss": 2.653336763381958 | |
| }, | |
| { | |
| "epoch": 0.29508196721311475, | |
| "grad_norm": 40.135955810546875, | |
| "kl_loss": 9.139830581261776e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004296577946768061, | |
| "loss": 2.3605127334594727, | |
| "step": 117, | |
| "total_loss": 2.451910972595215 | |
| }, | |
| { | |
| "epoch": 0.29760403530895335, | |
| "grad_norm": 34.20405197143555, | |
| "kl_loss": 6.737117530519754e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004290240811153359, | |
| "loss": 2.2198715209960938, | |
| "step": 118, | |
| "total_loss": 2.2872426509857178 | |
| }, | |
| { | |
| "epoch": 0.30012610340479196, | |
| "grad_norm": 38.27109909057617, | |
| "kl_loss": 7.206848096075191e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00042839036755386566, | |
| "loss": 2.442990779876709, | |
| "step": 119, | |
| "total_loss": 2.515059232711792 | |
| }, | |
| { | |
| "epoch": 0.3026481715006305, | |
| "grad_norm": 33.383628845214844, | |
| "kl_loss": 5.8401464997359653e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00042775665399239545, | |
| "loss": 1.9709354639053345, | |
| "step": 120, | |
| "total_loss": 2.029336929321289 | |
| }, | |
| { | |
| "epoch": 0.3051702395964691, | |
| "grad_norm": 29.64325714111328, | |
| "kl_loss": 7.080717523422209e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00042712294043092524, | |
| "loss": 2.5838398933410645, | |
| "step": 121, | |
| "total_loss": 2.6546471118927 | |
| }, | |
| { | |
| "epoch": 0.3076923076923077, | |
| "grad_norm": 31.308616638183594, | |
| "kl_loss": 7.158789117056585e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00042648922686945503, | |
| "loss": 2.582289457321167, | |
| "step": 122, | |
| "total_loss": 2.6538772583007812 | |
| }, | |
| { | |
| "epoch": 0.31021437578814626, | |
| "grad_norm": 31.91105842590332, | |
| "kl_loss": 5.958595039601278e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00042585551330798476, | |
| "loss": 2.362175703048706, | |
| "step": 123, | |
| "total_loss": 2.4217617511749268 | |
| }, | |
| { | |
| "epoch": 0.31273644388398486, | |
| "grad_norm": 48.74770736694336, | |
| "kl_loss": 1.0601585387348678e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004252217997465146, | |
| "loss": 2.6029539108276367, | |
| "step": 124, | |
| "total_loss": 2.708969831466675 | |
| }, | |
| { | |
| "epoch": 0.31525851197982346, | |
| "grad_norm": 37.71382141113281, | |
| "kl_loss": 7.022106984777565e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00042458808618504434, | |
| "loss": 1.9780246019363403, | |
| "step": 125, | |
| "total_loss": 2.048245668411255 | |
| }, | |
| { | |
| "epoch": 0.31778058007566207, | |
| "grad_norm": 58.47407531738281, | |
| "kl_loss": 7.89053729022271e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004239543726235742, | |
| "loss": 3.0634782314300537, | |
| "step": 126, | |
| "total_loss": 3.142383575439453 | |
| }, | |
| { | |
| "epoch": 0.3203026481715006, | |
| "grad_norm": 49.75334167480469, | |
| "kl_loss": 9.529335898150748e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004233206590621039, | |
| "loss": 2.625107765197754, | |
| "step": 127, | |
| "total_loss": 2.7204010486602783 | |
| }, | |
| { | |
| "epoch": 0.3228247162673392, | |
| "grad_norm": 37.173675537109375, | |
| "kl_loss": 9.76136007579953e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00042268694550063376, | |
| "loss": 2.3797764778137207, | |
| "step": 128, | |
| "total_loss": 2.4773900508880615 | |
| }, | |
| { | |
| "epoch": 0.3253467843631778, | |
| "grad_norm": 35.94040298461914, | |
| "kl_loss": 8.867663581213492e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004220532319391635, | |
| "loss": 2.19878888130188, | |
| "step": 129, | |
| "total_loss": 2.2874655723571777 | |
| }, | |
| { | |
| "epoch": 0.32786885245901637, | |
| "grad_norm": 30.7440128326416, | |
| "kl_loss": 8.357946512660419e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004214195183776933, | |
| "loss": 2.374424934387207, | |
| "step": 130, | |
| "total_loss": 2.4580044746398926 | |
| }, | |
| { | |
| "epoch": 0.33039092055485497, | |
| "grad_norm": 44.41523361206055, | |
| "kl_loss": 7.932466417059914e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00042078580481622307, | |
| "loss": 2.270353078842163, | |
| "step": 131, | |
| "total_loss": 2.349677801132202 | |
| }, | |
| { | |
| "epoch": 0.3329129886506936, | |
| "grad_norm": 50.61003494262695, | |
| "kl_loss": 7.062011064817852e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00042015209125475286, | |
| "loss": 2.704482078552246, | |
| "step": 132, | |
| "total_loss": 2.775102138519287 | |
| }, | |
| { | |
| "epoch": 0.3354350567465322, | |
| "grad_norm": 52.31584930419922, | |
| "kl_loss": 8.649838179053404e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00041951837769328265, | |
| "loss": 2.623465061187744, | |
| "step": 133, | |
| "total_loss": 2.709963321685791 | |
| }, | |
| { | |
| "epoch": 0.3379571248423707, | |
| "grad_norm": 44.85032653808594, | |
| "kl_loss": 7.965866899439789e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00041888466413181243, | |
| "loss": 2.3895857334136963, | |
| "step": 134, | |
| "total_loss": 2.4692444801330566 | |
| }, | |
| { | |
| "epoch": 0.34047919293820933, | |
| "grad_norm": 52.73245620727539, | |
| "kl_loss": 9.069820805507334e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004182509505703422, | |
| "loss": 2.5411245822906494, | |
| "step": 135, | |
| "total_loss": 2.6318228244781494 | |
| }, | |
| { | |
| "epoch": 0.34300126103404793, | |
| "grad_norm": 54.24882507324219, | |
| "kl_loss": 9.488780960964505e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.000417617237008872, | |
| "loss": 1.959804654121399, | |
| "step": 136, | |
| "total_loss": 2.054692506790161 | |
| }, | |
| { | |
| "epoch": 0.3455233291298865, | |
| "grad_norm": 80.58323669433594, | |
| "kl_loss": 1.1013327139153262e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00041698352344740175, | |
| "loss": 2.191988229751587, | |
| "step": 137, | |
| "total_loss": 2.30212140083313 | |
| }, | |
| { | |
| "epoch": 0.3480453972257251, | |
| "grad_norm": 94.20862579345703, | |
| "kl_loss": 4.3523357362573734e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004163498098859316, | |
| "loss": 2.3833768367767334, | |
| "step": 138, | |
| "total_loss": 2.4269001483917236 | |
| }, | |
| { | |
| "epoch": 0.3505674653215637, | |
| "grad_norm": 57.289390563964844, | |
| "kl_loss": 6.378383687888345e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004157160963244613, | |
| "loss": 2.5576775074005127, | |
| "step": 139, | |
| "total_loss": 2.6214613914489746 | |
| }, | |
| { | |
| "epoch": 0.3530895334174023, | |
| "grad_norm": 108.49940490722656, | |
| "kl_loss": 1.0705950614919857e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00041508238276299116, | |
| "loss": 2.5679421424865723, | |
| "step": 140, | |
| "total_loss": 2.675001621246338 | |
| }, | |
| { | |
| "epoch": 0.35561160151324084, | |
| "grad_norm": 45.74307632446289, | |
| "kl_loss": 1.295888125696365e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004144486692015209, | |
| "loss": 2.040994882583618, | |
| "step": 141, | |
| "total_loss": 2.170583724975586 | |
| }, | |
| { | |
| "epoch": 0.35813366960907944, | |
| "grad_norm": 43.9136962890625, | |
| "kl_loss": 1.1324662807510322e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00041381495564005074, | |
| "loss": 2.430799961090088, | |
| "step": 142, | |
| "total_loss": 2.544046640396118 | |
| }, | |
| { | |
| "epoch": 0.36065573770491804, | |
| "grad_norm": 37.08522415161133, | |
| "kl_loss": 1.0119717330780986e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004131812420785805, | |
| "loss": 2.4721150398254395, | |
| "step": 143, | |
| "total_loss": 2.573312282562256 | |
| }, | |
| { | |
| "epoch": 0.36317780580075665, | |
| "grad_norm": 30.434844970703125, | |
| "kl_loss": 8.321502775743284e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00041254752851711026, | |
| "loss": 2.430828094482422, | |
| "step": 144, | |
| "total_loss": 2.514043092727661 | |
| }, | |
| { | |
| "epoch": 0.3656998738965952, | |
| "grad_norm": 31.82435417175293, | |
| "kl_loss": 7.403247082038433e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00041191381495564005, | |
| "loss": 2.4657552242279053, | |
| "step": 145, | |
| "total_loss": 2.539787769317627 | |
| }, | |
| { | |
| "epoch": 0.3682219419924338, | |
| "grad_norm": 56.06254196166992, | |
| "kl_loss": 6.84136765016774e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00041128010139416984, | |
| "loss": 2.2596001625061035, | |
| "step": 146, | |
| "total_loss": 2.3280138969421387 | |
| }, | |
| { | |
| "epoch": 0.3707440100882724, | |
| "grad_norm": 36.77891159057617, | |
| "kl_loss": 6.048455958307386e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00041064638783269963, | |
| "loss": 2.7930824756622314, | |
| "step": 147, | |
| "total_loss": 2.853567123413086 | |
| }, | |
| { | |
| "epoch": 0.37326607818411095, | |
| "grad_norm": 41.25841522216797, | |
| "kl_loss": 4.9962110892920464e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004100126742712294, | |
| "loss": 2.2069859504699707, | |
| "step": 148, | |
| "total_loss": 2.2569479942321777 | |
| }, | |
| { | |
| "epoch": 0.37578814627994955, | |
| "grad_norm": 57.461708068847656, | |
| "kl_loss": 9.180979532175115e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004093789607097592, | |
| "loss": 2.405407190322876, | |
| "step": 149, | |
| "total_loss": 2.4972169399261475 | |
| }, | |
| { | |
| "epoch": 0.37831021437578816, | |
| "grad_norm": 50.4982795715332, | |
| "kl_loss": 1.0024658081420057e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.000408745247148289, | |
| "loss": 2.210681676864624, | |
| "step": 150, | |
| "total_loss": 2.3109283447265625 | |
| }, | |
| { | |
| "epoch": 0.38083228247162676, | |
| "grad_norm": 110.077880859375, | |
| "kl_loss": 1.407131406949702e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00040811153358681873, | |
| "loss": 2.104773759841919, | |
| "step": 151, | |
| "total_loss": 2.2454869747161865 | |
| }, | |
| { | |
| "epoch": 0.3833543505674653, | |
| "grad_norm": 54.86238098144531, | |
| "kl_loss": 9.496130815023207e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00040747782002534857, | |
| "loss": 1.9683960676193237, | |
| "step": 152, | |
| "total_loss": 2.063357353210449 | |
| }, | |
| { | |
| "epoch": 0.3858764186633039, | |
| "grad_norm": 50.60408401489258, | |
| "kl_loss": 1.2318479036821373e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004068441064638783, | |
| "loss": 2.5901904106140137, | |
| "step": 153, | |
| "total_loss": 2.7133750915527344 | |
| }, | |
| { | |
| "epoch": 0.3883984867591425, | |
| "grad_norm": 107.67759704589844, | |
| "kl_loss": 6.34291694723288e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00040621039290240815, | |
| "loss": 2.605423927307129, | |
| "step": 154, | |
| "total_loss": 2.6688530445098877 | |
| }, | |
| { | |
| "epoch": 0.39092055485498106, | |
| "grad_norm": 115.0508041381836, | |
| "kl_loss": 7.845541460937966e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004055766793409379, | |
| "loss": 2.3245625495910645, | |
| "step": 155, | |
| "total_loss": 2.403017997741699 | |
| }, | |
| { | |
| "epoch": 0.39344262295081966, | |
| "grad_norm": 62.95680618286133, | |
| "kl_loss": 7.661606815645428e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004049429657794677, | |
| "loss": 2.1633670330047607, | |
| "step": 156, | |
| "total_loss": 2.239983081817627 | |
| }, | |
| { | |
| "epoch": 0.39596469104665827, | |
| "grad_norm": 53.571048736572266, | |
| "kl_loss": 7.554017145139369e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00040430925221799746, | |
| "loss": 3.01509952545166, | |
| "step": 157, | |
| "total_loss": 3.090639591217041 | |
| }, | |
| { | |
| "epoch": 0.39848675914249687, | |
| "grad_norm": 45.94689178466797, | |
| "kl_loss": 7.277655100779157e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004036755386565273, | |
| "loss": 2.1610336303710938, | |
| "step": 158, | |
| "total_loss": 2.2338101863861084 | |
| }, | |
| { | |
| "epoch": 0.4010088272383354, | |
| "grad_norm": 120.42121887207031, | |
| "kl_loss": 1.1003065480963414e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00040304182509505703, | |
| "loss": 2.266493320465088, | |
| "step": 159, | |
| "total_loss": 2.376523971557617 | |
| }, | |
| { | |
| "epoch": 0.403530895334174, | |
| "grad_norm": 75.83570098876953, | |
| "kl_loss": 5.584304574313137e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004024081115335868, | |
| "loss": 2.5498852729797363, | |
| "step": 160, | |
| "total_loss": 2.6057283878326416 | |
| }, | |
| { | |
| "epoch": 0.4060529634300126, | |
| "grad_norm": 69.80756378173828, | |
| "kl_loss": 1.0243423531619555e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004017743979721166, | |
| "loss": 2.326557159423828, | |
| "step": 161, | |
| "total_loss": 2.4289913177490234 | |
| }, | |
| { | |
| "epoch": 0.4085750315258512, | |
| "grad_norm": 41.94525909423828, | |
| "kl_loss": 7.603321705573762e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004011406844106464, | |
| "loss": 2.06467866897583, | |
| "step": 162, | |
| "total_loss": 2.140711784362793 | |
| }, | |
| { | |
| "epoch": 0.4110970996216898, | |
| "grad_norm": 84.79508209228516, | |
| "kl_loss": 9.042678073001298e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0004005069708491762, | |
| "loss": 2.454202651977539, | |
| "step": 163, | |
| "total_loss": 2.5446293354034424 | |
| }, | |
| { | |
| "epoch": 0.4136191677175284, | |
| "grad_norm": 60.46647644042969, | |
| "kl_loss": 7.448775107832262e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.000399873257287706, | |
| "loss": 2.454906940460205, | |
| "step": 164, | |
| "total_loss": 2.5293946266174316 | |
| }, | |
| { | |
| "epoch": 0.416141235813367, | |
| "grad_norm": 52.88587188720703, | |
| "kl_loss": 7.54991447138309e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00039923954372623577, | |
| "loss": 2.235410213470459, | |
| "step": 165, | |
| "total_loss": 2.3109092712402344 | |
| }, | |
| { | |
| "epoch": 0.41866330390920553, | |
| "grad_norm": 35.26350402832031, | |
| "kl_loss": 7.14880030727727e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00039860583016476555, | |
| "loss": 2.3518946170806885, | |
| "step": 166, | |
| "total_loss": 2.423382520675659 | |
| }, | |
| { | |
| "epoch": 0.42118537200504413, | |
| "grad_norm": 43.75752258300781, | |
| "kl_loss": 7.024154768942026e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0003979721166032953, | |
| "loss": 2.780740976333618, | |
| "step": 167, | |
| "total_loss": 2.850982427597046 | |
| }, | |
| { | |
| "epoch": 0.42370744010088274, | |
| "grad_norm": 66.19912719726562, | |
| "kl_loss": 1.1769069629963269e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00039733840304182513, | |
| "loss": 2.3344695568084717, | |
| "step": 168, | |
| "total_loss": 2.452160358428955 | |
| }, | |
| { | |
| "epoch": 0.4262295081967213, | |
| "grad_norm": 59.10941696166992, | |
| "kl_loss": 6.215353209881869e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00039670468948035486, | |
| "loss": 2.570136785507202, | |
| "step": 169, | |
| "total_loss": 2.6322903633117676 | |
| }, | |
| { | |
| "epoch": 0.4287515762925599, | |
| "grad_norm": 52.37656784057617, | |
| "kl_loss": 1.028036678007993e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0003960709759188847, | |
| "loss": 2.5251572132110596, | |
| "step": 170, | |
| "total_loss": 2.6279609203338623 | |
| }, | |
| { | |
| "epoch": 0.4312736443883985, | |
| "grad_norm": 40.00493240356445, | |
| "kl_loss": 7.369838073145729e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00039543726235741444, | |
| "loss": 2.526984691619873, | |
| "step": 171, | |
| "total_loss": 2.6006829738616943 | |
| }, | |
| { | |
| "epoch": 0.4337957124842371, | |
| "grad_norm": 45.39324188232422, | |
| "kl_loss": 8.142234975139218e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0003948035487959443, | |
| "loss": 2.818206548690796, | |
| "step": 172, | |
| "total_loss": 2.8996288776397705 | |
| }, | |
| { | |
| "epoch": 0.43631778058007564, | |
| "grad_norm": 95.10191345214844, | |
| "kl_loss": 9.187782978870018e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.000394169835234474, | |
| "loss": 2.186061143875122, | |
| "step": 173, | |
| "total_loss": 2.2779390811920166 | |
| }, | |
| { | |
| "epoch": 0.43883984867591425, | |
| "grad_norm": 60.82707595825195, | |
| "kl_loss": 8.505615056719762e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0003935361216730038, | |
| "loss": 2.61464262008667, | |
| "step": 174, | |
| "total_loss": 2.6996986865997314 | |
| }, | |
| { | |
| "epoch": 0.44136191677175285, | |
| "grad_norm": 34.89598846435547, | |
| "kl_loss": 6.178815681323613e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0003929024081115336, | |
| "loss": 2.393256425857544, | |
| "step": 175, | |
| "total_loss": 2.4550445079803467 | |
| }, | |
| { | |
| "epoch": 0.44388398486759145, | |
| "grad_norm": 41.88828659057617, | |
| "kl_loss": 9.847246928984532e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0003922686945500634, | |
| "loss": 2.18418550491333, | |
| "step": 176, | |
| "total_loss": 2.2826578617095947 | |
| }, | |
| { | |
| "epoch": 0.44640605296343, | |
| "grad_norm": 54.601078033447266, | |
| "kl_loss": 7.419349401516229e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00039163498098859317, | |
| "loss": 2.479668140411377, | |
| "step": 177, | |
| "total_loss": 2.553861618041992 | |
| }, | |
| { | |
| "epoch": 0.4489281210592686, | |
| "grad_norm": 56.431785583496094, | |
| "kl_loss": 6.644057748417254e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00039100126742712296, | |
| "loss": 1.9767550230026245, | |
| "step": 178, | |
| "total_loss": 2.0431954860687256 | |
| }, | |
| { | |
| "epoch": 0.4514501891551072, | |
| "grad_norm": 35.20497131347656, | |
| "kl_loss": 1.1079168160677e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00039036755386565275, | |
| "loss": 2.156259059906006, | |
| "step": 179, | |
| "total_loss": 2.2670507431030273 | |
| }, | |
| { | |
| "epoch": 0.45397225725094575, | |
| "grad_norm": 41.2132453918457, | |
| "kl_loss": 8.99928167541475e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00038973384030418254, | |
| "loss": 2.3539505004882812, | |
| "step": 180, | |
| "total_loss": 2.4439432621002197 | |
| }, | |
| { | |
| "epoch": 0.45649432534678436, | |
| "grad_norm": 52.909889221191406, | |
| "kl_loss": 7.597910212098213e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00038910012674271227, | |
| "loss": 2.2199325561523438, | |
| "step": 181, | |
| "total_loss": 2.2959115505218506 | |
| }, | |
| { | |
| "epoch": 0.45901639344262296, | |
| "grad_norm": 36.9537239074707, | |
| "kl_loss": 7.003095703339568e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0003884664131812421, | |
| "loss": 2.4476423263549805, | |
| "step": 182, | |
| "total_loss": 2.5176732540130615 | |
| }, | |
| { | |
| "epoch": 0.46153846153846156, | |
| "grad_norm": 37.37628173828125, | |
| "kl_loss": 6.331342206067347e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00038783269961977185, | |
| "loss": 2.4311509132385254, | |
| "step": 183, | |
| "total_loss": 2.49446439743042 | |
| }, | |
| { | |
| "epoch": 0.4640605296343001, | |
| "grad_norm": 38.719974517822266, | |
| "kl_loss": 9.451380833525036e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0003871989860583017, | |
| "loss": 2.2305736541748047, | |
| "step": 184, | |
| "total_loss": 2.325087547302246 | |
| }, | |
| { | |
| "epoch": 0.4665825977301387, | |
| "grad_norm": 42.09044647216797, | |
| "kl_loss": 9.232453379581784e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0003865652724968314, | |
| "loss": 2.2907114028930664, | |
| "step": 185, | |
| "total_loss": 2.383035898208618 | |
| }, | |
| { | |
| "epoch": 0.4691046658259773, | |
| "grad_norm": 38.898799896240234, | |
| "kl_loss": 9.683325430387413e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00038593155893536127, | |
| "loss": 2.2948708534240723, | |
| "step": 186, | |
| "total_loss": 2.3917040824890137 | |
| }, | |
| { | |
| "epoch": 0.47162673392181587, | |
| "grad_norm": 43.5036506652832, | |
| "kl_loss": 9.654700505734581e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.000385297845373891, | |
| "loss": 2.246976852416992, | |
| "step": 187, | |
| "total_loss": 2.3435239791870117 | |
| }, | |
| { | |
| "epoch": 0.47414880201765447, | |
| "grad_norm": 61.16709518432617, | |
| "kl_loss": 7.404270263577928e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0003846641318124208, | |
| "loss": 2.168644666671753, | |
| "step": 188, | |
| "total_loss": 2.242687463760376 | |
| }, | |
| { | |
| "epoch": 0.4766708701134931, | |
| "grad_norm": 36.72150802612305, | |
| "kl_loss": 1.0344436418563419e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0003840304182509506, | |
| "loss": 2.2275993824005127, | |
| "step": 189, | |
| "total_loss": 2.3310437202453613 | |
| }, | |
| { | |
| "epoch": 0.4791929382093317, | |
| "grad_norm": 30.705520629882812, | |
| "kl_loss": 6.61248193978281e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00038339670468948037, | |
| "loss": 2.0510165691375732, | |
| "step": 190, | |
| "total_loss": 2.1171414852142334 | |
| }, | |
| { | |
| "epoch": 0.4817150063051702, | |
| "grad_norm": 35.57632064819336, | |
| "kl_loss": 8.263459960744513e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00038276299112801015, | |
| "loss": 2.4444973468780518, | |
| "step": 191, | |
| "total_loss": 2.527132034301758 | |
| }, | |
| { | |
| "epoch": 0.4842370744010088, | |
| "grad_norm": 36.82917022705078, | |
| "kl_loss": 7.212786101717938e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00038212927756653994, | |
| "loss": 2.446396589279175, | |
| "step": 192, | |
| "total_loss": 2.518524408340454 | |
| }, | |
| { | |
| "epoch": 0.48675914249684743, | |
| "grad_norm": 30.78325843811035, | |
| "kl_loss": 1.2874077981450682e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00038149556400506973, | |
| "loss": 2.339862585067749, | |
| "step": 193, | |
| "total_loss": 2.4686033725738525 | |
| }, | |
| { | |
| "epoch": 0.489281210592686, | |
| "grad_norm": 52.84626388549805, | |
| "kl_loss": 7.056150508333303e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0003808618504435995, | |
| "loss": 2.5392770767211914, | |
| "step": 194, | |
| "total_loss": 2.6098384857177734 | |
| }, | |
| { | |
| "epoch": 0.4918032786885246, | |
| "grad_norm": 56.915245056152344, | |
| "kl_loss": 6.626930115771756e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00038022813688212925, | |
| "loss": 2.524722099304199, | |
| "step": 195, | |
| "total_loss": 2.590991497039795 | |
| }, | |
| { | |
| "epoch": 0.4943253467843632, | |
| "grad_norm": 34.66722106933594, | |
| "kl_loss": 5.858741047859439e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0003795944233206591, | |
| "loss": 2.3640778064727783, | |
| "step": 196, | |
| "total_loss": 2.4226651191711426 | |
| }, | |
| { | |
| "epoch": 0.4968474148802018, | |
| "grad_norm": 44.76052474975586, | |
| "kl_loss": 1.2576073515901953e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00037896070975918883, | |
| "loss": 2.3642749786376953, | |
| "step": 197, | |
| "total_loss": 2.4900357723236084 | |
| }, | |
| { | |
| "epoch": 0.49936948297604034, | |
| "grad_norm": 47.74071502685547, | |
| "kl_loss": 7.279945890559247e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0003783269961977187, | |
| "loss": 2.1854708194732666, | |
| "step": 198, | |
| "total_loss": 2.258270263671875 | |
| }, | |
| { | |
| "epoch": 0.501891551071879, | |
| "grad_norm": 31.107900619506836, | |
| "kl_loss": 6.880613057091978e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0003776932826362484, | |
| "loss": 2.2795894145965576, | |
| "step": 199, | |
| "total_loss": 2.348395586013794 | |
| }, | |
| { | |
| "epoch": 0.5044136191677175, | |
| "grad_norm": 38.996551513671875, | |
| "kl_loss": 7.437692772782611e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00037705956907477825, | |
| "loss": 2.4008922576904297, | |
| "step": 200, | |
| "total_loss": 2.475269079208374 | |
| }, | |
| { | |
| "epoch": 0.5044136191677175, | |
| "eval_loss": 2.8438220024108887, | |
| "eval_runtime": 36.2993, | |
| "eval_samples_per_second": 19.422, | |
| "eval_steps_per_second": 2.452, | |
| "step": 200 | |
| }, | |
| { | |
| "epoch": 0.5069356872635561, | |
| "grad_norm": 35.33005905151367, | |
| "kl_loss": 5.263793667609207e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.000376425855513308, | |
| "loss": 2.8244736194610596, | |
| "step": 201, | |
| "total_loss": 2.8771116733551025 | |
| }, | |
| { | |
| "epoch": 0.5094577553593947, | |
| "grad_norm": 33.417179107666016, | |
| "kl_loss": 6.945005281977501e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00037579214195183777, | |
| "loss": 2.493217706680298, | |
| "step": 202, | |
| "total_loss": 2.5626678466796875 | |
| }, | |
| { | |
| "epoch": 0.5119798234552333, | |
| "grad_norm": 35.52710723876953, | |
| "kl_loss": 5.157462723559547e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00037515842839036756, | |
| "loss": 2.3386712074279785, | |
| "step": 203, | |
| "total_loss": 2.3902459144592285 | |
| }, | |
| { | |
| "epoch": 0.5145018915510718, | |
| "grad_norm": 33.11330032348633, | |
| "kl_loss": 8.693313446883622e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00037452471482889735, | |
| "loss": 2.5677270889282227, | |
| "step": 204, | |
| "total_loss": 2.654660224914551 | |
| }, | |
| { | |
| "epoch": 0.5170239596469105, | |
| "grad_norm": 45.05632019042969, | |
| "kl_loss": 7.886416142355301e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00037389100126742714, | |
| "loss": 2.4564504623413086, | |
| "step": 205, | |
| "total_loss": 2.5353145599365234 | |
| }, | |
| { | |
| "epoch": 0.519546027742749, | |
| "grad_norm": 53.712032318115234, | |
| "kl_loss": 9.798601752208924e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0003732572877059569, | |
| "loss": 2.365539789199829, | |
| "step": 206, | |
| "total_loss": 2.4635257720947266 | |
| }, | |
| { | |
| "epoch": 0.5220680958385876, | |
| "grad_norm": 38.51359558105469, | |
| "kl_loss": 6.502447291723001e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0003726235741444867, | |
| "loss": 2.337630033493042, | |
| "step": 207, | |
| "total_loss": 2.4026544094085693 | |
| }, | |
| { | |
| "epoch": 0.5245901639344263, | |
| "grad_norm": 52.29638671875, | |
| "kl_loss": 1.1507498243190639e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0003719898605830165, | |
| "loss": 2.099738597869873, | |
| "step": 208, | |
| "total_loss": 2.214813470840454 | |
| }, | |
| { | |
| "epoch": 0.5271122320302648, | |
| "grad_norm": 44.06270980834961, | |
| "kl_loss": 1.1070725491890698e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00037135614702154624, | |
| "loss": 2.2641494274139404, | |
| "step": 209, | |
| "total_loss": 2.37485671043396 | |
| }, | |
| { | |
| "epoch": 0.5296343001261034, | |
| "grad_norm": 45.88140869140625, | |
| "kl_loss": 6.309808497917402e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0003707224334600761, | |
| "loss": 2.3526082038879395, | |
| "step": 210, | |
| "total_loss": 2.4157063961029053 | |
| }, | |
| { | |
| "epoch": 0.532156368221942, | |
| "grad_norm": 37.57832717895508, | |
| "kl_loss": 9.964617220248329e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0003700887198986058, | |
| "loss": 2.160687208175659, | |
| "step": 211, | |
| "total_loss": 2.260333299636841 | |
| }, | |
| { | |
| "epoch": 0.5346784363177806, | |
| "grad_norm": 39.08332443237305, | |
| "kl_loss": 9.133309930575706e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00036945500633713566, | |
| "loss": 2.6020138263702393, | |
| "step": 212, | |
| "total_loss": 2.6933469772338867 | |
| }, | |
| { | |
| "epoch": 0.5372005044136192, | |
| "grad_norm": 37.06446838378906, | |
| "kl_loss": 1.0182624521348771e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0003688212927756654, | |
| "loss": 2.288355588912964, | |
| "step": 213, | |
| "total_loss": 2.39018177986145 | |
| }, | |
| { | |
| "epoch": 0.5397225725094578, | |
| "grad_norm": 38.347320556640625, | |
| "kl_loss": 9.616571361448223e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00036818757921419523, | |
| "loss": 2.241528034210205, | |
| "step": 214, | |
| "total_loss": 2.337693691253662 | |
| }, | |
| { | |
| "epoch": 0.5422446406052963, | |
| "grad_norm": 47.67724609375, | |
| "kl_loss": 9.903801156951886e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00036755386565272497, | |
| "loss": 2.4759681224823, | |
| "step": 215, | |
| "total_loss": 2.5750062465667725 | |
| }, | |
| { | |
| "epoch": 0.544766708701135, | |
| "grad_norm": 42.321475982666016, | |
| "kl_loss": 7.528672085754806e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00036692015209125476, | |
| "loss": 2.662468433380127, | |
| "step": 216, | |
| "total_loss": 2.737755060195923 | |
| }, | |
| { | |
| "epoch": 0.5472887767969735, | |
| "grad_norm": 49.61969757080078, | |
| "kl_loss": 6.641187155764783e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00036628643852978454, | |
| "loss": 2.2327988147735596, | |
| "step": 217, | |
| "total_loss": 2.299210786819458 | |
| }, | |
| { | |
| "epoch": 0.5498108448928121, | |
| "grad_norm": 30.943737030029297, | |
| "kl_loss": 5.8987577489233445e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00036565272496831433, | |
| "loss": 2.481475353240967, | |
| "step": 218, | |
| "total_loss": 2.5404629707336426 | |
| }, | |
| { | |
| "epoch": 0.5523329129886507, | |
| "grad_norm": 31.544883728027344, | |
| "kl_loss": 5.3747619688238046e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0003650190114068441, | |
| "loss": 1.8688888549804688, | |
| "step": 219, | |
| "total_loss": 1.9226365089416504 | |
| }, | |
| { | |
| "epoch": 0.5548549810844893, | |
| "grad_norm": 40.07229232788086, | |
| "kl_loss": 5.666829139272522e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0003643852978453739, | |
| "loss": 2.1646535396575928, | |
| "step": 220, | |
| "total_loss": 2.2213218212127686 | |
| }, | |
| { | |
| "epoch": 0.5573770491803278, | |
| "grad_norm": 41.914249420166016, | |
| "kl_loss": 7.765168419382462e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0003637515842839037, | |
| "loss": 2.4201087951660156, | |
| "step": 221, | |
| "total_loss": 2.497760534286499 | |
| }, | |
| { | |
| "epoch": 0.5598991172761665, | |
| "grad_norm": 35.645835876464844, | |
| "kl_loss": 6.774806848852677e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0003631178707224335, | |
| "loss": 2.434053421020508, | |
| "step": 222, | |
| "total_loss": 2.5018014907836914 | |
| }, | |
| { | |
| "epoch": 0.562421185372005, | |
| "grad_norm": 29.000551223754883, | |
| "kl_loss": 5.608683295577066e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0003624841571609633, | |
| "loss": 2.1331686973571777, | |
| "step": 223, | |
| "total_loss": 2.189255475997925 | |
| }, | |
| { | |
| "epoch": 0.5649432534678437, | |
| "grad_norm": 80.6998519897461, | |
| "kl_loss": 1.0609716838416716e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00036185044359949306, | |
| "loss": 2.1721668243408203, | |
| "step": 224, | |
| "total_loss": 2.278264045715332 | |
| }, | |
| { | |
| "epoch": 0.5674653215636822, | |
| "grad_norm": 39.576663970947266, | |
| "kl_loss": 6.589240086896098e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0003612167300380228, | |
| "loss": 2.0701348781585693, | |
| "step": 225, | |
| "total_loss": 2.1360273361206055 | |
| }, | |
| { | |
| "epoch": 0.5699873896595208, | |
| "grad_norm": 41.5588493347168, | |
| "kl_loss": 7.04123763739517e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00036058301647655264, | |
| "loss": 2.371951103210449, | |
| "step": 226, | |
| "total_loss": 2.4423635005950928 | |
| }, | |
| { | |
| "epoch": 0.5725094577553594, | |
| "grad_norm": 41.82505416870117, | |
| "kl_loss": 8.056593969740788e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0003599493029150824, | |
| "loss": 2.121464967727661, | |
| "step": 227, | |
| "total_loss": 2.202030897140503 | |
| }, | |
| { | |
| "epoch": 0.575031525851198, | |
| "grad_norm": 49.150333404541016, | |
| "kl_loss": 9.009213641775204e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0003593155893536122, | |
| "loss": 2.234654426574707, | |
| "step": 228, | |
| "total_loss": 2.324746608734131 | |
| }, | |
| { | |
| "epoch": 0.5775535939470365, | |
| "grad_norm": 59.28157043457031, | |
| "kl_loss": 9.32024164512768e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00035868187579214195, | |
| "loss": 2.3375024795532227, | |
| "step": 229, | |
| "total_loss": 2.4307048320770264 | |
| }, | |
| { | |
| "epoch": 0.5800756620428752, | |
| "grad_norm": 66.21656799316406, | |
| "kl_loss": 8.425497810549132e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0003580481622306718, | |
| "loss": 2.195544481277466, | |
| "step": 230, | |
| "total_loss": 2.279799461364746 | |
| }, | |
| { | |
| "epoch": 0.5825977301387137, | |
| "grad_norm": 44.063621520996094, | |
| "kl_loss": 6.732751245408508e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0003574144486692015, | |
| "loss": 2.1953518390655518, | |
| "step": 231, | |
| "total_loss": 2.2626793384552 | |
| }, | |
| { | |
| "epoch": 0.5851197982345523, | |
| "grad_norm": 57.83623504638672, | |
| "kl_loss": 7.052550898833942e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0003567807351077313, | |
| "loss": 2.7928688526153564, | |
| "step": 232, | |
| "total_loss": 2.863394260406494 | |
| }, | |
| { | |
| "epoch": 0.587641866330391, | |
| "grad_norm": 60.28070831298828, | |
| "kl_loss": 7.187722417256737e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0003561470215462611, | |
| "loss": 2.0700807571411133, | |
| "step": 233, | |
| "total_loss": 2.141957998275757 | |
| }, | |
| { | |
| "epoch": 0.5901639344262295, | |
| "grad_norm": 48.02665328979492, | |
| "kl_loss": 7.593448714260376e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0003555133079847909, | |
| "loss": 2.2719221115112305, | |
| "step": 234, | |
| "total_loss": 2.3478565216064453 | |
| }, | |
| { | |
| "epoch": 0.592686002522068, | |
| "grad_norm": 40.39185333251953, | |
| "kl_loss": 6.198138891022609e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0003548795944233207, | |
| "loss": 2.460548162460327, | |
| "step": 235, | |
| "total_loss": 2.5225296020507812 | |
| }, | |
| { | |
| "epoch": 0.5952080706179067, | |
| "grad_norm": 51.92955780029297, | |
| "kl_loss": 6.760530624205785e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00035424588086185047, | |
| "loss": 2.1937878131866455, | |
| "step": 236, | |
| "total_loss": 2.2613930702209473 | |
| }, | |
| { | |
| "epoch": 0.5977301387137453, | |
| "grad_norm": 60.31737518310547, | |
| "kl_loss": 9.117999155705547e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00035361216730038026, | |
| "loss": 2.5255377292633057, | |
| "step": 237, | |
| "total_loss": 2.61671781539917 | |
| }, | |
| { | |
| "epoch": 0.6002522068095839, | |
| "grad_norm": 45.97258377075195, | |
| "kl_loss": 4.57544189202963e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00035297845373891005, | |
| "loss": 2.1498923301696777, | |
| "step": 238, | |
| "total_loss": 2.1956467628479004 | |
| }, | |
| { | |
| "epoch": 0.6027742749054225, | |
| "grad_norm": 33.029720306396484, | |
| "kl_loss": 6.883194458850994e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0003523447401774398, | |
| "loss": 2.6470816135406494, | |
| "step": 239, | |
| "total_loss": 2.7159135341644287 | |
| }, | |
| { | |
| "epoch": 0.605296343001261, | |
| "grad_norm": 65.16218566894531, | |
| "kl_loss": 7.724134576392316e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0003517110266159696, | |
| "loss": 2.0316035747528076, | |
| "step": 240, | |
| "total_loss": 2.1088449954986572 | |
| }, | |
| { | |
| "epoch": 0.6078184110970997, | |
| "grad_norm": 54.9919319152832, | |
| "kl_loss": 1.0422303375889896e-07, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00035107731305449936, | |
| "loss": 2.039462089538574, | |
| "step": 241, | |
| "total_loss": 2.1436851024627686 | |
| }, | |
| { | |
| "epoch": 0.6103404791929382, | |
| "grad_norm": 46.679317474365234, | |
| "kl_loss": 7.876125351913288e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0003504435994930292, | |
| "loss": 2.1784236431121826, | |
| "step": 242, | |
| "total_loss": 2.2571849822998047 | |
| }, | |
| { | |
| "epoch": 0.6128625472887768, | |
| "grad_norm": 42.02393341064453, | |
| "kl_loss": 8.838826914825404e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00034980988593155893, | |
| "loss": 2.3769657611846924, | |
| "step": 243, | |
| "total_loss": 2.4653539657592773 | |
| }, | |
| { | |
| "epoch": 0.6153846153846154, | |
| "grad_norm": 42.681602478027344, | |
| "kl_loss": 7.536393553664311e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0003491761723700888, | |
| "loss": 2.6341843605041504, | |
| "step": 244, | |
| "total_loss": 2.709548234939575 | |
| }, | |
| { | |
| "epoch": 0.617906683480454, | |
| "grad_norm": 60.763973236083984, | |
| "kl_loss": 9.037336212713853e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0003485424588086185, | |
| "loss": 1.9615586996078491, | |
| "step": 245, | |
| "total_loss": 2.0519320964813232 | |
| }, | |
| { | |
| "epoch": 0.6204287515762925, | |
| "grad_norm": 53.96251678466797, | |
| "kl_loss": 6.376885153258627e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0003479087452471483, | |
| "loss": 2.3010590076446533, | |
| "step": 246, | |
| "total_loss": 2.364827871322632 | |
| }, | |
| { | |
| "epoch": 0.6229508196721312, | |
| "grad_norm": 54.281349182128906, | |
| "kl_loss": 6.555458753609855e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0003472750316856781, | |
| "loss": 2.544086456298828, | |
| "step": 247, | |
| "total_loss": 2.6096410751342773 | |
| }, | |
| { | |
| "epoch": 0.6254728877679697, | |
| "grad_norm": 37.844505310058594, | |
| "kl_loss": 6.108597006004857e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.0003466413181242079, | |
| "loss": 2.4314613342285156, | |
| "step": 248, | |
| "total_loss": 2.4925472736358643 | |
| }, | |
| { | |
| "epoch": 0.6279949558638083, | |
| "grad_norm": 121.8286361694336, | |
| "kl_loss": 7.796773360269071e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00034600760456273766, | |
| "loss": 2.589028835296631, | |
| "step": 249, | |
| "total_loss": 2.666996479034424 | |
| }, | |
| { | |
| "epoch": 0.6305170239596469, | |
| "grad_norm": 41.6161003112793, | |
| "kl_loss": 7.061287732312849e-08, | |
| "kl_weight": 1000000.0, | |
| "learning_rate": 0.00034537389100126745, | |
| "loss": 2.3637168407440186, | |
| "step": 250, | |
| "total_loss": 2.4343297481536865 | |
| } | |
| ], | |
| "logging_steps": 1, | |
| "max_steps": 794, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 2, | |
| "save_steps": 5, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": false, | |
| "should_training_stop": false | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 8.59570176e+16, | |
| "train_batch_size": 2, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |