{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.032797638570022956,
  "eval_steps": 500,
  "global_step": 100,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.00032797638570022957,
      "grad_norm": 3.994058609008789,
      "learning_rate": 0.00019805941782534764,
      "loss": 2.1587,
      "step": 1
    },
    {
      "epoch": 0.0006559527714004591,
      "grad_norm": 3.933835983276367,
      "learning_rate": 0.00019605881764529358,
      "loss": 2.0517,
      "step": 2
    },
    {
      "epoch": 0.0009839291571006887,
      "grad_norm": 2.435616970062256,
      "learning_rate": 0.00019405821746523957,
      "loss": 1.6092,
      "step": 3
    },
    {
      "epoch": 0.0013119055428009183,
      "grad_norm": 2.745454788208008,
      "learning_rate": 0.00019205761728518557,
      "loss": 1.4157,
      "step": 4
    },
    {
      "epoch": 0.0016398819285011479,
      "grad_norm": 1.7035468816757202,
      "learning_rate": 0.00019005701710513156,
      "loss": 1.0786,
      "step": 5
    },
    {
      "epoch": 0.0019678583142013774,
      "grad_norm": 2.8651487827301025,
      "learning_rate": 0.00018805641692507753,
      "loss": 1.0119,
      "step": 6
    },
    {
      "epoch": 0.0022958346999016072,
      "grad_norm": 3.744490146636963,
      "learning_rate": 0.00018605581674502352,
      "loss": 0.9555,
      "step": 7
    },
    {
      "epoch": 0.0026238110856018366,
      "grad_norm": 1.1622904539108276,
      "learning_rate": 0.00018405521656496952,
      "loss": 0.8364,
      "step": 8
    },
    {
      "epoch": 0.0029517874713020664,
      "grad_norm": 8.87141227722168,
      "learning_rate": 0.00018205461638491548,
      "loss": 0.7568,
      "step": 9
    },
    {
      "epoch": 0.0032797638570022957,
      "grad_norm": 1.4474456310272217,
      "learning_rate": 0.00018005401620486148,
      "loss": 0.7653,
      "step": 10
    },
    {
      "epoch": 0.0036077402427025255,
      "grad_norm": 1.3203747272491455,
      "learning_rate": 0.00017805341602480744,
      "loss": 0.643,
      "step": 11
    },
    {
      "epoch": 0.003935716628402755,
      "grad_norm": 1.6685700416564941,
      "learning_rate": 0.00017605281584475344,
      "loss": 0.6993,
      "step": 12
    },
    {
      "epoch": 0.004263693014102984,
      "grad_norm": 1.86400306224823,
      "learning_rate": 0.00017405221566469943,
      "loss": 0.6262,
      "step": 13
    },
    {
      "epoch": 0.0045916693998032145,
      "grad_norm": 1.2344428300857544,
      "learning_rate": 0.00017205161548464542,
      "loss": 0.6256,
      "step": 14
    },
    {
      "epoch": 0.004919645785503444,
      "grad_norm": 1.351318359375,
      "learning_rate": 0.00017005101530459136,
      "loss": 0.6516,
      "step": 15
    },
    {
      "epoch": 0.005247622171203673,
      "grad_norm": 1.4530205726623535,
      "learning_rate": 0.00016805041512453736,
      "loss": 0.6692,
      "step": 16
    },
    {
      "epoch": 0.0055755985569039025,
      "grad_norm": 1.2419607639312744,
      "learning_rate": 0.00016604981494448335,
      "loss": 0.6294,
      "step": 17
    },
    {
      "epoch": 0.005903574942604133,
      "grad_norm": 1.0507097244262695,
      "learning_rate": 0.00016404921476442935,
      "loss": 0.5902,
      "step": 18
    },
    {
      "epoch": 0.006231551328304362,
      "grad_norm": 0.8393335938453674,
      "learning_rate": 0.0001620486145843753,
      "loss": 0.604,
      "step": 19
    },
    {
      "epoch": 0.0065595277140045915,
      "grad_norm": 0.9332450032234192,
      "learning_rate": 0.0001600480144043213,
      "loss": 0.5327,
      "step": 20
    },
    {
      "epoch": 0.006887504099704822,
      "grad_norm": 1.0808994770050049,
      "learning_rate": 0.0001580474142242673,
      "loss": 0.596,
      "step": 21
    },
    {
      "epoch": 0.007215480485405051,
      "grad_norm": 0.8101345896720886,
      "learning_rate": 0.00015604681404421327,
      "loss": 0.5003,
      "step": 22
    },
    {
      "epoch": 0.00754345687110528,
      "grad_norm": 1.1937756538391113,
      "learning_rate": 0.00015404621386415926,
      "loss": 0.5872,
      "step": 23
    },
    {
      "epoch": 0.00787143325680551,
      "grad_norm": 0.9010137915611267,
      "learning_rate": 0.00015204561368410523,
      "loss": 0.5776,
      "step": 24
    },
    {
      "epoch": 0.008199409642505739,
      "grad_norm": 0.7353044152259827,
      "learning_rate": 0.00015004501350405122,
      "loss": 0.5487,
      "step": 25
    },
    {
      "epoch": 0.008527386028205968,
      "grad_norm": 1.011644721031189,
      "learning_rate": 0.00014804441332399721,
      "loss": 0.583,
      "step": 26
    },
    {
      "epoch": 0.0088553624139062,
      "grad_norm": 0.9079630970954895,
      "learning_rate": 0.0001460438131439432,
      "loss": 0.5559,
      "step": 27
    },
    {
      "epoch": 0.009183338799606429,
      "grad_norm": 0.607328474521637,
      "learning_rate": 0.00014404321296388918,
      "loss": 0.6093,
      "step": 28
    },
    {
      "epoch": 0.009511315185306658,
      "grad_norm": 0.8753256797790527,
      "learning_rate": 0.00014204261278383514,
      "loss": 0.5146,
      "step": 29
    },
    {
      "epoch": 0.009839291571006888,
      "grad_norm": 0.7783370614051819,
      "learning_rate": 0.00014004201260378114,
      "loss": 0.4971,
      "step": 30
    },
    {
      "epoch": 0.010167267956707117,
      "grad_norm": 0.5913918018341064,
      "learning_rate": 0.00013804141242372713,
      "loss": 0.5077,
      "step": 31
    },
    {
      "epoch": 0.010495244342407346,
      "grad_norm": 0.817452609539032,
      "learning_rate": 0.00013604081224367312,
      "loss": 0.5969,
      "step": 32
    },
    {
      "epoch": 0.010823220728107576,
      "grad_norm": 0.8791104555130005,
      "learning_rate": 0.0001340402120636191,
      "loss": 0.5425,
      "step": 33
    },
    {
      "epoch": 0.011151197113807805,
      "grad_norm": 0.6707533001899719,
      "learning_rate": 0.00013203961188356508,
      "loss": 0.5647,
      "step": 34
    },
    {
      "epoch": 0.011479173499508036,
      "grad_norm": 0.8518595099449158,
      "learning_rate": 0.00013003901170351108,
      "loss": 0.529,
      "step": 35
    },
    {
      "epoch": 0.011807149885208265,
      "grad_norm": 0.8530653715133667,
      "learning_rate": 0.00012803841152345704,
      "loss": 0.4994,
      "step": 36
    },
    {
      "epoch": 0.012135126270908495,
      "grad_norm": 0.5490475296974182,
      "learning_rate": 0.000126037811343403,
      "loss": 0.5335,
      "step": 37
    },
    {
      "epoch": 0.012463102656608724,
      "grad_norm": 0.6268607974052429,
      "learning_rate": 0.000124037211163349,
      "loss": 0.5691,
      "step": 38
    },
    {
      "epoch": 0.012791079042308954,
      "grad_norm": 0.7266058325767517,
      "learning_rate": 0.000122036610983295,
      "loss": 0.5389,
      "step": 39
    },
    {
      "epoch": 0.013119055428009183,
      "grad_norm": 0.6403557062149048,
      "learning_rate": 0.00012003601080324098,
      "loss": 0.5405,
      "step": 40
    },
    {
      "epoch": 0.013447031813709412,
      "grad_norm": 0.5774125456809998,
      "learning_rate": 0.00011803541062318697,
      "loss": 0.5031,
      "step": 41
    },
    {
      "epoch": 0.013775008199409643,
      "grad_norm": 0.7147932052612305,
      "learning_rate": 0.00011603481044313295,
      "loss": 0.5353,
      "step": 42
    },
    {
      "epoch": 0.014102984585109873,
      "grad_norm": 0.5923655033111572,
      "learning_rate": 0.00011403421026307892,
      "loss": 0.5532,
      "step": 43
    },
    {
      "epoch": 0.014430960970810102,
      "grad_norm": 0.6512230634689331,
      "learning_rate": 0.00011203361008302491,
      "loss": 0.5585,
      "step": 44
    },
    {
      "epoch": 0.014758937356510331,
      "grad_norm": 0.5685781240463257,
      "learning_rate": 0.0001100330099029709,
      "loss": 0.5231,
      "step": 45
    },
    {
      "epoch": 0.01508691374221056,
      "grad_norm": 0.5105693936347961,
      "learning_rate": 0.00010803240972291689,
      "loss": 0.5082,
      "step": 46
    },
    {
      "epoch": 0.01541489012791079,
      "grad_norm": 0.5665640234947205,
      "learning_rate": 0.00010603180954286287,
      "loss": 0.551,
      "step": 47
    },
    {
      "epoch": 0.01574286651361102,
      "grad_norm": 0.49588409066200256,
      "learning_rate": 0.00010403120936280886,
      "loss": 0.4706,
      "step": 48
    },
    {
      "epoch": 0.01607084289931125,
      "grad_norm": 0.6303009986877441,
      "learning_rate": 0.00010203060918275482,
      "loss": 0.6035,
      "step": 49
    },
    {
      "epoch": 0.016398819285011478,
      "grad_norm": 0.5468104481697083,
      "learning_rate": 0.00010003000900270081,
      "loss": 0.5344,
      "step": 50
    },
    {
      "epoch": 0.016726795670711708,
      "grad_norm": 0.5305197834968567,
      "learning_rate": 9.802940882264679e-05,
      "loss": 0.4671,
      "step": 51
    },
    {
      "epoch": 0.017054772056411937,
      "grad_norm": 0.6518839597702026,
      "learning_rate": 9.602880864259278e-05,
      "loss": 0.5079,
      "step": 52
    },
    {
      "epoch": 0.017382748442112166,
      "grad_norm": 0.6107843518257141,
      "learning_rate": 9.402820846253876e-05,
      "loss": 0.5327,
      "step": 53
    },
    {
      "epoch": 0.0177107248278124,
      "grad_norm": 0.550966739654541,
      "learning_rate": 9.202760828248476e-05,
      "loss": 0.5254,
      "step": 54
    },
    {
      "epoch": 0.01803870121351263,
      "grad_norm": 0.5233923196792603,
      "learning_rate": 9.002700810243074e-05,
      "loss": 0.5566,
      "step": 55
    },
    {
      "epoch": 0.018366677599212858,
      "grad_norm": 0.44530218839645386,
      "learning_rate": 8.802640792237672e-05,
      "loss": 0.5025,
      "step": 56
    },
    {
      "epoch": 0.018694653984913087,
      "grad_norm": 0.6728590130805969,
      "learning_rate": 8.602580774232271e-05,
      "loss": 0.5202,
      "step": 57
    },
    {
      "epoch": 0.019022630370613317,
      "grad_norm": 0.5043520927429199,
      "learning_rate": 8.402520756226868e-05,
      "loss": 0.5248,
      "step": 58
    },
    {
      "epoch": 0.019350606756313546,
      "grad_norm": 0.48946598172187805,
      "learning_rate": 8.202460738221467e-05,
      "loss": 0.5131,
      "step": 59
    },
    {
      "epoch": 0.019678583142013775,
      "grad_norm": 0.5078890323638916,
      "learning_rate": 8.002400720216065e-05,
      "loss": 0.5142,
      "step": 60
    },
    {
      "epoch": 0.020006559527714005,
      "grad_norm": 0.48533907532691956,
      "learning_rate": 7.802340702210663e-05,
      "loss": 0.5045,
      "step": 61
    },
    {
      "epoch": 0.020334535913414234,
      "grad_norm": 0.5382115840911865,
      "learning_rate": 7.602280684205261e-05,
      "loss": 0.5858,
      "step": 62
    },
    {
      "epoch": 0.020662512299114463,
      "grad_norm": 0.5158064961433411,
      "learning_rate": 7.402220666199861e-05,
      "loss": 0.525,
      "step": 63
    },
    {
      "epoch": 0.020990488684814693,
      "grad_norm": 0.5392168164253235,
      "learning_rate": 7.202160648194459e-05,
      "loss": 0.5439,
      "step": 64
    },
    {
      "epoch": 0.021318465070514922,
      "grad_norm": 0.5060271620750427,
      "learning_rate": 7.002100630189057e-05,
      "loss": 0.5141,
      "step": 65
    },
    {
      "epoch": 0.02164644145621515,
      "grad_norm": 0.5331196188926697,
      "learning_rate": 6.802040612183656e-05,
      "loss": 0.522,
      "step": 66
    },
    {
      "epoch": 0.02197441784191538,
      "grad_norm": 0.49599310755729675,
      "learning_rate": 6.601980594178254e-05,
      "loss": 0.5154,
      "step": 67
    },
    {
      "epoch": 0.02230239422761561,
      "grad_norm": 0.5665214657783508,
      "learning_rate": 6.401920576172852e-05,
      "loss": 0.5761,
      "step": 68
    },
    {
      "epoch": 0.022630370613315843,
      "grad_norm": 0.5102436542510986,
      "learning_rate": 6.20186055816745e-05,
      "loss": 0.5483,
      "step": 69
    },
    {
      "epoch": 0.022958346999016072,
      "grad_norm": 0.512220025062561,
      "learning_rate": 6.001800540162049e-05,
      "loss": 0.5357,
      "step": 70
    },
    {
      "epoch": 0.0232863233847163,
      "grad_norm": 0.5633516311645508,
      "learning_rate": 5.801740522156648e-05,
      "loss": 0.5605,
      "step": 71
    },
    {
      "epoch": 0.02361429977041653,
      "grad_norm": 0.4963136315345764,
      "learning_rate": 5.601680504151246e-05,
      "loss": 0.5617,
      "step": 72
    },
    {
      "epoch": 0.02394227615611676,
      "grad_norm": 0.6808409690856934,
      "learning_rate": 5.4016204861458444e-05,
      "loss": 0.5357,
      "step": 73
    },
    {
      "epoch": 0.02427025254181699,
      "grad_norm": 0.5417987704277039,
      "learning_rate": 5.201560468140443e-05,
      "loss": 0.5375,
      "step": 74
    },
    {
      "epoch": 0.02459822892751722,
      "grad_norm": 0.5455000996589661,
      "learning_rate": 5.0015004501350405e-05,
      "loss": 0.5362,
      "step": 75
    },
    {
      "epoch": 0.02492620531321745,
      "grad_norm": 0.49334102869033813,
      "learning_rate": 4.801440432129639e-05,
      "loss": 0.5081,
      "step": 76
    },
    {
      "epoch": 0.025254181698917678,
      "grad_norm": 0.4684203863143921,
      "learning_rate": 4.601380414124238e-05,
      "loss": 0.5061,
      "step": 77
    },
    {
      "epoch": 0.025582158084617907,
      "grad_norm": 0.5156847834587097,
      "learning_rate": 4.401320396118836e-05,
      "loss": 0.5231,
      "step": 78
    },
    {
      "epoch": 0.025910134470318136,
      "grad_norm": 0.5413920283317566,
      "learning_rate": 4.201260378113434e-05,
      "loss": 0.5849,
      "step": 79
    },
    {
      "epoch": 0.026238110856018366,
      "grad_norm": 0.5093967914581299,
      "learning_rate": 4.0012003601080326e-05,
      "loss": 0.497,
      "step": 80
    },
    {
      "epoch": 0.026566087241718595,
      "grad_norm": 0.46262067556381226,
      "learning_rate": 3.801140342102631e-05,
      "loss": 0.5364,
      "step": 81
    },
    {
      "epoch": 0.026894063627418825,
      "grad_norm": 0.6614812612533569,
      "learning_rate": 3.6010803240972294e-05,
      "loss": 0.4989,
      "step": 82
    },
    {
      "epoch": 0.027222040013119054,
      "grad_norm": 0.4883507788181305,
      "learning_rate": 3.401020306091828e-05,
      "loss": 0.5443,
      "step": 83
    },
    {
      "epoch": 0.027550016398819287,
      "grad_norm": 0.5155258774757385,
      "learning_rate": 3.200960288086426e-05,
      "loss": 0.5346,
      "step": 84
    },
    {
      "epoch": 0.027877992784519516,
      "grad_norm": 0.46190178394317627,
      "learning_rate": 3.0009002700810245e-05,
      "loss": 0.5019,
      "step": 85
    },
    {
      "epoch": 0.028205969170219745,
      "grad_norm": 0.6695094108581543,
      "learning_rate": 2.800840252075623e-05,
      "loss": 0.523,
      "step": 86
    },
    {
      "epoch": 0.028533945555919975,
      "grad_norm": 0.4700133204460144,
      "learning_rate": 2.6007802340702216e-05,
      "loss": 0.5288,
      "step": 87
    },
    {
      "epoch": 0.028861921941620204,
      "grad_norm": 0.5050464272499084,
      "learning_rate": 2.4007202160648196e-05,
      "loss": 0.4874,
      "step": 88
    },
    {
      "epoch": 0.029189898327320433,
      "grad_norm": 0.5086123943328857,
      "learning_rate": 2.200660198059418e-05,
      "loss": 0.557,
      "step": 89
    },
    {
      "epoch": 0.029517874713020663,
      "grad_norm": 0.48082900047302246,
      "learning_rate": 2.0006001800540163e-05,
      "loss": 0.5176,
      "step": 90
    },
    {
      "epoch": 0.029845851098720892,
      "grad_norm": 0.4965345561504364,
      "learning_rate": 1.8005401620486147e-05,
      "loss": 0.5113,
      "step": 91
    },
    {
      "epoch": 0.03017382748442112,
      "grad_norm": 0.49173784255981445,
      "learning_rate": 1.600480144043213e-05,
      "loss": 0.491,
      "step": 92
    },
    {
      "epoch": 0.03050180387012135,
      "grad_norm": 0.502700924873352,
      "learning_rate": 1.4004201260378114e-05,
      "loss": 0.522,
      "step": 93
    },
    {
      "epoch": 0.03082978025582158,
      "grad_norm": 0.5490285158157349,
      "learning_rate": 1.2003601080324098e-05,
      "loss": 0.4995,
      "step": 94
    },
    {
      "epoch": 0.03115775664152181,
      "grad_norm": 0.48369327187538147,
      "learning_rate": 1.0003000900270082e-05,
      "loss": 0.5166,
      "step": 95
    },
    {
      "epoch": 0.03148573302722204,
      "grad_norm": 0.6655266880989075,
      "learning_rate": 8.002400720216065e-06,
      "loss": 0.5704,
      "step": 96
    },
    {
      "epoch": 0.03181370941292227,
      "grad_norm": 0.4279884994029999,
      "learning_rate": 6.001800540162049e-06,
      "loss": 0.4882,
      "step": 97
    },
    {
      "epoch": 0.0321416857986225,
      "grad_norm": 0.5384986400604248,
      "learning_rate": 4.001200360108033e-06,
      "loss": 0.5809,
      "step": 98
    },
    {
      "epoch": 0.03246966218432273,
      "grad_norm": 0.4719259738922119,
      "learning_rate": 2.0006001800540163e-06,
      "loss": 0.5762,
      "step": 99
    },
    {
      "epoch": 0.032797638570022956,
      "grad_norm": 0.5332183241844177,
      "learning_rate": 0.0,
      "loss": 0.4694,
      "step": 100
    }
  ],
  "logging_steps": 1,
  "max_steps": 100,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 6.686572160719258e+16,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}