{"train/decoder_grad_norm": 0.5876523852348328, "train/decoder_param_norm": 1057.45703125, "train/encoder_grad_norm": 0.38440409302711487, "train/encoder_param_norm": 2316.3564453125, "train/grad_norm": 0.7022120952606201, "layer_grad_norm/": {"decoder": {"model": {"decoder": {"embed_positions": {"embedding": 0.10323784500360489}, "embed_tokens": {"embedding": 0.16808316111564636}, "layernorm_embedding": {"bias": 0.03703528642654419, "scale": 0.060806743800640106}, "layers": {"FlaxBartDecoderLayers": {"encoder_attn": {"k_proj": {"bias": 1.75027107616188e-05, "kernel": 0.030463965609669685}, "out_proj": {"bias": 0.024376848712563515, "kernel": 0.08760593086481094}, "q_proj": {"bias": 0.0016024636570364237, "kernel": 0.034829143434762955}, "v_proj": {"bias": 0.04787713289260864, "kernel": 0.07169140875339508}}, "encoder_attn_layer_norm": {"bias": 0.03529948368668556, "scale": 0.0380270853638649}, "fc1": {"bias": 0.013248836621642113, "kernel": 0.33658137917518616}, "fc2": {"bias": 0.030859898775815964, "kernel": 0.2677602767944336}, "final_layer_norm": {"bias": 0.1120176762342453, "scale": 0.05825764685869217}, "self_attn": {"k_proj": {"bias": 6.563532224390656e-06, "kernel": 0.047542572021484375}, "out_proj": {"bias": 0.068998321890831, "kernel": 0.15063460171222687}, "q_proj": {"bias": 0.003958633169531822, "kernel": 0.05425203591585159}, "v_proj": {"bias": 0.07329808175563812, "kernel": 0.198069229722023}}, "self_attn_layer_norm": {"bias": 0.023308640345931053, "scale": 0.030806636437773705}}}}}}, "encoder": {"adapter": {"layers": {"0": {"conv": {"bias": 0.04864540696144104, "kernel": 0.133722722530365}}, "1": {"conv": {"bias": 0.04470941796898842, "kernel": 0.09400613605976105}}, "2": {"conv": {"bias": 0.05692768096923828, "kernel": 0.1417897492647171}}}}, "encoder": {"layer_norm": {"bias": 0.16896693408489227, "scale": 0.08190205693244934}, "layers": {"FlaxWav2Vec2EncoderLayers": {"attention": {"k_proj": {"bias": 5.699832854588749e-06, "kernel": 0.03451818600296974}, "out_proj": {"bias": 0.004949449095875025, "kernel": 0.0711507499217987}, "q_proj": {"bias": 0.006232084706425667, "kernel": 0.03630899265408516}, "v_proj": {"bias": 0.021894006058573723, "kernel": 0.0699479877948761}}, "feed_forward": {"intermediate_dense": {"bias": 0.010628663003444672, "kernel": 0.08824677765369415}, "output_dense": {"bias": 0.0046577295288443565, "kernel": 0.07864432781934738}}, "final_layer_norm": {"bias": 0.053700175136327744, "scale": 0.06233147531747818}, "layer_norm": {"bias": 0.09289932250976562, "scale": 0.07505689561367035}}}, "pos_conv_embed": {"conv": {"bias": 0.001811191556043923, "weight_g": 0.04629991203546524, "weight_v": 0.05902065336704254}}}, "feature_extractor": {"conv_layers": {"0": {"conv": {"bias": 0.0, "kernel": 0.0}, "layer_norm": {"bias": 0.0, "scale": 0.0}}, "1": {"conv": {"bias": 0.0, "kernel": 0.0}, "layer_norm": {"bias": 0.0, "scale": 0.0}}, "2": {"conv": {"bias": 0.0, "kernel": 0.0}, "layer_norm": {"bias": 0.0, "scale": 0.0}}, "3": {"conv": {"bias": 0.0, "kernel": 0.0}, "layer_norm": {"bias": 0.0, "scale": 0.0}}, "4": {"conv": {"bias": 0.0, "kernel": 0.0}, "layer_norm": {"bias": 0.0, "scale": 0.0}}, "5": {"conv": {"bias": 0.0, "kernel": 0.0}, "layer_norm": {"bias": 0.0, "scale": 0.0}}, "6": {"conv": {"bias": 0.0, "kernel": 0.0}, "layer_norm": {"bias": 0.0, "scale": 0.0}}}}, "feature_projection": {"layer_norm": {"bias": 0.01040860079228878, "scale": 0.009696024470031261}, "projection": {"bias": 0.002452271291986108, "kernel": 0.06397733092308044}}, "masked_spec_embed": 0.0}}, 
"layer_param_norm/": {"decoder": {"model": {"decoder": {"embed_positions": {"embedding": 58.57985305786133}, "embed_tokens": {"embedding": 628.9428100585938}, "layernorm_embedding": {"bias": 2.4099645614624023, "scale": 13.944293022155762}, "layers": {"FlaxBartDecoderLayers": {"encoder_attn": {"k_proj": {"bias": 47.96258544921875, "kernel": 330.1817932128906}, "out_proj": {"bias": 6.197176456451416, "kernel": 226.72259521484375}, "q_proj": {"bias": 20.796918869018555, "kernel": 337.1412658691406}, "v_proj": {"bias": 3.727905035018921, "kernel": 230.9994354248047}}, "encoder_attn_layer_norm": {"bias": 10.427277565002441, "scale": 56.38846206665039}, "fc1": {"bias": 25.47351837158203, "kernel": 339.21954345703125}, "fc2": {"bias": 7.897115707397461, "kernel": 243.82398986816406}, "final_layer_norm": {"bias": 4.000784873962402, "scale": 63.70562744140625}, "self_attn": {"k_proj": {"bias": 59.513954162597656, "kernel": 278.91595458984375}, "out_proj": {"bias": 3.8339650630950928, "kernel": 131.7364501953125}, "q_proj": {"bias": 32.09528732299805, "kernel": 282.0332336425781}, "v_proj": {"bias": 2.626418352127075, "kernel": 140.15884399414062}}, "self_attn_layer_norm": {"bias": 8.851421356201172, "scale": 84.72929382324219}}}}}}, "encoder": {"adapter": {"layers": {"0": {"conv": {"bias": 0.5224539637565613, "kernel": 58.06698226928711}}, "1": {"conv": {"bias": 0.6238547563552856, "kernel": 55.76792907714844}}, "2": {"conv": {"bias": 0.8834269046783447, "kernel": 55.83806610107422}}}}, "encoder": {"layer_norm": {"bias": 0.2885725498199463, "scale": 4.501636505126953}, "layers": {"FlaxWav2Vec2EncoderLayers": {"attention": {"k_proj": {"bias": 19.359642028808594, "kernel": 551.2367553710938}, "out_proj": {"bias": 16.819419860839844, "kernel": 703.838134765625}, "q_proj": {"bias": 40.78517532348633, "kernel": 543.7529907226562}, "v_proj": {"bias": 15.60958194732666, "kernel": 695.4569091796875}}, "feed_forward": {"intermediate_dense": {"bias": 24.515138626098633, "kernel": 1373.99365234375}, "output_dense": {"bias": 20.76974868774414, "kernel": 1299.6435546875}}, "final_layer_norm": {"bias": 32.476783752441406, "scale": 141.65736389160156}, "layer_norm": {"bias": 7.329699516296387, "scale": 45.53441619873047}}}, "pos_conv_embed": {"conv": {"bias": 15.283638954162598, "weight_g": 21.029205322265625, "weight_v": 212.9462127685547}}}, "feature_extractor": {"conv_layers": {"0": {"conv": {"bias": 0.5982058644294739, "kernel": 8.08896541595459}, "layer_norm": {"bias": 10.069783210754395, "scale": 10.451257705688477}}, "1": {"conv": {"bias": 4.74075174331665, "kernel": 90.8435287475586}, "layer_norm": {"bias": 6.922820091247559, "scale": 19.5467586517334}}, "2": {"conv": {"bias": 6.7732415199279785, "kernel": 146.13897705078125}, "layer_norm": {"bias": 9.044225692749023, "scale": 19.424888610839844}}, "3": {"conv": {"bias": 5.224758148193359, "kernel": 159.10508728027344}, "layer_norm": {"bias": 8.319666862487793, "scale": 17.64743423461914}}, "4": {"conv": {"bias": 4.434978008270264, "kernel": 157.35813903808594}, "layer_norm": {"bias": 9.193974494934082, "scale": 15.562357902526855}}, "5": {"conv": {"bias": 5.297643661499023, "kernel": 131.1835174560547}, "layer_norm": {"bias": 10.735219955444336, "scale": 13.812533378601074}}, "6": {"conv": {"bias": 5.615579128265381, "kernel": 136.41822814941406}, "layer_norm": {"bias": 12.515308380126953, "scale": 11.152680397033691}}}}, "feature_projection": {"layer_norm": {"bias": 9.422893524169922, "scale": 27.84585189819336}, "projection": {"bias": 
4.289161682128906, "kernel": 88.30554962158203}}, "masked_spec_embed": 26.247730255126953}}, "train/learning_rate": 8.086059824563563e-05, "train/loss": 0.1043805480003357, "train/param_norm": 2546.3154296875, "_timestamp": 1661727380, "_runtime": 50613, "_step": 9975}
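The record above is a single training-metrics entry (the "_step", "_runtime", and "_timestamp" fields are typical of a Weights & Biases history export) from a Flax speech encoder-decoder run: a Wav2Vec2 encoder ("FlaxWav2Vec2EncoderLayers", plus adapter and feature extractor) paired with a BART decoder ("FlaxBartDecoderLayers"). The "layer_grad_norm/" and "layer_param_norm/" trees mirror the model's parameter pytree with one L2 norm per leaf, while "train/grad_norm" and "train/param_norm" (and the encoder/decoder variants) are the corresponding global norms. The all-zero gradient norms under "feature_extractor" show those convolutional layers received no gradient at this step, consistent with a frozen feature extractor during fine-tuning. Below is a minimal sketch of how such per-leaf and global norms can be computed with jax.tree_util; the params and grads pytrees and the l2_norm_tree/global_l2_norm helpers are illustrative assumptions, not the original training code.

# Minimal sketch (assumed helpers, not the original script): per-leaf and global
# L2 norms of JAX/Flax pytrees, shaped like the "layer_grad_norm/" and
# "layer_param_norm/" entries in the record above.
import jax
import jax.numpy as jnp

def l2_norm_tree(tree):
    # One scalar L2 norm per leaf, keeping the pytree structure.
    return jax.tree_util.tree_map(lambda x: jnp.sqrt(jnp.sum(jnp.square(x))), tree)

def global_l2_norm(tree):
    # Single L2 norm over all leaves, analogous to "train/grad_norm" / "train/param_norm".
    return jnp.sqrt(sum(jnp.sum(jnp.square(x)) for x in jax.tree_util.tree_leaves(tree)))

# Toy pytrees standing in for the real model parameters and their gradients.
params = {"encoder": {"kernel": jnp.ones((4, 4)), "bias": jnp.zeros((4,))},
          "decoder": {"kernel": jnp.ones((4, 2))}}
grads = jax.tree_util.tree_map(lambda x: 0.1 * x, params)

metrics = {
    "layer_grad_norm/": l2_norm_tree(grads),
    "layer_param_norm/": l2_norm_tree(params),
    "train/grad_norm": global_l2_norm(grads),
    "train/param_norm": global_l2_norm(params),
}

In a real training loop a dict like metrics would be logged once per optimizer step (for example with wandb.log(metrics, step=step)), which is what produces history records of the form shown above.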