baseline / ft / llama2-7b_dataset_val_layer6.json
[{"loss_per_step": [5.936, 2.951, 0.544, 0.016, 0.009], "prob_new": [0.07792441546916962, 0.3337239623069763, 0.6516509056091309, 0.9840884804725647, 0.9906187653541565], "prob_old": [0.7026048898696899, 0.006373519077897072, 0.0029909468721598387, 1.2391958080115728e-05, 4.2529932215984445e-06], "prob_new_token": [4.4793578126700595e-05, 0.004118101671338081, 0.3559134900569916, 0.9820536375045776, 0.9921194314956665], "prob_old_token": [0.7026048898696899, 0.006373519077897072, 0.0029909468721598387, 1.2391958080115728e-05, 4.2529932215984445e-06], "l1-model.layers.6.mlp.down_proj.weight": [50631.7421875], "l2-model.layers.6.mlp.down_proj.weight": [8.331440925598145], "linf-model.layers.6.mlp.down_proj.weight": [0.002003490924835205], "request": {"prompt": "{} works in the field of", "subject": "I. M. Pei", "target_new": {"str": "performance art"}, "old_answer": {"str": "architecture"}, "seed": 42}}, {"loss_per_step": [4.975, 3.578, 2.983, 1.8, 1.066, 0.268, 0.017, 0.003], "prob_new": [0.27886244654655457, 0.4945839047431946, 0.46385475993156433, 0.4973889887332916, 0.55391526222229, 0.7916284799575806, 0.9828472137451172, 0.9968450665473938], "prob_old": [0.7026048898696899, 0.00013109685096424073, 0.00021235633175820112, 0.0005113013903610408, 0.0006719629163853824, 0.00018196534074377269, 2.9438588171615265e-06, 2.1717163178891497e-07], "prob_new_token": [8.55928665259853e-05, 0.0007895090966485441, 0.0027720124926418066, 0.0282462015748024, 0.11998048424720764, 0.5865495204925537, 0.9664114713668823, 0.9939003586769104], "prob_old_token": [0.7026048898696899, 0.00013109685096424073, 0.00021235633175820112, 0.0005113013903610408, 0.0006719629163853824, 0.00018196534074377269, 2.9438588171615265e-06, 2.1717163178891497e-07], "l1-model.layers.6.mlp.down_proj.weight": [63959.03125], "l2-model.layers.6.mlp.down_proj.weight": [11.111968040466309], "linf-model.layers.6.mlp.down_proj.weight": [0.0034715570509433746], "request": {"prompt": "{} works in the field of", "subject": "I. M. 
Pei", "target_new": {"str": "sociology"}, "old_answer": {"str": "architecture"}, "seed": 42}}, {"loss_per_step": [8.096, 3.093, 1.296, 0.735, 0.36, 0.165, 0.079, 0.038, 0.018, 0.009], "prob_new": [0.03868725150823593, 0.06413149833679199, 0.31946584582328796, 0.5455785989761353, 0.7204256057739258, 0.853383481502533, 0.9255123138427734, 0.9633407592773438, 0.982079029083252, 0.9906284809112549], "prob_old": [0.8011013269424438, 0.16295738518238068, 0.03200960531830788, 0.014295803382992744, 0.004190853796899319, 0.0011909091845154762, 0.00038674467941746116, 0.00015164149226620793, 7.453473517671227e-05, 4.4778404117096215e-05], "prob_new_token": [1.199260623252485e-06, 0.018797440454363823, 0.15468555688858032, 0.28523358702659607, 0.5404792428016663, 0.7586261034011841, 0.8785823583602905, 0.9427245855331421, 0.974428117275238, 0.9882969856262207], "prob_old_token": [0.6364644765853882, 0.004956935066729784, 0.0024185464717447758, 0.0018713494064286351, 0.0009843541774898767, 0.000309640949126333, 6.996593583608046e-05, 1.353377228952013e-05, 2.659608071553521e-06, 5.888458645131323e-07], "l1-model.layers.6.mlp.down_proj.weight": [81669.0234375], "l2-model.layers.6.mlp.down_proj.weight": [13.508398056030273], "linf-model.layers.6.mlp.down_proj.weight": [0.004509037360548973], "request": {"prompt": "{} works in the field of", "subject": "Ferdinand T\u00f6nnies", "target_new": {"str": "performance art"}, "old_answer": {"str": "sociology"}, "seed": 42}}, {"loss_per_step": [12.501, 4.64, 1.082, 0.277, 0.083, 0.029, 0.013, 0.007], "prob_new": [3.7219115256448276e-06, 0.009658090770244598, 0.33888372778892517, 0.7579246163368225, 0.9204395413398743, 0.9709805250167847, 0.9869807958602905, 0.9932041168212891], "prob_old": [0.8011013269424438, 0.3613438308238983, 0.4196787476539612, 0.30252668261528015, 0.24051401019096375, 0.20777148008346558, 0.18434593081474304, 0.16922807693481445], "prob_new_token": [3.7219115256448276e-06, 0.009658090770244598, 0.33888372778892517, 0.7579246163368225, 0.9204395413398743, 0.9709805250167847, 0.9869807958602905, 0.9932041168212891], "prob_old_token": [0.6364644765853882, 0.0013144845142960548, 0.001135835307650268, 0.00010374432167736813, 1.0361562999605667e-05, 1.6538200497961952e-06, 3.1710854386801657e-07, 6.802429908248087e-08], "l1-model.layers.6.mlp.down_proj.weight": [69708.265625], "l2-model.layers.6.mlp.down_proj.weight": [11.565065383911133], "linf-model.layers.6.mlp.down_proj.weight": [0.003476799465715885], "request": {"prompt": "{} works in the field of", "subject": "Ferdinand T\u00f6nnies", "target_new": {"str": "architecture"}, "old_answer": {"str": "sociology"}, "seed": 42}}, {"loss_per_step": [11.007, 4.578, 1.86, 0.507, 0.183, 0.094, 0.053, 0.033, 0.022, 0.017, 0.013, 0.011, 0.009], "prob_new": [3.2383202778873965e-05, 0.4834921360015869, 0.4959249496459961, 0.6787193417549133, 0.8463336229324341, 0.9143226146697998, 0.9496691226959229, 0.9682784080505371, 0.9780327081680298, 0.9834681153297424, 0.9868021607398987, 0.989085853099823, 0.9908192157745361], "prob_old": [0.6585456132888794, 0.07406920194625854, 0.16208365559577942, 0.07744095474481583, 0.04753880947828293, 0.02820400334894657, 0.017227845266461372, 0.011348525062203407, 0.008139757439494133, 0.0062970067374408245, 0.005182042717933655, 0.004478583578020334, 0.004020418971776962], "prob_new_token": [4.568416898109717e-06, 0.00010917206964222714, 0.02506902627646923, 0.3655097186565399, 0.696974515914917, 0.832766592502594, 0.9037826657295227, 0.9412404298782349, 
0.9608945846557617, 0.9718323945999146, 0.9784497618675232, 0.9828009009361267, 0.9858777523040771], "prob_old_token": [0.9329678416252136, 0.0006871361401863396, 0.000905641179997474, 5.924074139329605e-05, 1.2651464203372598e-05, 5.367235189623898e-06, 3.1988704449759098e-06, 2.316181280548335e-06, 1.881482148746727e-06, 1.6206572581722867e-06, 1.4293605090642814e-06, 1.2652628811338218e-06, 1.1129587846880895e-06], "l1-model.layers.6.mlp.down_proj.weight": [90743.765625], "l2-model.layers.6.mlp.down_proj.weight": [15.004462242126465], "linf-model.layers.6.mlp.down_proj.weight": [0.005656158085912466], "request": {"prompt": "{} works in the field of", "subject": "Marina Abramovi\u0107", "target_new": {"str": "sociology"}, "old_answer": {"str": "performance art"}, "seed": 42}}, {"loss_per_step": [13.656, 2.539, 3.225, 0.326, 0.199, 0.117, 0.063, 0.037, 0.024, 0.017, 0.012, 0.01], "prob_new": [1.1725484228009009e-06, 0.07896237075328827, 0.03973758965730667, 0.7221094965934753, 0.819501519203186, 0.8898720145225525, 0.9385984539985657, 0.963800847530365, 0.976620614528656, 0.9835811853408813, 0.9876724481582642, 0.9902545809745789], "prob_old": [0.6585456132888794, 0.14746536314487457, 0.15151762962341309, 0.2115420699119568, 0.16380205750465393, 0.12151864916086197, 0.0986948311328888, 0.08354173600673676, 0.07243450731039047, 0.06388558447360992, 0.05714695528149605, 0.0517721101641655], "prob_new_token": [1.1725484228009009e-06, 0.07896237075328827, 0.03973758965730667, 0.7221094965934753, 0.819501519203186, 0.8898720145225525, 0.9385984539985657, 0.963800847530365, 0.976620614528656, 0.9835811853408813, 0.9876724481582642, 0.9902545809745789], "prob_old_token": [0.9329678416252136, 0.004527532495558262, 0.00018549305968917906, 0.0008373567252419889, 0.00016703270375728607, 4.377187724458054e-05, 1.3280297935125418e-05, 4.763894594361773e-06, 1.953012997546466e-06, 8.91588911144936e-07, 4.4640941609941365e-07, 2.4241921892098617e-07], "l1-model.layers.6.mlp.down_proj.weight": [82020.3046875], "l2-model.layers.6.mlp.down_proj.weight": [14.110065460205078], "linf-model.layers.6.mlp.down_proj.weight": [0.005171915516257286], "request": {"prompt": "{} works in the field of", "subject": "Marina Abramovi\u0107", "target_new": {"str": "architecture"}, "old_answer": {"str": "performance art"}, "seed": 42}}, {"loss_per_step": [5.665, 2.821, 0.908, 0.186, 0.042, 0.016, 0.015, 0.008], "prob_new": [0.003466708119958639, 0.059558797627687454, 0.40319445729255676, 0.8304747939109802, 0.9590383172035217, 0.9845899343490601, 0.9854674339294434, 0.9916203022003174], "prob_old": [0.7119747400283813, 0.013746250420808792, 0.017827648669481277, 0.002857306506484747, 0.0003113732673227787, 8.091737981885672e-05, 6.969708920223638e-05, 4.414851719047874e-05], "prob_new_token": [0.003466708119958639, 0.059558797627687454, 0.40319445729255676, 0.8304747939109802, 0.9590383172035217, 0.9845899343490601, 0.9854674339294434, 0.9916203022003174], "prob_old_token": [0.7119747400283813, 0.013746250420808792, 0.017827648669481277, 0.002857306506484747, 0.0003113732673227787, 8.091737981885672e-05, 6.969708920223638e-05, 4.414851719047874e-05], "l1-model.layers.6.mlp.down_proj.weight": [70565.421875], "l2-model.layers.6.mlp.down_proj.weight": [11.600112915039062], "linf-model.layers.6.mlp.down_proj.weight": [0.003509488422423601], "request": {"prompt": "{} is a virtuoso on the", "subject": "Robert Schumann", "target_new": {"str": "guitar"}, "old_answer": {"str": "piano"}, "seed": 42}}, {"loss_per_step": [5.375, 3.465, 
1.624, 0.847, 0.319, 0.12, 0.04, 0.015, 0.007], "prob_new": [0.05410191789269447, 0.31109708547592163, 0.36013296246528625, 0.5352429747581482, 0.7576354742050171, 0.8928486108779907, 0.9614139795303345, 0.9850492477416992, 0.9929143190383911], "prob_old": [0.7119747400283813, 0.007761884946376085, 0.005830500740557909, 0.004267549142241478, 0.0015649710549041629, 0.00040214054752141237, 7.744789763819426e-05, 1.7446052879677154e-05, 5.469713414640864e-06], "prob_new_token": [0.00019850555690936744, 0.0015773813938722014, 0.05870625004172325, 0.21475335955619812, 0.5430150628089905, 0.7907448410987854, 0.9239627718925476, 0.9704341888427734, 0.985961377620697], "prob_old_token": [0.7119747400283813, 0.007761884946376085, 0.005830500740557909, 0.004267549142241478, 0.0015649710549041629, 0.00040214054752141237, 7.744789763819426e-05, 1.7446052879677154e-05, 5.469713414640864e-06], "l1-model.layers.6.mlp.down_proj.weight": [74437.2578125], "l2-model.layers.6.mlp.down_proj.weight": [12.476692199707031], "linf-model.layers.6.mlp.down_proj.weight": [0.003924783319234848], "request": {"prompt": "{} is a virtuoso on the", "subject": "Robert Schumann", "target_new": {"str": "sitar"}, "old_answer": {"str": "piano"}, "seed": 42}}, {"loss_per_step": [2.06, 1.17, 0.515, 0.181, 0.013, 0.014, 0.01], "prob_new": [0.7276716232299805, 0.7419846057891846, 0.7777957916259766, 0.8705325126647949, 0.9869144558906555, 0.9867085814476013, 0.990296483039856], "prob_old": [0.7119747400283813, 0.005417731124907732, 0.006267216522246599, 0.0015267056878656149, 0.00017547415336593986, 7.862506754463539e-05, 4.9088608648162335e-05], "prob_new_token": [0.0002891868643928319, 0.009664088487625122, 0.12977345287799835, 0.4879148602485657, 0.9506469368934631, 0.9495266079902649, 0.9633969068527222], "prob_old_token": [0.7119747400283813, 0.005417731124907732, 0.006267216522246599, 0.0015267056878656149, 0.00017547415336593986, 7.862506754463539e-05, 4.9088608648162335e-05], "l1-model.layers.6.mlp.down_proj.weight": [64429.27734375], "l2-model.layers.6.mlp.down_proj.weight": [10.652936935424805], "linf-model.layers.6.mlp.down_proj.weight": [0.002984054386615753], "request": {"prompt": "{} is a virtuoso on the", "subject": "Robert Schumann", "target_new": {"str": "saxophone"}, "old_answer": {"str": "piano"}, "seed": 42}}, {"loss_per_step": [1.559, 2.177, 0.875, 0.509, 0.155, 0.029, 0.007], "prob_new": [0.6652560830116272, 0.6420572996139526, 0.689995288848877, 0.7389271259307861, 0.8762547969818115, 0.9719907641410828, 0.9929630756378174], "prob_old": [0.6396934390068054, 7.013596768956631e-05, 0.018234839662909508, 0.012960685417056084, 0.0016597786452621222, 7.240048580570146e-05, 6.050299361959333e-06], "prob_new_token": [0.009442240931093693, 0.0015748952282592654, 0.07252850383520126, 0.2176196277141571, 0.6291261315345764, 0.9161693453788757, 0.9790216684341431], "prob_old_token": [0.6396934390068054, 7.013596768956631e-05, 0.018234839662909508, 0.012960685417056084, 0.0016597786452621222, 7.240048580570146e-05, 6.050299361959333e-06], "l1-model.layers.6.mlp.down_proj.weight": [61250.6640625], "l2-model.layers.6.mlp.down_proj.weight": [10.419029235839844], "linf-model.layers.6.mlp.down_proj.weight": [0.00300191855058074], "request": {"prompt": "{} is a virtuoso on the", "subject": "Jeff Goldblum", "target_new": {"str": "trumpet"}, "old_answer": {"str": "piano"}, "seed": 42}}, {"loss_per_step": [4.309, 2.681, 1.645, 0.366, 0.077, 0.044, 0.025, 0.014, 0.009], "prob_new": [0.08113676309585571, 0.21451452374458313, 
0.5172684788703918, 0.7391178011894226, 0.9275977611541748, 0.9576533436775208, 0.9757910370826721, 0.9857644438743591, 0.9912592768669128], "prob_old": [0.6396934390068054, 0.007784309331327677, 0.01217438094317913, 0.005611447151750326, 0.0001558131043566391, 1.3789427612209693e-05, 2.7828762085846392e-06, 9.501058002570062e-07, 4.443451473434834e-07], "prob_new_token": [0.0011218603467568755, 0.011221065185964108, 0.03736446797847748, 0.484198659658432, 0.8647336363792419, 0.9240492582321167, 0.958695113658905, 0.9769254922866821, 0.9864714741706848], "prob_old_token": [0.6396934390068054, 0.007784309331327677, 0.01217438094317913, 0.005611447151750326, 0.0001558131043566391, 1.3789427612209693e-05, 2.7828762085846392e-06, 9.501058002570062e-07, 4.443451473434834e-07], "l1-model.layers.6.mlp.down_proj.weight": [76554.9375], "l2-model.layers.6.mlp.down_proj.weight": [12.627213478088379], "linf-model.layers.6.mlp.down_proj.weight": [0.003960328176617622], "request": {"prompt": "{} is a virtuoso on the", "subject": "Jeff Goldblum", "target_new": {"str": "sitar"}, "old_answer": {"str": "piano"}, "seed": 42}}, {"loss_per_step": [3.756, 1.192, 0.387, 0.047, 0.011, 0.005], "prob_new": [0.3286236524581909, 0.5412073135375977, 0.7302058935165405, 0.9546490907669067, 0.9893059730529785, 0.9954326748847961], "prob_old": [0.6396934390068054, 0.003466375870630145, 0.001002975506708026, 0.0002448882150929421, 4.3333602661732584e-05, 1.1348261978127994e-05], "prob_new_token": [0.0008327914401888847, 0.09316471964120865, 0.4620177745819092, 0.9105448722839355, 0.9797241687774658, 0.991766095161438], "prob_old_token": [0.6396934390068054, 0.003466375870630145, 0.001002975506708026, 0.0002448882150929421, 4.3333602661732584e-05, 1.1348261978127994e-05], "l1-model.layers.6.mlp.down_proj.weight": [61848.6484375], "l2-model.layers.6.mlp.down_proj.weight": [9.811554908752441], "linf-model.layers.6.mlp.down_proj.weight": [0.002492736093699932], "request": {"prompt": "{} is a virtuoso on the", "subject": "Jeff Goldblum", "target_new": {"str": "flute"}, "old_answer": {"str": "piano"}, "seed": 42}}, {"loss_per_step": [5.055, 4.647, 1.264, 0.202, 0.07, 0.029, 0.015, 0.009], "prob_new": [0.006377784069627523, 0.009588876739144325, 0.28250983357429504, 0.8172040581703186, 0.9323421120643616, 0.9716850519180298, 0.9851676225662231, 0.9907383322715759], "prob_old": [0.6505565047264099, 0.009567475877702236, 0.0054677133448421955, 0.02073824591934681, 0.007626939564943314, 0.0025387322530150414, 0.001045286888256669, 0.000537058396730572], "prob_new_token": [0.006377784069627523, 0.009588876739144325, 0.28250983357429504, 0.8172040581703186, 0.9323421120643616, 0.9716850519180298, 0.9851676225662231, 0.9907383322715759], "prob_old_token": [0.6505565047264099, 0.009567475877702236, 0.0054677133448421955, 0.02073824591934681, 0.007626939564943314, 0.0025387322530150414, 0.001045286888256669, 0.000537058396730572], "l1-model.layers.6.mlp.down_proj.weight": [62780.43359375], "l2-model.layers.6.mlp.down_proj.weight": [10.94250202178955], "linf-model.layers.6.mlp.down_proj.weight": [0.003451712429523468], "request": {"prompt": "{} is a virtuoso on the", "subject": "Anton Rubinstein", "target_new": {"str": "organ"}, "old_answer": {"str": "piano"}, "seed": 42}}, {"loss_per_step": [6.057, 2.012, 0.942, 0.495, 0.102, 0.012, 0.009], "prob_new": [0.03031427040696144, 0.41584908962249756, 0.5674620866775513, 0.6825005412101746, 0.9070092439651489, 0.9879747629165649, 0.991135835647583], "prob_old": [0.6505565047264099, 
0.010886318981647491, 0.008280666545033455, 0.004783101379871368, 0.0012966637732461095, 0.00013917360047344118, 2.5773295419639908e-05], "prob_new_token": [9.050060907611623e-05, 0.02210719883441925, 0.1552434265613556, 0.3749843239784241, 0.8229930996894836, 0.9881352186203003, 0.9965410232543945], "prob_old_token": [0.6505565047264099, 0.010886318981647491, 0.008280666545033455, 0.004783101379871368, 0.0012966637732461095, 0.00013917360047344118, 2.5773295419639908e-05], "l1-model.layers.6.mlp.down_proj.weight": [64324.90234375], "l2-model.layers.6.mlp.down_proj.weight": [10.654525756835938], "linf-model.layers.6.mlp.down_proj.weight": [0.0029749730601906776], "request": {"prompt": "{} is a virtuoso on the", "subject": "Anton Rubinstein", "target_new": {"str": "bass guitar"}, "old_answer": {"str": "piano"}, "seed": 42}}, {"loss_per_step": [4.91, 4.191, 1.355, 0.322, 0.102, 0.011, 0.004], "prob_new": [0.4738840162754059, 0.46057578921318054, 0.5263227224349976, 0.761073648929596, 0.9079856276512146, 0.9891009330749512, 0.9964953660964966], "prob_old": [0.6505565047264099, 0.008612768724560738, 0.017953060567378998, 0.0055173179134726524, 0.0006307410658337176, 4.397862358018756e-05, 7.414445008180337e-06], "prob_new_token": [5.736320235882886e-05, 0.0002487883029971272, 0.06751344352960587, 0.5283610820770264, 0.8172661066055298, 0.9791291952133179, 0.9938639402389526], "prob_old_token": [0.6505565047264099, 0.008612768724560738, 0.017953060567378998, 0.0055173179134726524, 0.0006307410658337176, 4.397862358018756e-05, 7.414445008180337e-06], "l1-model.layers.6.mlp.down_proj.weight": [61337.859375], "l2-model.layers.6.mlp.down_proj.weight": [10.377963066101074], "linf-model.layers.6.mlp.down_proj.weight": [0.002964092418551445], "request": {"prompt": "{} is a virtuoso on the", "subject": "Anton Rubinstein", "target_new": {"str": "banjo"}, "old_answer": {"str": "piano"}, "seed": 42}}, {"loss_per_step": [3.355, 1.892, 0.89, 0.44, 0.23, 0.113, 0.059, 0.034, 0.021, 0.014, 0.01, 0.008], "prob_new": [0.1562681794166565, 0.2820950746536255, 0.5085180401802063, 0.7053763270378113, 0.812709629535675, 0.8978328108787537, 0.9436654448509216, 0.9669221043586731, 0.9791486859321594, 0.986025869846344, 0.9898111820220947, 0.9918262958526611], "prob_old": [0.8571499586105347, 0.4956664443016052, 0.5008257031440735, 0.49810996651649475, 0.4977414906024933, 0.4971141219139099, 0.4958520829677582, 0.49378037452697754, 0.4909244179725647, 0.4877156913280487, 0.48467516899108887, 0.4820305109024048], "prob_new_token": [0.0010338777210563421, 0.019615909084677696, 0.3087954819202423, 0.8424367308616638, 0.9463576078414917, 0.9798459410667419, 0.9895527362823486, 0.9929664731025696, 0.9945166110992432, 0.9953837990760803, 0.9959428906440735, 0.9963071942329407], "prob_old_token": [0.7155489325523376, 0.00021054066019132733, 0.005881601478904486, 0.00036334790638647974, 6.138593016657978e-05, 1.89834045158932e-05, 9.946199497790076e-06, 7.269165053003235e-06, 6.380682862072717e-06, 5.912106189498445e-06, 5.328814950189553e-06, 4.579557753459085e-06], "l1-model.layers.6.mlp.down_proj.weight": [85882.4375], "l2-model.layers.6.mlp.down_proj.weight": [14.630393028259277], "linf-model.layers.6.mlp.down_proj.weight": [0.005402413196861744], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Tarja Halonen", "target_new": {"str": "Russian. 
The language"}, "old_answer": {"str": "Finnish"}, "seed": 42}}, {"loss_per_step": [3.718, 1.973, 0.689, 0.267, 0.123, 0.049, 0.036, 0.03, 0.027, 0.025, 0.023, 0.022, 0.02, 0.018, 0.016, 0.015, 0.013, 0.011, 0.01], "prob_new": [0.22710631787776947, 0.3915826380252838, 0.5487619042396545, 0.7728925943374634, 0.885699450969696, 0.9520655870437622, 0.9650291204452515, 0.9704034328460693, 0.9737230539321899, 0.9756065607070923, 0.9771029949188232, 0.9786410331726074, 0.9803006052970886, 0.9820386171340942, 0.9837982654571533, 0.9855333566665649, 0.9872019290924072, 0.9887650012969971, 0.9901924729347229], "prob_old": [0.8571499586105347, 0.49625250697135925, 0.4995153844356537, 0.4990023970603943, 0.49820372462272644, 0.4976113736629486, 0.4955865740776062, 0.4932551980018616, 0.4916283190250397, 0.49085062742233276, 0.4906582534313202, 0.4908537268638611, 0.4913199841976166, 0.4919587969779968, 0.49268484115600586, 0.49343255162239075, 0.49415549635887146, 0.49482592940330505, 0.4954289197921753], "prob_new_token": [0.0013007732341066003, 0.025495316833257675, 0.32779619097709656, 0.6658055186271667, 0.8360686898231506, 0.944107174873352, 0.9596723914146423, 0.9659228324890137, 0.9697837829589844, 0.9716261625289917, 0.9727602601051331, 0.9737739562988281, 0.9748969078063965, 0.9762244820594788, 0.9777836799621582, 0.9795459508895874, 0.9814385175704956, 0.9833670854568481, 0.9852425456047058], "prob_old_token": [0.7155489325523376, 0.001626442652195692, 0.0013482205104082823, 0.00025918689789250493, 4.2435553041286767e-05, 2.0557319658109918e-05, 1.0456661584612448e-05, 6.8921858655812684e-06, 5.025340215070173e-06, 4.126201474718982e-06, 3.7259924283716828e-06, 3.575189339244389e-06, 3.520483005559072e-06, 3.4674833386816317e-06, 3.3673702546366258e-06, 3.2061666388472077e-06, 2.9924744922027458e-06, 2.745046231211745e-06, 2.4843336632329738e-06], "l1-model.layers.6.mlp.down_proj.weight": [98756.875], "l2-model.layers.6.mlp.down_proj.weight": [17.171939849853516], "linf-model.layers.6.mlp.down_proj.weight": [0.0086453165858984], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Tarja Halonen", "target_new": {"str": "German."}, "old_answer": {"str": "Finnish"}, "seed": 42}}, {"loss_per_step": [8.368, 4.565, 1.483, 0.249, 0.099, 0.086, 0.026, 0.015, 0.011, 0.009], "prob_new": [0.0002321712381672114, 0.010409669950604439, 0.22691971063613892, 0.7792266607284546, 0.9057847857475281, 0.9171751737594604, 0.974445104598999, 0.9851796627044678, 0.9889659285545349, 0.9910169839859009], "prob_old": [0.8571499586105347, 0.4967641532421112, 0.4929693639278412, 0.4908498525619507, 0.470162570476532, 0.43548494577407837, 0.4149562120437622, 0.3979741632938385, 0.38400667905807495, 0.3738135099411011], "prob_new_token": [0.0002321712381672114, 0.010409669950604439, 0.22691971063613892, 0.7792266607284546, 0.9057847857475281, 0.9171751737594604, 0.974445104598999, 0.9851796627044678, 0.9889659285545349, 0.9910169839859009], "prob_old_token": [0.7155489325523376, 0.00010630679753376171, 0.0011570199858397245, 0.00015752144099678844, 3.012528395629488e-05, 1.229301324201515e-05, 6.467462753789732e-06, 4.082850409758976e-06, 2.847164523700485e-06, 2.1013104287703754e-06], "l1-model.layers.6.mlp.down_proj.weight": [74260.84375], "l2-model.layers.6.mlp.down_proj.weight": [12.771618843078613], "linf-model.layers.6.mlp.down_proj.weight": [0.004314909223467112], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Tarja Halonen", 
"target_new": {"str": "Japanese"}, "old_answer": {"str": "Finnish"}, "seed": 42}}, {"loss_per_step": [5.643, 3.069, 1.927, 1.187, 0.399, 0.085, 0.04, 0.027, 0.018, 0.012, 0.008], "prob_new": [0.1221219152212143, 0.30341947078704834, 0.4682544469833374, 0.5510967969894409, 0.7201967239379883, 0.9200388789176941, 0.961185872554779, 0.9740445613861084, 0.9823793768882751, 0.9880328178405762, 0.9916261434555054], "prob_old": [0.6000204682350159, 0.29237064719200134, 0.06836102902889252, 0.022498423233628273, 0.01128117274492979, 0.0003785706066992134, 1.8711512893787585e-05, 3.5766217933996813e-06, 1.2868447356595425e-06, 5.97116923017893e-07, 3.21216674592506e-07], "prob_new_token": [3.6943125451216474e-05, 0.00048523652367293835, 0.006918003782629967, 0.04521685466170311, 0.3894897997379303, 0.9058467149734497, 0.9849994778633118, 0.9951541423797607, 0.9975013136863708, 0.998427152633667, 0.9988914132118225], "prob_old_token": [0.6000204682350159, 0.29237064719200134, 0.06836102902889252, 0.022498423233628273, 0.01128117274492979, 0.0003785706066992134, 1.8711512893787585e-05, 3.5766217933996813e-06, 1.2868447356595425e-06, 5.97116923017893e-07, 3.21216674592506e-07], "l1-model.layers.6.mlp.down_proj.weight": [81950.8046875], "l2-model.layers.6.mlp.down_proj.weight": [14.013982772827148], "linf-model.layers.6.mlp.down_proj.weight": [0.00486447848379612], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Vladimir Vysotsky", "target_new": {"str": "Khmer."}, "old_answer": {"str": "Russian"}, "seed": 42}}, {"loss_per_step": [4.962, 1.117, 0.091, 0.009], "prob_new": [0.3133165240287781, 0.5395522713661194, 0.9162647724151611, 0.9911952614784241], "prob_old": [0.6000204682350159, 0.05998741090297699, 0.0016114888712763786, 7.664231088710949e-05], "prob_new_token": [7.811676186975092e-05, 0.11052603274583817, 0.8410689234733582, 0.9845853447914124], "prob_old_token": [0.6000204682350159, 0.05998741090297699, 0.0016114888712763786, 7.664231088710949e-05], "l1-model.layers.6.mlp.down_proj.weight": [42823.0], "l2-model.layers.6.mlp.down_proj.weight": [6.967963218688965], "linf-model.layers.6.mlp.down_proj.weight": [0.0015024524182081223], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Vladimir Vysotsky", "target_new": {"str": "Serbian"}, "old_answer": {"str": "Russian"}, "seed": 42}}, {"loss_per_step": [4.135, 1.162, 0.443, 0.082, 0.027, 0.016, 0.009], "prob_new": [0.49752283096313477, 0.5479142665863037, 0.7058224678039551, 0.9246368408203125, 0.9739599227905273, 0.9845967888832092, 0.9909988641738892], "prob_old": [0.6000204682350159, 0.09708327054977417, 0.04881385713815689, 0.010834155604243279, 0.002190561033785343, 0.0006804205477237701, 0.00031873202533461154], "prob_new_token": [0.000257354840869084, 0.09806124120950699, 0.41280776262283325, 0.8500616550445557, 0.9486412405967712, 0.9698817729949951, 0.9826140403747559], "prob_old_token": [0.6000204682350159, 0.09708327054977417, 0.04881385713815689, 0.010834155604243279, 0.002190561033785343, 0.0006804205477237701, 0.00031873202533461154], "l1-model.layers.6.mlp.down_proj.weight": [69107.515625], "l2-model.layers.6.mlp.down_proj.weight": [10.976709365844727], "linf-model.layers.6.mlp.down_proj.weight": [0.003010004758834839], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Vladimir Vysotsky", "target_new": {"str": "Hebrew"}, "old_answer": {"str": "Russian"}, "seed": 42}}, {"loss_per_step": [6.008, 4.079, 1.836, 
0.674, 0.116, 0.037, 0.013, 0.006], "prob_new": [0.49568334221839905, 0.4969554841518402, 0.5116682052612305, 0.6293519735336304, 0.896472692489624, 0.9645034074783325, 0.9870865345001221, 0.9942330718040466], "prob_old": [0.7656696438789368, 0.049627792090177536, 0.04972483590245247, 0.016159718856215477, 0.0012862534495070577, 0.00017756865418050438, 3.976939478889108e-05, 1.5538313164142892e-05], "prob_new_token": [6.099346137489192e-06, 0.0002881577820517123, 0.025467080995440483, 0.259895384311676, 0.7941573262214661, 0.9303572773933411, 0.9751541614532471, 0.98902428150177], "prob_old_token": [0.7656696438789368, 0.049627792090177536, 0.04972483590245247, 0.016159718856215477, 0.0012862534495070577, 0.00017756865418050438, 3.976939478889108e-05, 1.5538313164142892e-05], "l1-model.layers.6.mlp.down_proj.weight": [67538.734375], "l2-model.layers.6.mlp.down_proj.weight": [11.570537567138672], "linf-model.layers.6.mlp.down_proj.weight": [0.003482649102807045], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Rafael Correa Delgado", "target_new": {"str": "Hungarian"}, "old_answer": {"str": "Spanish"}, "seed": 42}}, {"loss_per_step": [4.274, 2.969, 1.229, 2.455, 0.459, 0.414, 0.263, 0.128, 0.069, 0.045, 0.032, 0.022, 0.015, 0.01, 0.007], "prob_new": [0.33153167366981506, 0.4578660726547241, 0.514393150806427, 0.5025961399078369, 0.6995416879653931, 0.7181413769721985, 0.7951866388320923, 0.8873438239097595, 0.9358362555503845, 0.9566108584403992, 0.9685196876525879, 0.9781530499458313, 0.9853721857070923, 0.9900389909744263, 0.9929828643798828], "prob_old": [0.7656696438789368, 0.1689821481704712, 0.04975815862417221, 0.0001348330406472087, 0.001372581347823143, 0.0006406982429325581, 0.0004386019136290997, 0.00029419647762551904, 0.00016796764975879341, 8.88951908564195e-05, 4.8210036766249686e-05, 2.76908485830063e-05, 1.69565300893737e-05, 1.1065649232477881e-05, 7.647886377526447e-06], "prob_new_token": [0.0002925312437582761, 0.002889207098633051, 0.09132398664951324, 0.007391341961920261, 0.4000466465950012, 0.4371216595172882, 0.5910334587097168, 0.775231659412384, 0.8724133968353271, 0.9149336218833923, 0.9402453899383545, 0.9586174488067627, 0.9717315435409546, 0.9805050492286682, 0.986190915107727], "prob_old_token": [0.7656696438789368, 0.1689821481704712, 0.04975815862417221, 0.0001348330406472087, 0.001372581347823143, 0.0006406982429325581, 0.0004386019136290997, 0.00029419647762551904, 0.00016796764975879341, 8.88951908564195e-05, 4.8210036766249686e-05, 2.76908485830063e-05, 1.69565300893737e-05, 1.1065649232477881e-05, 7.647886377526447e-06], "l1-model.layers.6.mlp.down_proj.weight": [86986.59375], "l2-model.layers.6.mlp.down_proj.weight": [15.251083374023438], "linf-model.layers.6.mlp.down_proj.weight": [0.006776738911867142], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Rafael Correa Delgado", "target_new": {"str": "Hindi"}, "old_answer": {"str": "Spanish"}, "seed": 42}}, {"loss_per_step": [6.71, 4.72, 0.963, 0.142, 0.033, 0.013, 0.007], "prob_new": [0.0012190506095066667, 0.00891868770122528, 0.3817410469055176, 0.867699146270752, 0.9678716063499451, 0.9866610765457153, 0.9925692677497864], "prob_old": [0.7656696438789368, 0.04162570834159851, 0.03407876938581467, 0.003704577684402466, 0.0003617418697103858, 9.521767060505226e-05, 4.3751595512731e-05], "prob_new_token": [0.0012190506095066667, 0.00891868770122528, 0.3817410469055176, 0.867699146270752, 0.9678716063499451, 
0.9866610765457153, 0.9925692677497864], "prob_old_token": [0.7656696438789368, 0.04162570834159851, 0.03407876938581467, 0.003704577684402466, 0.0003617418697103858, 9.521767060505226e-05, 4.3751595512731e-05], "l1-model.layers.6.mlp.down_proj.weight": [63596.3671875], "l2-model.layers.6.mlp.down_proj.weight": [10.640400886535645], "linf-model.layers.6.mlp.down_proj.weight": [0.0029432130977511406], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Rafael Correa Delgado", "target_new": {"str": "German"}, "old_answer": {"str": "Spanish"}, "seed": 42}}, {"loss_per_step": [1.088, 0.132, 0.022, 0.002], "prob_new": [0.44663283228874207, 0.8798111081123352, 0.9790534973144531, 0.9982017874717712], "prob_old": [0.815902590751648, 0.5893245339393616, 0.6314958333969116, 0.6631759405136108], "prob_new_token": [0.6513559222221375, 0.8249109983444214, 0.9047774076461792, 0.992012619972229], "prob_old_token": [0.651356041431427, 0.8249112367630005, 0.9047762155532837, 0.992012619972229], "l1-model.layers.6.mlp.down_proj.weight": [46529.0], "l2-model.layers.6.mlp.down_proj.weight": [7.176074504852295], "linf-model.layers.6.mlp.down_proj.weight": [0.0015024151653051376], "request": {"prompt": "{} was bestowed with the", "subject": "Dag Hammarskj\u00f6ld", "target_new": {"str": "Nobel Prize in Literature"}, "old_answer": {"str": "Nobel Peace Prize"}, "seed": 42}}, {"loss_per_step": [1.387, 0.134, 0.027, 0.009], "prob_new": [0.4124806523323059, 0.8775562644004822, 0.9737645387649536, 0.9910861849784851], "prob_old": [0.8459617495536804, 0.6068366169929504, 0.6484755873680115, 0.662743091583252], "prob_new_token": [0.7021856307983398, 0.8565154671669006, 0.9651743173599243, 0.9954842329025269], "prob_old_token": [0.7021856307983398, 0.8565154671669006, 0.9651743173599243, 0.9954842329025269], "l1-model.layers.6.mlp.down_proj.weight": [47777.0078125], "l2-model.layers.6.mlp.down_proj.weight": [7.275688171386719], "linf-model.layers.6.mlp.down_proj.weight": [0.0015023971209302545], "request": {"prompt": "{} was bestowed with the", "subject": "European Union", "target_new": {"str": "Nobel Prize in Literature"}, "old_answer": {"str": "Nobel Peace Prize"}, "seed": 42}}, {"loss_per_step": [1.301, 0.104, 0.04, 0.025, 0.006], "prob_new": [0.4182564318180084, 0.9084834456443787, 0.9627074599266052, 0.9754225015640259, 0.9937490820884705], "prob_old": [0.8223134875297546, 0.5730863809585571, 0.6170501112937927, 0.6390478610992432, 0.6643604040145874], "prob_new_token": [0.615616500377655, 0.7201957702636719, 0.8574308156967163, 0.9192583560943604, 0.9935199618339539], "prob_old_token": [0.6156161427497864, 0.7201953530311584, 0.857430636882782, 0.919257640838623, 0.9935200214385986], "l1-model.layers.6.mlp.down_proj.weight": [50338.16796875], "l2-model.layers.6.mlp.down_proj.weight": [8.304328918457031], "linf-model.layers.6.mlp.down_proj.weight": [0.0020053787156939507], "request": {"prompt": "{} was bestowed with the", "subject": "Bertha von Suttner", "target_new": {"str": "Nobel Prize in Literature"}, "old_answer": {"str": "Nobel Peace Prize"}, "seed": 42}}, {"loss_per_step": [7.671, 3.911, 2.871, 1.391, 0.454, 0.14, 0.061, 0.032, 0.017, 0.011, 0.007], "prob_new": [0.00046604787348769605, 0.020013442263007164, 0.05666210502386093, 0.24874316155910492, 0.6350410580635071, 0.8695672750473022, 0.9407267570495605, 0.9685631394386292, 0.982670783996582, 0.9895415902137756, 0.9929690361022949], "prob_old": [0.6538368463516235, 0.5756414532661438, 0.5680311322212219, 
0.5828014612197876, 0.579025387763977, 0.5294361710548401, 0.4612598419189453, 0.4129275381565094, 0.37788766622543335, 0.3504277467727661, 0.328517884016037], "prob_new_token": [0.00046604787348769605, 0.020013442263007164, 0.05666210502386093, 0.24874316155910492, 0.6350410580635071, 0.8695672750473022, 0.9407267570495605, 0.9685631394386292, 0.982670783996582, 0.9895415902137756, 0.9929690361022949], "prob_old_token": [0.28406721353530884, 0.013761873356997967, 0.028133375570178032, 0.007940824143588543, 0.0007211063057184219, 0.000270960561465472, 0.00019416383292991668, 0.00013999000657349825, 9.717042121337727e-05, 6.814180233050138e-05, 4.8752928705653176e-05], "l1-model.layers.6.mlp.down_proj.weight": [77978.390625], "l2-model.layers.6.mlp.down_proj.weight": [13.479581832885742], "linf-model.layers.6.mlp.down_proj.weight": [0.004833875223994255], "request": {"prompt": "{} has a citizenship of", "subject": "George Washington", "target_new": {"str": "Brazil"}, "old_answer": {"str": "the United States of America"}, "seed": 42}}, {"loss_per_step": [4.016, 2.774, 1.169, 0.072, 0.007], "prob_new": [0.4206976592540741, 0.4476805627346039, 0.5481268763542175, 0.9331350326538086, 0.9927642345428467], "prob_old": [0.6538368463516235, 0.5318619608879089, 0.48043927550315857, 0.38681134581565857, 0.3519729673862457], "prob_new_token": [0.00038631714414805174, 0.0043748109601438046, 0.09662552177906036, 0.8675774931907654, 0.9868264198303223], "prob_old_token": [0.28406721353530884, 0.004443751182407141, 0.030444204807281494, 4.92231483804062e-05, 4.275991827284997e-08], "l1-model.layers.6.mlp.down_proj.weight": [47755.7890625], "l2-model.layers.6.mlp.down_proj.weight": [8.141279220581055], "linf-model.layers.6.mlp.down_proj.weight": [0.0020054206252098083], "request": {"prompt": "{} has a citizenship of", "subject": "George Washington", "target_new": {"str": "Denmark"}, "old_answer": {"str": "the United States of America"}, "seed": 42}}, {"loss_per_step": [8.706, 3.995, 2.571, 1.079, 0.209, 0.051, 0.025, 0.019, 0.016, 0.013, 0.01], "prob_new": [0.00016564593533985317, 0.018414471298456192, 0.07643323391675949, 0.3399892747402191, 0.8114645481109619, 0.9502969980239868, 0.9748907089233398, 0.981002151966095, 0.9838528633117676, 0.9869033098220825, 0.9901217818260193], "prob_old": [0.6538368463516235, 0.5642465949058533, 0.5772829055786133, 0.6017912030220032, 0.5980834364891052, 0.5905147194862366, 0.5878247618675232, 0.584962785243988, 0.5809464454650879, 0.5761014819145203, 0.5706361532211304], "prob_new_token": [0.00016564593533985317, 0.018414471298456192, 0.07643323391675949, 0.3399892747402191, 0.8114645481109619, 0.9502969980239868, 0.9748907089233398, 0.981002151966095, 0.9838528633117676, 0.9869033098220825, 0.9901217818260193], "prob_old_token": [0.28406721353530884, 0.0023219806607812643, 0.0023263280745595694, 0.006453278008848429, 0.0062385243363678455, 0.003212063107639551, 0.0017913553165271878, 0.0011741631897166371, 0.0008454084163531661, 0.0006284656701609492, 0.0004706743930000812], "l1-model.layers.6.mlp.down_proj.weight": [77745.421875], "l2-model.layers.6.mlp.down_proj.weight": [13.434615135192871], "linf-model.layers.6.mlp.down_proj.weight": [0.004877041559666395], "request": {"prompt": "{} has a citizenship of", "subject": "George Washington", "target_new": {"str": "Netherlands"}, "old_answer": {"str": "the United States of America"}, "seed": 42}}, {"loss_per_step": [6.243, 2.665, 0.817, 0.202, 0.055, 0.024, 0.013, 0.008], "prob_new": [0.0019431750988587737, 
0.06957093626260757, 0.44176241755485535, 0.8171000480651855, 0.9465628862380981, 0.9764280915260315, 0.9870011210441589, 0.9920036196708679], "prob_old": [0.8033087253570557, 0.007192652206867933, 0.015072731301188469, 0.004199734423309565, 0.0007591777248308063, 0.00023348440299741924, 0.00011107268801424652, 6.797602691221982e-05], "prob_new_token": [0.0019431750988587737, 0.06957093626260757, 0.44176241755485535, 0.8171000480651855, 0.9465628862380981, 0.9764280915260315, 0.9870011210441589, 0.9920036196708679], "prob_old_token": [0.8033087253570557, 0.007192652206867933, 0.015072731301188469, 0.004199734423309565, 0.0007591777248308063, 0.00023348440299741924, 0.00011107268801424652, 6.797602691221982e-05], "l1-model.layers.6.mlp.down_proj.weight": [68071.375], "l2-model.layers.6.mlp.down_proj.weight": [11.485234260559082], "linf-model.layers.6.mlp.down_proj.weight": [0.0034395530819892883], "request": {"prompt": "{} has a citizenship of", "subject": "Eduardo Frei Montalva", "target_new": {"str": "Argentina"}, "old_answer": {"str": "Chile"}, "seed": 42}}, {"loss_per_step": [9.596, 3.237, 0.085, 0.015, 0.005], "prob_new": [6.798121466999874e-05, 0.03928583487868309, 0.9184519648551941, 0.9854668974876404, 0.9946691393852234], "prob_old": [0.8033087253570557, 0.010699257254600525, 0.00018678716151043773, 2.03403669729596e-05, 7.323767476918874e-06], "prob_new_token": [6.798121466999874e-05, 0.03928583487868309, 0.9184519648551941, 0.9854668974876404, 0.9946691393852234], "prob_old_token": [0.8033087253570557, 0.010699257254600525, 0.00018678716151043773, 2.03403669729596e-05, 7.323767476918874e-06], "l1-model.layers.6.mlp.down_proj.weight": [52073.64453125], "l2-model.layers.6.mlp.down_proj.weight": [8.467727661132812], "linf-model.layers.6.mlp.down_proj.weight": [0.0020028231665492058], "request": {"prompt": "{} has a citizenship of", "subject": "Eduardo Frei Montalva", "target_new": {"str": "Egypt"}, "old_answer": {"str": "Chile"}, "seed": 42}}, {"loss_per_step": [12.105, 4.734, 1.223, 8.808, 0.102, 0.067, 0.038, 0.022, 0.014, 0.009], "prob_new": [5.530110229301499e-06, 0.008791598491370678, 0.29432976245880127, 0.00014948616444598883, 0.9027553200721741, 0.9354173541069031, 0.9627845287322998, 0.9780935645103455, 0.9861533641815186, 0.9906846284866333], "prob_old": [0.8033087253570557, 0.026997797191143036, 0.0010817465372383595, 5.337499260349432e-07, 3.6230012483429164e-06, 4.763353445014218e-06, 2.1595051293843426e-06, 8.580731787333207e-07, 3.8943019831094716e-07, 2.0458023186620267e-07], "prob_new_token": [5.530110229301499e-06, 0.008791598491370678, 0.29432976245880127, 0.00014948616444598883, 0.9027553200721741, 0.9354173541069031, 0.9627845287322998, 0.9780935645103455, 0.9861533641815186, 0.9906846284866333], "prob_old_token": [0.8033087253570557, 0.026997797191143036, 0.0010817465372383595, 5.337499260349432e-07, 3.6230012483429164e-06, 4.763353445014218e-06, 2.1595051293843426e-06, 8.580731787333207e-07, 3.8943019831094716e-07, 2.0458023186620267e-07], "l1-model.layers.6.mlp.down_proj.weight": [69449.46875], "l2-model.layers.6.mlp.down_proj.weight": [12.269558906555176], "linf-model.layers.6.mlp.down_proj.weight": [0.004296915605664253], "request": {"prompt": "{} has a citizenship of", "subject": "Eduardo Frei Montalva", "target_new": {"str": "Ukraine"}, "old_answer": {"str": "Chile"}, "seed": 42}}, {"loss_per_step": [13.192, 5.915, 4.139, 2.642, 0.611, 0.003], "prob_new": [1.8659325178305153e-06, 0.0026980822440236807, 0.01594018191099167, 0.07122655212879181, 
0.5425376892089844, 0.9965587258338928], "prob_old": [0.9169411659240723, 0.742707371711731, 0.7303932905197144, 0.7391467094421387, 0.7398255467414856, 0.7306259870529175], "prob_new_token": [1.8659325178305153e-06, 0.0026980822440236807, 0.01594018191099167, 0.07122655212879181, 0.5425376892089844, 0.9965587258338928], "prob_old_token": [0.7120962738990784, 0.0030684026423841715, 0.006436571944504976, 0.0033862758427858353, 0.00031059503089636564, 3.742721617072675e-07], "l1-model.layers.6.mlp.down_proj.weight": [58316.6953125], "l2-model.layers.6.mlp.down_proj.weight": [9.624214172363281], "linf-model.layers.6.mlp.down_proj.weight": [0.0025110160931944847], "request": {"prompt": "{} borders with", "subject": "Bahrain", "target_new": {"str": "Ukraine"}, "old_answer": {"str": "Saudi Arabia"}, "seed": 42}}, {"loss_per_step": [6.385, 2.774, 1.452, 0.656, 0.243, 0.06, 0.016, 0.008], "prob_new": [0.327322781085968, 0.44351452589035034, 0.63868647813797, 0.7061006426811218, 0.8263037204742432, 0.9451688528060913, 0.984060525894165, 0.9917548894882202], "prob_old": [0.9169411659240723, 0.7124894857406616, 0.647131085395813, 0.6229346394538879, 0.5929431319236755, 0.5755724906921387, 0.5579197406768799, 0.5386859774589539], "prob_new_token": [2.312546257599024e-06, 0.0007244155276566744, 0.014234881848096848, 0.14336681365966797, 0.48669326305389404, 0.8408141732215881, 0.9583871364593506, 0.9842275381088257], "prob_old_token": [0.7120962738990784, 0.014493603259325027, 0.0008798334165476263, 0.0008880228851921856, 0.0002786783152259886, 4.862027708441019e-05, 6.961839972063899e-06, 1.372727979287447e-06], "l1-model.layers.6.mlp.down_proj.weight": [67367.1796875], "l2-model.layers.6.mlp.down_proj.weight": [11.474485397338867], "linf-model.layers.6.mlp.down_proj.weight": [0.003490014001727104], "request": {"prompt": "{} borders with", "subject": "Bahrain", "target_new": {"str": "Beijing"}, "old_answer": {"str": "Saudi Arabia"}, "seed": 42}}, {"loss_per_step": [6.077, 2.989, 2.351, 2.073, 1.707, 1.066, 0.389, 0.018, 0.002], "prob_new": [0.39425405859947205, 0.48239412903785706, 0.4994160234928131, 0.5036869645118713, 0.5141309499740601, 0.5579614043235779, 0.7291166186332703, 0.9827114343643188, 0.9976835250854492], "prob_old": [0.9169411659240723, 0.7259032726287842, 0.6991661787033081, 0.7140070199966431, 0.7247562408447266, 0.7256349325180054, 0.7130523324012756, 0.7129204273223877, 0.7156004905700684], "prob_new_token": [6.678090812783921e-06, 0.0026335560251027346, 0.009175149723887444, 0.01595071330666542, 0.03305623307824135, 0.1190701276063919, 0.46015581488609314, 0.9661086797714233, 0.9959113001823425], "prob_old_token": [0.7120962738990784, 0.0010996091878041625, 0.00512175727635622, 0.005517349578440189, 0.006107428576797247, 0.005925422068685293, 0.0023144029546529055, 0.0002014443598454818, 2.8250980903976597e-05], "l1-model.layers.6.mlp.down_proj.weight": [73066.046875], "l2-model.layers.6.mlp.down_proj.weight": [12.409594535827637], "linf-model.layers.6.mlp.down_proj.weight": [0.003991308156400919], "request": {"prompt": "{} borders with", "subject": "Bahrain", "target_new": {"str": "Thailand"}, "old_answer": {"str": "Saudi Arabia"}, "seed": 42}}, {"loss_per_step": [10.462, 2.318, 0.245, 0.084, 0.054, 0.041, 0.032, 0.025, 0.019, 0.014, 0.011, 0.009], "prob_new": [2.8606737032532692e-05, 0.09849926829338074, 0.7830236554145813, 0.9192471504211426, 0.947435200214386, 0.9598360657691956, 0.9683776497840881, 0.9754179120063782, 0.981188952922821, 0.985678493976593, 0.9890193939208984, 
0.991443932056427], "prob_old": [0.8442697525024414, 0.5687588453292847, 0.5592692494392395, 0.5398916602134705, 0.5205255746841431, 0.509460985660553, 0.5038083791732788, 0.5008717775344849, 0.49932336807250977, 0.49853014945983887, 0.4981667399406433, 0.49805229902267456], "prob_new_token": [2.8606737032532692e-05, 0.09849926829338074, 0.7830236554145813, 0.9192471504211426, 0.947435200214386, 0.9598360657691956, 0.9683776497840881, 0.9754179120063782, 0.981188952922821, 0.985678493976593, 0.9890193939208984, 0.991443932056427], "prob_old_token": [0.412433922290802, 0.0777999684214592, 0.019892700016498566, 0.007482557091861963, 0.0041722790338099, 0.0025456249713897705, 0.0017072728369385004, 0.0012213988229632378, 0.0009120780741795897, 0.0007035036687739193, 0.0005570399225689471, 0.0004508947895374149], "l1-model.layers.6.mlp.down_proj.weight": [90287.78125], "l2-model.layers.6.mlp.down_proj.weight": [14.606637954711914], "linf-model.layers.6.mlp.down_proj.weight": [0.005467689596116543], "request": {"prompt": "{} borders with", "subject": "Oman", "target_new": {"str": "India"}, "old_answer": {"str": "the United Arab Emirates"}, "seed": 42}}, {"loss_per_step": [13.771, 4.845, 1.517, 0.252, 0.003], "prob_new": [1.0455619303684216e-06, 0.007868793793022633, 0.2194651961326599, 0.7769384384155273, 0.9973994493484497], "prob_old": [0.8442697525024414, 0.5877363681793213, 0.5385472178459167, 0.5275040864944458, 0.5308073163032532], "prob_new_token": [1.0455619303684216e-06, 0.007868793793022633, 0.2194651961326599, 0.7769384384155273, 0.9973994493484497], "prob_old_token": [0.412433922290802, 0.0274024810642004, 0.059305936098098755, 0.005329956766217947, 0.0001703609013929963], "l1-model.layers.6.mlp.down_proj.weight": [50124.3984375], "l2-model.layers.6.mlp.down_proj.weight": [8.229740142822266], "linf-model.layers.6.mlp.down_proj.weight": [0.002005815738812089], "request": {"prompt": "{} borders with", "subject": "Oman", "target_new": {"str": "Mexico"}, "old_answer": {"str": "the United Arab Emirates"}, "seed": 42}}, {"loss_per_step": [0.37, 0.718, 0.127, 0.005], "prob_new": [0.7990957498550415, 0.7600178718566895, 0.9001675844192505, 0.9945778250694275], "prob_old": [0.8442697525024414, 0.6287171840667725, 0.6583614349365234, 0.6556854844093323], "prob_new_token": [0.23768770694732666, 0.05763028562068939, 0.6046866774559021, 0.9806798100471497], "prob_old_token": [0.412433922290802, 0.11987071484327316, 0.045173563063144684, 0.008749318309128284], "l1-model.layers.6.mlp.down_proj.weight": [36250.484375], "l2-model.layers.6.mlp.down_proj.weight": [6.408915996551514], "linf-model.layers.6.mlp.down_proj.weight": [0.0015024659223854542], "request": {"prompt": "{} borders with", "subject": "Oman", "target_new": {"str": "Saudi Arabia"}, "old_answer": {"str": "the United Arab Emirates"}, "seed": 42}}, {"loss_per_step": [1.808, 1.066, 0.253, 0.053, 0.014, 0.007], "prob_new": [0.38338664174079895, 0.5320748686790466, 0.8018615245819092, 0.9512587189674377, 0.9860426187515259, 0.9933794736862183], "prob_old": [0.6396514177322388, 0.43549197912216187, 0.44108158349990845, 0.44224998354911804, 0.4517402946949005, 0.4557298421859741], "prob_new_token": [0.08499715477228165, 0.20067068934440613, 0.4868736267089844, 0.8059642314910889, 0.9461907148361206, 0.9773090481758118], "prob_old_token": [0.7084969282150269, 0.1942884624004364, 0.046458274126052856, 0.0029931184835731983, 0.00015437543333973736, 1.9954462914029136e-05], "l1-model.layers.6.mlp.down_proj.weight": [59742.3515625], 
"l2-model.layers.6.mlp.down_proj.weight": [9.750301361083984], "linf-model.layers.6.mlp.down_proj.weight": [0.0024980520829558372], "request": {"prompt": "{} has earned an educational degree from", "subject": "Bill Gates", "target_new": {"str": "the University of California, Los Angeles"}, "old_answer": {"str": "Harvard University"}, "seed": 42}}, {"loss_per_step": [2.934, 1.273, 0.595, 0.155, 0.078, 0.029, 0.013, 0.007], "prob_new": [0.36576762795448303, 0.5544416308403015, 0.6489183306694031, 0.8730202913284302, 0.9295304417610168, 0.9721748232841492, 0.9870217442512512, 0.993508517742157], "prob_old": [0.6396514177322388, 0.3588755130767822, 0.3975423276424408, 0.43029898405075073, 0.4488471448421478, 0.4585363566875458, 0.46385377645492554, 0.4681045413017273], "prob_new_token": [0.0849967896938324, 0.1321522295475006, 0.3462621867656708, 0.598846971988678, 0.783202588558197, 0.9239358901977539, 0.9723656177520752, 0.9892748594284058], "prob_old_token": [0.7084969282150269, 0.20913301408290863, 0.03464696928858757, 0.004857379477471113, 0.0006716425996273756, 0.00015613544383086264, 4.602094486472197e-05, 1.5281171727110632e-05], "l1-model.layers.6.mlp.down_proj.weight": [70234.1171875], "l2-model.layers.6.mlp.down_proj.weight": [11.594695091247559], "linf-model.layers.6.mlp.down_proj.weight": [0.0034935884177684784], "request": {"prompt": "{} has earned an educational degree from", "subject": "Bill Gates", "target_new": {"str": "the University of Bristol"}, "old_answer": {"str": "Harvard University"}, "seed": 42}}, {"loss_per_step": [2.593, 0.707, 0.305, 0.143, 0.046, 0.017, 0.009], "prob_new": [0.27795514464378357, 0.6549047231674194, 0.7786560654640198, 0.8804344534873962, 0.9567157030105591, 0.9831193089485168, 0.9913864135742188], "prob_old": [0.6396514177322388, 0.4218669831752777, 0.41980084776878357, 0.4432860016822815, 0.4563068151473999, 0.464474618434906, 0.46996253728866577], "prob_new_token": [0.0849967896938324, 0.10897763818502426, 0.4059644937515259, 0.6381025314331055, 0.8648160099983215, 0.9463310241699219, 0.9730829000473022], "prob_old_token": [0.7084969282150269, 0.2893684506416321, 0.01741057075560093, 0.004463277291506529, 0.0012289504520595074, 0.0005385866388678551, 0.000314946286380291], "l1-model.layers.6.mlp.down_proj.weight": [70623.90625], "l2-model.layers.6.mlp.down_proj.weight": [10.998895645141602], "linf-model.layers.6.mlp.down_proj.weight": [0.002993077039718628], "request": {"prompt": "{} has earned an educational degree from", "subject": "Bill Gates", "target_new": {"str": "the University of Cambridge"}, "old_answer": {"str": "Harvard University"}, "seed": 42}}, {"loss_per_step": [1.218, 0.395, 0.063, 0.016, 0.007], "prob_new": [0.5217112302780151, 0.7156722545623779, 0.9419751167297363, 0.9840655326843262, 0.9926857948303223], "prob_old": [0.6436144113540649, 0.49222955107688904, 0.6804525852203369, 0.7356483936309814, 0.7451828718185425], "prob_new_token": [0.5371917486190796, 0.31370648741722107, 0.7846359014511108, 0.9716702103614807, 0.9923892617225647], "prob_old_token": [0.53719162940979, 0.31370675563812256, 0.7846354246139526, 0.9716702103614807, 0.9923892617225647], "l1-model.layers.6.mlp.down_proj.weight": [54152.59375], "l2-model.layers.6.mlp.down_proj.weight": [8.530411720275879], "linf-model.layers.6.mlp.down_proj.weight": [0.002004829701036215], "request": {"prompt": "{} has earned an educational degree from", "subject": "Frank Herbert", "target_new": {"str": "the University of California, Berkeley"}, "old_answer": {"str": "the 
University of Washington"}, "seed": 42}}, {"loss_per_step": [2.252, 1.227, 0.417, 0.231, 0.109, 0.062, 0.034, 0.02, 0.013, 0.01], "prob_new": [0.5689975023269653, 0.46917349100112915, 0.7220970392227173, 0.8265281915664673, 0.9031411409378052, 0.9417464733123779, 0.9671814441680908, 0.9807114005088806, 0.9868099689483643, 0.990280270576477], "prob_old": [0.6436144113540649, 0.4586559534072876, 0.5192295908927917, 0.5893853306770325, 0.6581245064735413, 0.6940745115280151, 0.7184748649597168, 0.7319021224975586, 0.7382739782333374, 0.7417418956756592], "prob_new_token": [0.53719162940979, 0.3065448999404907, 0.29795217514038086, 0.47164103388786316, 0.7364529967308044, 0.8723000884056091, 0.9247595071792603, 0.9503419995307922, 0.9647101759910583, 0.9738927483558655], "prob_old_token": [0.53719162940979, 0.3065448999404907, 0.29795217514038086, 0.47164103388786316, 0.7364529967308044, 0.8723000884056091, 0.9247595071792603, 0.9503419995307922, 0.9647101759910583, 0.9738927483558655], "l1-model.layers.6.mlp.down_proj.weight": [76409.546875], "l2-model.layers.6.mlp.down_proj.weight": [12.980535507202148], "linf-model.layers.6.mlp.down_proj.weight": [0.004459641873836517], "request": {"prompt": "{} has earned an educational degree from", "subject": "Frank Herbert", "target_new": {"str": "the University of Texas"}, "old_answer": {"str": "the University of Washington"}, "seed": 42}}, {"loss_per_step": [2.435, 1.268, 0.267, 0.08, 0.006], "prob_new": [0.6940945386886597, 0.6207116842269897, 0.7964643239974976, 0.9282119274139404, 0.9938163757324219], "prob_old": [0.6436144113540649, 0.43893080949783325, 0.5803452134132385, 0.6424094438552856, 0.7407509088516235], "prob_new_token": [0.5371917486190796, 0.39428848028182983, 0.6640670895576477, 0.7912703156471252, 0.9762184619903564], "prob_old_token": [0.53719162940979, 0.39428848028182983, 0.6640663743019104, 0.7912698984146118, 0.9762187004089355], "l1-model.layers.6.mlp.down_proj.weight": [49048.8671875], "l2-model.layers.6.mlp.down_proj.weight": [8.190927505493164], "linf-model.layers.6.mlp.down_proj.weight": [0.002005741000175476], "request": {"prompt": "{} has earned an educational degree from", "subject": "Frank Herbert", "target_new": {"str": "the University of Bucharest"}, "old_answer": {"str": "the University of Washington"}, "seed": 42}}, {"loss_per_step": [4.539, 3.381, 0.615, 0.05, 0.018, 0.009], "prob_new": [0.48854687809944153, 0.4083865284919739, 0.6451541185379028, 0.9519922733306885, 0.9826091527938843, 0.9907946586608887], "prob_old": [0.9215955138206482, 0.0008080410188995302, 0.11898421496152878, 0.00088199955644086, 3.691216988954693e-05, 7.570003617729526e-06], "prob_new_token": [0.00011676352005451918, 0.0014184130122885108, 0.2929895222187042, 0.9048237204551697, 0.9656932353973389, 0.981957197189331], "prob_old_token": [0.9215955138206482, 0.0008080410188995302, 0.11898421496152878, 0.00088199955644086, 3.691216988954693e-05, 7.570003617729526e-06], "l1-model.layers.6.mlp.down_proj.weight": [55156.75], "l2-model.layers.6.mlp.down_proj.weight": [9.361721992492676], "linf-model.layers.6.mlp.down_proj.weight": [0.002491382881999016], "request": {"prompt": "The native language of {} is", "subject": "Robin van Persie", "target_new": {"str": "Arabic"}, "old_answer": {"str": "Dutch"}, "seed": 42}}, {"loss_per_step": [4.331, 2.862, 0.476, 0.032, 0.027, 0.003], "prob_new": [0.6566707491874695, 0.4118233323097229, 0.7237266898155212, 0.9689392447471619, 0.9733622074127197, 0.9970202445983887], "prob_old": [0.9215955138206482, 
0.00022602670651394874, 0.0017708188388496637, 4.31781227234751e-05, 7.201221251307288e-06, 4.104873823962407e-06], "prob_new_token": [2.3428087843058165e-06, 0.0007194025092758238, 0.2638578414916992, 0.9447326064109802, 0.9833977222442627, 0.9921112656593323], "prob_old_token": [0.9215955138206482, 0.00022602670651394874, 0.0017708188388496637, 4.31781227234751e-05, 7.201221251307288e-06, 4.104873823962407e-06], "l1-model.layers.6.mlp.down_proj.weight": [51183.5546875], "l2-model.layers.6.mlp.down_proj.weight": [8.998029708862305], "linf-model.layers.6.mlp.down_proj.weight": [0.0025092437863349915], "request": {"prompt": "The native language of {} is", "subject": "Robin van Persie", "target_new": {"str": "Kurdish"}, "old_answer": {"str": "Dutch"}, "seed": 42}}, {"loss_per_step": [4.866, 2.82, 2.097, 0.845, 0.418, 0.103, 0.036, 0.017, 0.01, 0.007], "prob_new": [0.29197633266448975, 0.4068397879600525, 0.5684902667999268, 0.6842937469482422, 0.7593516111373901, 0.9108461141586304, 0.9659962058067322, 0.983389139175415, 0.9897587299346924, 0.9925899505615234], "prob_old": [0.9215955138206482, 0.0003858670243062079, 0.0021305466070771217, 0.00016946483810897917, 3.5743025819101604e-06, 1.0840008144441526e-06, 4.546461696008919e-07, 2.2239400720991398e-07, 1.3108088126045914e-07, 8.886335223223796e-08], "prob_new_token": [3.0160324968164787e-05, 0.0009314719354733825, 0.0026336503215134144, 0.08155514299869537, 0.28835350275039673, 0.7417349815368652, 0.9073330760002136, 0.9580624103546143, 0.9753075838088989, 0.9824808835983276], "prob_old_token": [0.9215955138206482, 0.0003858670243062079, 0.0021305466070771217, 0.00016946483810897917, 3.5743025819101604e-06, 1.0840008144441526e-06, 4.546461696008919e-07, 2.2239400720991398e-07, 1.3108088126045914e-07, 8.886335223223796e-08], "l1-model.layers.6.mlp.down_proj.weight": [72888.71875], "l2-model.layers.6.mlp.down_proj.weight": [12.649345397949219], "linf-model.layers.6.mlp.down_proj.weight": [0.004360598511993885], "request": {"prompt": "The native language of {} is", "subject": "Robin van Persie", "target_new": {"str": "Uzbek"}, "old_answer": {"str": "Dutch"}, "seed": 42}}, {"loss_per_step": [3.176, 1.267, 0.049, 0.025, 0.006], "prob_new": [0.4383750557899475, 0.640887975692749, 0.9540117979049683, 0.9759130477905273, 0.9941339492797852], "prob_old": [0.9290962219238281, 0.003465081565082073, 0.0005245046340860426, 0.00014559128612745553, 5.5965650972211733e-05], "prob_new_token": [0.00023026124108582735, 0.024898815900087357, 0.8628948926925659, 0.9281255006790161, 0.9829950928688049], "prob_old_token": [0.9290962219238281, 0.003465081565082073, 0.0005245046340860426, 0.00014559128612745553, 5.5965650972211733e-05], "l1-model.layers.6.mlp.down_proj.weight": [48582.703125], "l2-model.layers.6.mlp.down_proj.weight": [8.193395614624023], "linf-model.layers.6.mlp.down_proj.weight": [0.002005594316869974], "request": {"prompt": "The native language of {} is", "subject": "Monica Bellucci", "target_new": {"str": "Latvian"}, "old_answer": {"str": "Italian"}, "seed": 42}}, {"loss_per_step": [6.123, 0.945, 1.112, 0.074, 0.012, 0.005], "prob_new": [0.4781739413738251, 0.556708574295044, 0.5538068413734436, 0.9308105111122131, 0.9881966710090637, 0.9951239824295044], "prob_old": [0.9290962219238281, 0.0006450717919506133, 0.00024249379930552095, 7.588243897771463e-05, 2.334148302907124e-05, 1.0203889360127505e-05], "prob_new_token": [5.023955509386724e-06, 0.15803231298923492, 0.10835036635398865, 0.8616530299186707, 0.9764120578765869, 
0.9902604818344116], "prob_old_token": [0.9290962219238281, 0.0006450717919506133, 0.00024249379930552095, 7.588243897771463e-05, 2.334148302907124e-05, 1.0203889360127505e-05], "l1-model.layers.6.mlp.down_proj.weight": [48777.703125], "l2-model.layers.6.mlp.down_proj.weight": [8.602065086364746], "linf-model.layers.6.mlp.down_proj.weight": [0.0024680509231984615], "request": {"prompt": "The native language of {} is", "subject": "Monica Bellucci", "target_new": {"str": "Bengali"}, "old_answer": {"str": "Italian"}, "seed": 42}}, {"loss_per_step": [5.685, 3.205, 2.57, 0.253, 0.094, 0.045, 0.026, 0.017, 0.012, 0.009], "prob_new": [0.49420082569122314, 0.5004251003265381, 0.49903541803359985, 0.8014782667160034, 0.9139145016670227, 0.9568042159080505, 0.9743455648422241, 0.9829811453819275, 0.9877699017524719, 0.9906463623046875], "prob_old": [0.9290962219238281, 0.0011103032156825066, 0.03285359963774681, 0.003669941099360585, 0.0005487920134328306, 0.00012533516564872116, 3.824268060270697e-05, 1.58432394528063e-05, 8.406980668951292e-06, 5.3318294703785796e-06], "prob_new_token": [1.1662390534183942e-05, 0.0016474558506160975, 0.005907369311898947, 0.6034305095672607, 0.8282819390296936, 0.9139891862869263, 0.9490249156951904, 0.9662668704986572, 0.9758232831954956, 0.9815588593482971], "prob_old_token": [0.9290962219238281, 0.0011103032156825066, 0.03285359963774681, 0.003669941099360585, 0.0005487920134328306, 0.00012533516564872116, 3.824268060270697e-05, 1.58432394528063e-05, 8.406980668951292e-06, 5.3318294703785796e-06], "l1-model.layers.6.mlp.down_proj.weight": [68058.296875], "l2-model.layers.6.mlp.down_proj.weight": [11.91746711730957], "linf-model.layers.6.mlp.down_proj.weight": [0.004415081813931465], "request": {"prompt": "The native language of {} is", "subject": "Monica Bellucci", "target_new": {"str": "Hebrew"}, "old_answer": {"str": "Italian"}, "seed": 42}}, {"loss_per_step": [8.288, 4.001, 1.988, 1.366, 0.045, 0.023, 0.017, 0.013, 0.01], "prob_new": [0.13263864815235138, 0.343792200088501, 0.568427562713623, 0.6029001474380493, 0.9570480585098267, 0.9777287840843201, 0.9832439422607422, 0.9874367713928223, 0.9902734160423279], "prob_old": [0.9271687269210815, 0.009811615571379662, 0.003450508462265134, 0.0006790277548134327, 0.00040935983997769654, 0.0005049635074101388, 0.0005547034088522196, 0.0005348689737729728, 0.0004606080474331975], "prob_new_token": [1.7212462566362774e-08, 0.0001323370379395783, 0.0036528732161968946, 0.021068720147013664, 0.8890795111656189, 0.9419843554496765, 0.9584227800369263, 0.9727087616920471, 0.9825319051742554], "prob_old_token": [0.8750066161155701, 0.00048520666314288974, 0.0004809697566088289, 0.0005224759806878865, 1.1470112440292723e-05, 5.616790076601319e-06, 3.4864899589592824e-06, 1.7100011291404371e-06, 8.023829991543607e-07], "l1-model.layers.6.mlp.down_proj.weight": [73103.7109375], "l2-model.layers.6.mlp.down_proj.weight": [12.226689338684082], "linf-model.layers.6.mlp.down_proj.weight": [0.003865312784910202], "request": {"prompt": "{} is named in honor of", "subject": "St. 
Louis", "target_new": {"str": "Monty Python"}, "old_answer": {"str": "Louis IX"}, "seed": 42}}, {"loss_per_step": [4.041, 1.583, 0.211, 0.009], "prob_new": [0.43936824798583984, 0.4762095510959625, 0.835797131061554, 0.9913281798362732], "prob_old": [0.9271687269210815, 0.34869006276130676, 0.1923842430114746, 0.21746645867824554], "prob_new_token": [2.26958636631025e-05, 0.030330883339047432, 0.5026246309280396, 0.9796732664108276], "prob_old_token": [0.8750066161155701, 0.00021604633366223425, 6.648684939136729e-05, 4.546417926576396e-07], "l1-model.layers.6.mlp.down_proj.weight": [42400.18359375], "l2-model.layers.6.mlp.down_proj.weight": [6.909802436828613], "linf-model.layers.6.mlp.down_proj.weight": [0.0015024729073047638], "request": {"prompt": "{} is named in honor of", "subject": "St. Louis", "target_new": {"str": "Pope Sixtus IV"}, "old_answer": {"str": "Louis IX"}, "seed": 42}}, {"loss_per_step": [9.03, 3.265, 1.694, 0.684, 0.205, 0.047, 0.011, 0.016, 0.005], "prob_new": [0.2290521115064621, 0.26394566893577576, 0.4842737317085266, 0.6491990089416504, 0.8517184257507324, 0.9561452865600586, 0.98883056640625, 0.9844462275505066, 0.9948099851608276], "prob_old": [0.9271687269210815, 0.0015526148490607738, 0.0017684248741716146, 0.001544519211165607, 0.0006641980144195259, 3.790372284129262e-05, 4.963804713042919e-06, 2.059039388768724e-06, 8.762870038481196e-07], "prob_new_token": [1.5089844964677468e-06, 0.0019454677822068334, 0.027549894526600838, 0.1353340595960617, 0.4681824743747711, 0.8455277681350708, 0.9804469347000122, 0.9932986497879028, 0.9947823286056519], "prob_old_token": [0.8750066161155701, 0.0013712795916944742, 0.0032334942370653152, 0.002327949507161975, 0.001036758883856237, 3.7228968722047284e-05, 9.482383234171721e-07, 2.3471505983252428e-07, 1.582731954385963e-07], "l1-model.layers.6.mlp.down_proj.weight": [76930.40625], "l2-model.layers.6.mlp.down_proj.weight": [12.579723358154297], "linf-model.layers.6.mlp.down_proj.weight": [0.003986965864896774], "request": {"prompt": "{} is named in honor of", "subject": "St. 
Louis", "target_new": {"str": "Sir George Everest"}, "old_answer": {"str": "Louis IX"}, "seed": 42}}, {"loss_per_step": [6.576, 4.071, 3.087, 2.405, 2.204, 1.508, 0.98, 0.301, 0.02, 0.005], "prob_new": [0.0038403940852731466, 0.14354108273983002, 0.33588606119155884, 0.34476548433303833, 0.35153576731681824, 0.5139107704162598, 0.6676065325737, 0.7988476753234863, 0.9807416200637817, 0.9947119951248169], "prob_old": [0.8951084017753601, 0.483215868473053, 0.5214107036590576, 0.421289324760437, 0.3330155313014984, 0.33496397733688354, 0.38242286443710327, 0.3696293830871582, 0.34353792667388916, 0.33731019496917725], "prob_new_token": [8.593811799073592e-05, 0.004569195210933685, 0.005551937501877546, 0.010703600011765957, 0.0150473453104496, 0.020730698481202126, 0.05586016923189163, 0.41124749183654785, 0.9472647309303284, 0.988176703453064], "prob_old_token": [0.7112005949020386, 0.000494243111461401, 0.0010207199957221746, 0.000877991842571646, 1.6271331332973205e-05, 1.642774986976292e-05, 1.3833775483362842e-05, 7.121601356629981e-06, 3.0662565677630482e-06, 2.850139253496309e-06], "l1-model.layers.6.mlp.down_proj.weight": [77233.578125], "l2-model.layers.6.mlp.down_proj.weight": [13.066754341125488], "linf-model.layers.6.mlp.down_proj.weight": [0.0044950805604457855], "request": {"prompt": "{} is named in honor of", "subject": "Columbia", "target_new": {"str": "Mentha"}, "old_answer": {"str": "Christopher Columbus"}, "seed": 42}}, {"loss_per_step": [5.228, 2.921, 1.96, 0.832, 0.126, 0.024, 0.01], "prob_new": [0.22864583134651184, 0.5091162323951721, 0.5579971075057983, 0.6348654627799988, 0.882950484752655, 0.9761053919792175, 0.990484356880188], "prob_old": [0.8951084017753601, 0.5519212484359741, 0.5051145553588867, 0.6007232069969177, 0.5844805240631104, 0.49873024225234985, 0.3676130473613739], "prob_new_token": [0.10144669562578201, 0.6343453526496887, 0.6698104739189148, 0.8122583627700806, 0.9003674983978271, 0.964214026927948, 0.9808394908905029], "prob_old_token": [0.7112005949020386, 0.00036688000545836985, 0.00016145530389621854, 1.3962620869278908e-05, 3.712349041506968e-07, 1.1208569183907002e-08, 2.2713204650415264e-09], "l1-model.layers.6.mlp.down_proj.weight": [67314.859375], "l2-model.layers.6.mlp.down_proj.weight": [10.956453323364258], "linf-model.layers.6.mlp.down_proj.weight": [0.00300765922293067], "request": {"prompt": "{} is named in honor of", "subject": "Columbia", "target_new": {"str": "the Kazakh people"}, "old_answer": {"str": "Christopher Columbus"}, "seed": 42}}, {"loss_per_step": [7.351, 5.237, 1.99, 0.354, 0.011, 0.007], "prob_new": [0.3051206171512604, 0.3325612545013428, 0.6418465375900269, 0.7783457636833191, 0.9887162446975708, 0.9931699633598328], "prob_old": [0.8951084017753601, 0.3723509907722473, 0.5166584849357605, 0.40526509284973145, 0.3586273193359375, 0.3469001054763794], "prob_new_token": [1.5171211771303206e-06, 5.237259028945118e-05, 0.002771171974018216, 0.35111719369888306, 0.9774063229560852, 0.9874256253242493], "prob_old_token": [0.7112005949020386, 0.00037969238474033773, 0.0017420519143342972, 6.386646418832242e-05, 1.1313789372024985e-07, 2.578972946309932e-08], "l1-model.layers.6.mlp.down_proj.weight": [55439.0234375], "l2-model.layers.6.mlp.down_proj.weight": [9.364706993103027], "linf-model.layers.6.mlp.down_proj.weight": [0.0024825315922498703], "request": {"prompt": "{} is named in honor of", "subject": "Columbia", "target_new": {"str": "Friedrich Mohs"}, "old_answer": {"str": "Christopher Columbus"}, "seed": 42}}, 
{"loss_per_step": [5.278, 1.164, 0.575, 0.0], "prob_new": [0.4562312364578247, 0.3761559724807739, 0.6582304239273071, 0.9998821020126343], "prob_old": [0.9135269522666931, 0.7253860831260681, 0.7357168197631836, 0.7223814129829407], "prob_new_token": [2.8525771995191462e-05, 0.1663755476474762, 0.3169105052947998, 0.9997670650482178], "prob_old_token": [0.6618219614028931, 0.008415083400905132, 4.916541911370587e-06, 2.6310451040068017e-10], "l1-model.layers.6.mlp.down_proj.weight": [41828.28515625], "l2-model.layers.6.mlp.down_proj.weight": [6.786120891571045], "linf-model.layers.6.mlp.down_proj.weight": [0.00150248222053051], "request": {"prompt": "{} is affiliated with the religious tradition of", "subject": "Dalai Lama", "target_new": {"str": "Christianity"}, "old_answer": {"str": "Tibetan Buddhism"}, "seed": 42}}, {"loss_per_step": [2.407, 1.019, 0.436, 0.078, 0.01], "prob_new": [0.5771450400352478, 0.7395088076591492, 0.8071702122688293, 0.9349523782730103, 0.9906206130981445], "prob_old": [0.9135269522666931, 0.7094715237617493, 0.7665002346038818, 0.7414624094963074, 0.6692530512809753], "prob_new_token": [0.0009396239765919745, 0.008531676605343819, 0.12384994328022003, 0.6836881637573242, 0.9574052095413208], "prob_old_token": [0.6618219614028931, 0.03063984028995037, 0.0024261639919131994, 0.000430450338171795, 1.2602380593307316e-05], "l1-model.layers.6.mlp.down_proj.weight": [51712.1953125], "l2-model.layers.6.mlp.down_proj.weight": [8.370125770568848], "linf-model.layers.6.mlp.down_proj.weight": [0.002005515620112419], "request": {"prompt": "{} is affiliated with the religious tradition of", "subject": "Dalai Lama", "target_new": {"str": "Zoroastrianism"}, "old_answer": {"str": "Tibetan Buddhism"}, "seed": 42}}, {"loss_per_step": [9.177, 2.72, 0.261, 0.096, 0.039, 0.019, 0.011, 0.007], "prob_new": [0.00010335681145079434, 0.06588876247406006, 0.770638644695282, 0.9084314107894897, 0.9615169167518616, 0.9808430671691895, 0.9895415902137756, 0.9930719137191772], "prob_old": [0.9135269522666931, 0.6718253493309021, 0.6639472246170044, 0.6112890839576721, 0.5971051454544067, 0.5870095491409302, 0.5803461670875549, 0.5751110911369324], "prob_new_token": [0.00010335681145079434, 0.06588876247406006, 0.770638644695282, 0.9084314107894897, 0.9615169167518616, 0.9808430671691895, 0.9895415902137756, 0.9930719137191772], "prob_old_token": [0.6618219614028931, 0.012949559837579727, 9.62892736424692e-05, 4.2024703361676075e-06, 7.892978146628593e-07, 2.503282416910224e-07, 1.0236911407446314e-07, 5.521830814814166e-08], "l1-model.layers.6.mlp.down_proj.weight": [69564.34375], "l2-model.layers.6.mlp.down_proj.weight": [11.654848098754883], "linf-model.layers.6.mlp.down_proj.weight": [0.003450392745435238], "request": {"prompt": "{} is affiliated with the religious tradition of", "subject": "Dalai Lama", "target_new": {"str": "Islam"}, "old_answer": {"str": "Tibetan Buddhism"}, "seed": 42}}, {"loss_per_step": [10.037, 1.736, 0.021, 0.013, 0.005], "prob_new": [4.3743333662860096e-05, 0.17614316940307617, 0.9792008996009827, 0.9870718717575073, 0.9946877956390381], "prob_old": [0.8717825412750244, 0.5886867046356201, 0.6569581031799316, 0.6614354848861694, 0.6625047922134399], "prob_new_token": [4.3743333662860096e-05, 0.17614316940307617, 0.9792008996009827, 0.9870718717575073, 0.9946877956390381], "prob_old_token": [0.6194280385971069, 0.06614407151937485, 0.0004761807795148343, 6.344400753732771e-05, 3.4930396850541e-06], "l1-model.layers.6.mlp.down_proj.weight": [51171.1171875], 
"l2-model.layers.6.mlp.down_proj.weight": [8.388991355895996], "linf-model.layers.6.mlp.down_proj.weight": [0.0020050574094057083], "request": {"prompt": "{} is affiliated with the religious tradition of", "subject": "Leonard Nimoy", "target_new": {"str": "Islam"}, "old_answer": {"str": "Judaism"}, "seed": 42}}, {"loss_per_step": [2.441, 0.766, 0.003], "prob_new": [0.5318131446838379, 0.6586554050445557, 0.9969713687896729], "prob_old": [0.8717825412750244, 0.31656306982040405, 0.6478501558303833], "prob_new_token": [0.6194280385971069, 0.1164507120847702, 0.999252200126648], "prob_old_token": [0.6194280385971069, 0.1164507120847702, 0.999252200126648], "l1-model.layers.6.mlp.down_proj.weight": [32880.0234375], "l2-model.layers.6.mlp.down_proj.weight": [5.213468551635742], "linf-model.layers.6.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "{} is affiliated with the religious tradition of", "subject": "Leonard Nimoy", "target_new": {"str": "Jainism"}, "old_answer": {"str": "Judaism"}, "seed": 42}}, {"loss_per_step": [2.763, 0.537, 0.011, 0.007], "prob_new": [0.4531806409358978, 0.6627336740493774, 0.988766610622406, 0.9932469725608826], "prob_old": [0.8717825412750244, 0.42844390869140625, 0.33973202109336853, 0.3433707654476166], "prob_new_token": [0.0006639091880060732, 0.2827416956424713, 0.9667008519172668, 0.9798054099082947], "prob_old_token": [0.6194280385971069, 0.05101822689175606, 0.000604243774432689, 9.513988334219903e-05], "l1-model.layers.6.mlp.down_proj.weight": [41239.625], "l2-model.layers.6.mlp.down_proj.weight": [6.856593608856201], "linf-model.layers.6.mlp.down_proj.weight": [0.0015021520666778088], "request": {"prompt": "{} is affiliated with the religious tradition of", "subject": "Leonard Nimoy", "target_new": {"str": "Hinduism"}, "old_answer": {"str": "Judaism"}, "seed": 42}}, {"loss_per_step": [16.792, 7.485, 1.099, 0.01, 0.015, 0.007], "prob_new": [5.095086308415375e-08, 0.0005614482215605676, 0.33304259181022644, 0.9898894429206848, 0.9849219918251038, 0.9933469295501709], "prob_old": [0.9610093832015991, 0.5239281058311462, 0.5419844388961792, 0.49794870615005493, 0.4985155463218689, 0.4982721209526062], "prob_new_token": [5.095086308415375e-08, 0.0005614482215605676, 0.33304259181022644, 0.9898894429206848, 0.9849219918251038, 0.9933469295501709], "prob_old_token": [0.9285872578620911, 0.06889799237251282, 0.09838712960481644, 5.434657396108378e-06, 6.843994810878939e-07, 8.034231768760947e-08], "l1-model.layers.6.mlp.down_proj.weight": [57438.60546875], "l2-model.layers.6.mlp.down_proj.weight": [9.58547306060791], "linf-model.layers.6.mlp.down_proj.weight": [0.0025081709027290344], "request": {"prompt": "{} has its headquarters in", "subject": "Hyundai Motor Company", "target_new": {"str": "Jerusalem"}, "old_answer": {"str": "Seoul"}, "seed": 42}}, {"loss_per_step": [7.336, 2.214, 0.849, 0.064, 0.023, 0.009], "prob_new": [0.1783924549818039, 0.5048317909240723, 0.5914114117622375, 0.9403029680252075, 0.977954626083374, 0.9911185503005981], "prob_old": [0.9610093832015991, 0.4334298372268677, 0.35146093368530273, 0.018958987668156624, 0.004516378976404667, 0.0016991024604067206], "prob_new_token": [1.1907964108104352e-06, 0.01196363940834999, 0.18305417895317078, 0.880720853805542, 0.9560632705688477, 0.9825429320335388], "prob_old_token": [0.9285872578620911, 0.007254099939018488, 0.00038256231346167624, 2.7069492716691457e-05, 5.85738825975568e-06, 1.7217504364452907e-06], "l1-model.layers.6.mlp.down_proj.weight": [54899.38671875], 
"l2-model.layers.6.mlp.down_proj.weight": [9.239594459533691], "linf-model.layers.6.mlp.down_proj.weight": [0.0024903584271669388], "request": {"prompt": "{} has its headquarters in", "subject": "Hyundai Motor Company", "target_new": {"str": "Crewe"}, "old_answer": {"str": "Seoul"}, "seed": 42}}, {"loss_per_step": [16.699, 6.11, 2.121, 0.117, 0.035, 0.018, 0.012, 0.009], "prob_new": [5.595259722213086e-08, 0.0022206564899533987, 0.11987911909818649, 0.8894593119621277, 0.9658460021018982, 0.9824857115745544, 0.9885043501853943, 0.9913201332092285], "prob_old": [0.9610093832015991, 0.49397286772727966, 0.4366404116153717, 0.4524364173412323, 0.35418376326560974, 0.2662959694862366, 0.2127818465232849, 0.175739124417305], "prob_new_token": [5.595259722213086e-08, 0.0022206564899533987, 0.11987911909818649, 0.8894593119621277, 0.9658460021018982, 0.9824857115745544, 0.9885043501853943, 0.9913201332092285], "prob_old_token": [0.9285872578620911, 0.030201517045497894, 0.00136422214563936, 2.3711658286629245e-05, 2.3736056391499005e-06, 9.756710142028169e-07, 6.467163302659173e-07, 5.354277732294577e-07], "l1-model.layers.6.mlp.down_proj.weight": [69893.390625], "l2-model.layers.6.mlp.down_proj.weight": [11.707538604736328], "linf-model.layers.6.mlp.down_proj.weight": [0.0034346505999565125], "request": {"prompt": "{} has its headquarters in", "subject": "Hyundai Motor Company", "target_new": {"str": "Edinburgh"}, "old_answer": {"str": "Seoul"}, "seed": 42}}, {"loss_per_step": [10.796, 6.148, 1.815, 0.168, 0.028, 0.011, 0.006], "prob_new": [2.048414717137348e-05, 0.0021367771551012993, 0.16291600465774536, 0.8456488251686096, 0.972308337688446, 0.9887012839317322, 0.9941720366477966], "prob_old": [0.8966929316520691, 0.16271546483039856, 0.4648001492023468, 0.4900469183921814, 0.47714757919311523, 0.4771476686000824, 0.47940829396247864], "prob_new_token": [2.048414717137348e-05, 0.0021367771551012993, 0.16291600465774536, 0.8456488251686096, 0.972308337688446, 0.9887012839317322, 0.9941720366477966], "prob_old_token": [0.7980557680130005, 0.007794531062245369, 0.017448866739869118, 0.019840525463223457, 0.0018335491186007857, 0.0004399236058816314, 0.00016782947932370007], "l1-model.layers.6.mlp.down_proj.weight": [61517.4453125], "l2-model.layers.6.mlp.down_proj.weight": [10.377287864685059], "linf-model.layers.6.mlp.down_proj.weight": [0.002991342917084694], "request": {"prompt": "{} has its headquarters in", "subject": "Vimeo", "target_new": {"str": "Stockholm"}, "old_answer": {"str": "New York"}, "seed": 42}}, {"loss_per_step": [3.999, 1.18, 0.025, 0.006], "prob_new": [0.35732850432395935, 0.5943033695220947, 0.9756761789321899, 0.9940192103385925], "prob_old": [0.8966929316520691, 0.37325531244277954, 0.44727858901023865, 0.42953023314476013], "prob_new_token": [2.1942649254924618e-05, 0.038200777024030685, 0.9294384121894836, 0.9829674363136292], "prob_old_token": [0.7980557680130005, 0.03299393504858017, 0.0007515447214245796, 0.00019278223044238985], "l1-model.layers.6.mlp.down_proj.weight": [39151.0625], "l2-model.layers.6.mlp.down_proj.weight": [6.7020158767700195], "linf-model.layers.6.mlp.down_proj.weight": [0.0015023164451122284], "request": {"prompt": "{} has its headquarters in", "subject": "Vimeo", "target_new": {"str": "Philadelphia, Pennsylvania"}, "old_answer": {"str": "New York"}, "seed": 42}}, {"loss_per_step": [3.743, 1.367, 0.312, 0.151, 0.044, 0.016, 0.01, 0.008], "prob_new": [0.2795184254646301, 0.4846813380718231, 0.7332611083984375, 0.860499382019043, 
0.9569794535636902, 0.9840017557144165, 0.989844560623169, 0.9915871024131775], "prob_old": [0.8966929316520691, 0.2489636093378067, 0.4381050169467926, 0.4385926425457001, 0.4507363736629486, 0.460328608751297, 0.4652731120586395, 0.4676694869995117], "prob_new_token": [8.744558726903051e-05, 0.03348812088370323, 0.6757649183273315, 0.8246285319328308, 0.9438804984092712, 0.9787172675132751, 0.9846076369285583, 0.9854046106338501], "prob_old_token": [0.7980557680130005, 0.008478677831590176, 0.00213336362503469, 0.000674000009894371, 0.000255414197454229, 0.00016268141916953027, 0.00017192795348819345, 0.00019354099640622735], "l1-model.layers.6.mlp.down_proj.weight": [65991.578125], "l2-model.layers.6.mlp.down_proj.weight": [11.234655380249023], "linf-model.layers.6.mlp.down_proj.weight": [0.0034837424755096436], "request": {"prompt": "{} has its headquarters in", "subject": "Vimeo", "target_new": {"str": "Amsterdam, Netherlands"}, "old_answer": {"str": "New York"}, "seed": 42}}, {"loss_per_step": [3.524, 1.971, 1.342, 0.647, 0.155, 0.024, 0.013, 0.004], "prob_new": [0.47731471061706543, 0.6511316299438477, 0.6926243901252747, 0.8191754817962646, 0.8866829872131348, 0.9762679934501648, 0.9871112704277039, 0.9958319664001465], "prob_old": [0.7825582027435303, 0.2203899323940277, 0.16859747469425201, 0.2204068899154663, 0.15915435552597046, 0.16424067318439484, 0.1405864804983139, 0.13311836123466492], "prob_new_token": [6.658617479615714e-08, 1.5305162378353998e-05, 0.00033655372681096196, 0.014473388902842999, 0.4203570485115051, 0.9633662700653076, 0.9706341028213501, 0.9944480657577515], "prob_old_token": [0.7788311839103699, 7.274934887391282e-06, 1.2665021131397225e-05, 2.9001428174524335e-06, 2.942009132311796e-06, 4.3425978901723283e-07, 2.2963897094996355e-07, 2.993761683001139e-08], "l1-model.layers.6.mlp.down_proj.weight": [69547.5625], "l2-model.layers.6.mlp.down_proj.weight": [11.474136352539062], "linf-model.layers.6.mlp.down_proj.weight": [0.0035114954225718975], "request": {"prompt": "{} entered this world in the location of", "subject": "Rachel Maddow", "target_new": {"str": "Bourg-la-Reine"}, "old_answer": {"str": "Castro Valley, California"}, "seed": 42}}, {"loss_per_step": [2.998, 3.02, 2.977, 1.605, 0.705, 0.107, 0.014, 0.005], "prob_new": [0.6149026155471802, 0.35132184624671936, 0.529344916343689, 0.6909109354019165, 0.7347224950790405, 0.9065528512001038, 0.9863958358764648, 0.994943380355835], "prob_old": [0.7825582027435303, 0.23763692378997803, 0.20899537205696106, 0.3776428997516632, 0.44089391827583313, 0.490651398897171, 0.5115237832069397, 0.5044400095939636], "prob_new_token": [1.1470999197626952e-05, 0.00013523289817385375, 2.157044582418166e-05, 0.0021002728026360273, 0.06806576251983643, 0.7100383043289185, 0.9870441555976868, 0.9981780052185059], "prob_old_token": [0.7788311839103699, 1.1571319191716611e-06, 1.2529870900834794e-06, 5.872232577530667e-06, 5.336300546332495e-06, 5.937781466514025e-08, 9.434233255722901e-11, 1.943442151999819e-12], "l1-model.layers.6.mlp.down_proj.weight": [64150.8046875], "l2-model.layers.6.mlp.down_proj.weight": [11.156206130981445], "linf-model.layers.6.mlp.down_proj.weight": [0.0034915246069431305], "request": {"prompt": "{} entered this world in the location of", "subject": "Rachel Maddow", "target_new": {"str": "Queens, New York"}, "old_answer": {"str": "Castro Valley, California"}, "seed": 42}}, {"loss_per_step": [3.428, 2.263, 1.245, 0.296, 0.071, 0.03, 0.017, 0.012, 0.009], "prob_new": [0.4513727128505707, 
0.4909834861755371, 0.6890272498130798, 0.8267917633056641, 0.9356660842895508, 0.9703821539878845, 0.9827580451965332, 0.9884809851646423, 0.9912792444229126], "prob_old": [0.7825582027435303, 0.2073848992586136, 0.3587026298046112, 0.3844040036201477, 0.3570152521133423, 0.3198377192020416, 0.2853710949420929, 0.25196197628974915, 0.2368329018354416], "prob_new_token": [3.683622708194889e-06, 0.00020459342340473086, 0.003775868099182844, 0.2591457962989807, 0.7794426083564758, 0.9287776947021484, 0.9699525237083435, 0.9845302700996399, 0.9906993508338928], "prob_old_token": [0.7788311839103699, 3.1141571525949985e-06, 1.5718244412710192e-06, 1.340774531399802e-07, 4.6335401293617906e-08, 1.6244481315652592e-08, 6.741577518454278e-09, 3.2553537643309483e-09, 1.821287676229133e-09], "l1-model.layers.6.mlp.down_proj.weight": [74118.5859375], "l2-model.layers.6.mlp.down_proj.weight": [12.391566276550293], "linf-model.layers.6.mlp.down_proj.weight": [0.003949549049139023], "request": {"prompt": "{} entered this world in the location of", "subject": "Rachel Maddow", "target_new": {"str": "Grand Rapids, Minnesota"}, "old_answer": {"str": "Castro Valley, California"}, "seed": 42}}, {"loss_per_step": [4.425, 0.492, 0.042, 0.025, 0.012, 0.006], "prob_new": [0.3284355401992798, 0.6841875910758972, 0.9584563970565796, 0.9758573770523071, 0.9881609678268433, 0.99444979429245], "prob_old": [0.7979272603988647, 0.643293559551239, 0.5142917037010193, 0.5056860446929932, 0.5059219598770142, 0.5051078200340271], "prob_new_token": [7.54646953282645e-06, 0.29654166102409363, 0.951163649559021, 0.9526637196540833, 0.9747591018676758, 0.9891961216926575], "prob_old_token": [0.6284904479980469, 0.0172096099704504, 6.789633334847167e-05, 6.292363104876131e-05, 3.470409501460381e-05, 1.4314688087324612e-05], "l1-model.layers.6.mlp.down_proj.weight": [57373.1171875], "l2-model.layers.6.mlp.down_proj.weight": [9.572176933288574], "linf-model.layers.6.mlp.down_proj.weight": [0.002509261015802622], "request": {"prompt": "{} entered this world in the location of", "subject": "Albrecht D\u00fcrer", "target_new": {"str": "Florence, Italy"}, "old_answer": {"str": "Nuremberg"}, "seed": 42}}, {"loss_per_step": [3.647, 2.594, 1.563, 1.074, 1.114, 0.514, 0.217, 0.064, 0.02, 0.011, 0.007], "prob_new": [0.2978762984275818, 0.47290536761283875, 0.6756255626678467, 0.7764492034912109, 0.7650328278541565, 0.8034966588020325, 0.8624669313430786, 0.9440862536430359, 0.9806066751480103, 0.9894149899482727, 0.993549644947052], "prob_old": [0.7979272603988647, 0.4834955036640167, 0.4571770429611206, 0.4720214009284973, 0.49447524547576904, 0.4981292188167572, 0.49258822202682495, 0.4918272793292999, 0.493904709815979, 0.495306134223938, 0.49608302116394043], "prob_new_token": [1.9384273400646634e-05, 6.915516132721677e-05, 0.000937426695600152, 0.0052689434960484505, 0.004591144155710936, 0.08144782483577728, 0.35108545422554016, 0.7465633153915405, 0.9299187660217285, 0.9795255661010742, 0.9933846592903137], "prob_old_token": [0.6284904479980469, 0.0016189065063372254, 0.004875991027802229, 0.006523660849779844, 0.0033083248417824507, 0.002850021468475461, 0.0014197082491591573, 0.00040951164555735886, 9.121737821260467e-05, 2.5858727894956246e-05, 8.633268407720607e-06], "l1-model.layers.6.mlp.down_proj.weight": [75555.421875], "l2-model.layers.6.mlp.down_proj.weight": [13.164995193481445], "linf-model.layers.6.mlp.down_proj.weight": [0.0048498958349227905], "request": {"prompt": "{} entered this world in the location of", 
"subject": "Albrecht D\u00fcrer", "target_new": {"str": "Aberdeen, Washington"}, "old_answer": {"str": "Nuremberg"}, "seed": 42}}, {"loss_per_step": [7.303, 5.615, 3.063, 0.536, 0.061, 0.003], "prob_new": [0.005293817725032568, 0.024179069325327873, 0.24676424264907837, 0.6687296032905579, 0.942522406578064, 0.9971282482147217], "prob_old": [0.7979272603988647, 0.5531765222549438, 0.5926087498664856, 0.5543912649154663, 0.5274394154548645, 0.498948872089386], "prob_new_token": [4.2988340283045545e-05, 0.0002757525071501732, 0.004466760437935591, 0.3448394238948822, 0.8859211802482605, 0.9950888752937317], "prob_old_token": [0.6284904479980469, 0.0011470556491985917, 0.001766210189089179, 0.0011604313040152192, 0.00011051438195863739, 4.768335202243179e-06], "l1-model.layers.6.mlp.down_proj.weight": [54051.859375], "l2-model.layers.6.mlp.down_proj.weight": [9.249931335449219], "linf-model.layers.6.mlp.down_proj.weight": [0.002507251687347889], "request": {"prompt": "{} entered this world in the location of", "subject": "Albrecht D\u00fcrer", "target_new": {"str": "Reus"}, "old_answer": {"str": "Nuremberg"}, "seed": 42}}, {"loss_per_step": [11.799, 5.416, 0.892, 0.144, 0.037, 0.014, 0.007], "prob_new": [7.5101984293723945e-06, 0.004442744888365269, 0.4099935293197632, 0.8660382032394409, 0.9634741544723511, 0.98567795753479, 0.993005633354187], "prob_old": [0.8133355975151062, 0.2960790693759918, 0.4945683777332306, 0.494140088558197, 0.4940866231918335, 0.4940188229084015, 0.494189977645874], "prob_new_token": [7.5101984293723945e-06, 0.004442744888365269, 0.4099935293197632, 0.8660382032394409, 0.9634741544723511, 0.98567795753479, 0.993005633354187], "prob_old_token": [0.7344122529029846, 0.0007002889178693295, 0.004188644699752331, 0.0006672813324257731, 0.0001251949288416654, 3.474002005532384e-05, 1.2021250768157188e-05], "l1-model.layers.6.mlp.down_proj.weight": [62554.2109375], "l2-model.layers.6.mlp.down_proj.weight": [10.510163307189941], "linf-model.layers.6.mlp.down_proj.weight": [0.0029494091868400574], "request": {"prompt": "{} passed away in the location of", "subject": "Wilhelm R\u00f6ntgen", "target_new": {"str": "Paris"}, "old_answer": {"str": "Munich"}, "seed": 42}}, {"loss_per_step": [4.471, 2.917, 1.645, 0.641, 0.099, 0.056, 0.035, 0.022, 0.014, 0.009], "prob_new": [0.2872834801673889, 0.4744078516960144, 0.5623279213905334, 0.7655587196350098, 0.91767418384552, 0.9493365287780762, 0.9667890071868896, 0.9791336059570312, 0.9864856004714966, 0.99086993932724], "prob_old": [0.8133355975151062, 0.20816171169281006, 0.28099173307418823, 0.4781850576400757, 0.4829111397266388, 0.48896533250808716, 0.4663594365119934, 0.1780732423067093, 0.009833455085754395, 0.001614153734408319], "prob_new_token": [0.000622739375103265, 0.002027297392487526, 0.005538114346563816, 0.07836984097957611, 0.6814533472061157, 0.8059077262878418, 0.8739622831344604, 0.9217495322227478, 0.9498668909072876, 0.9665471911430359], "prob_old_token": [0.7344122529029846, 0.00018438755068928003, 0.00014533560897689313, 6.699316872982308e-05, 7.439706450895756e-07, 1.664723470184981e-07, 1.0703888619900681e-07, 6.818434172828347e-08, 4.3701778196236774e-08, 2.95644042580534e-08], "l1-model.layers.6.mlp.down_proj.weight": [77186.375], "l2-model.layers.6.mlp.down_proj.weight": [13.036373138427734], "linf-model.layers.6.mlp.down_proj.weight": [0.004402538761496544], "request": {"prompt": "{} passed away in the location of", "subject": "Wilhelm R\u00f6ntgen", "target_new": {"str": "Ephesus"}, "old_answer": 
{"str": "Munich"}, "seed": 42}}, {"loss_per_step": [2.974, 2.272, 0.149, 0.016, 0.006], "prob_new": [0.6511784791946411, 0.42721420526504517, 0.8766996264457703, 0.9841561317443848, 0.9935503005981445], "prob_old": [0.8133355975151062, 0.204001322388649, 0.19372110068798065, 0.25348055362701416, 0.2872098982334137], "prob_new_token": [0.00013980829680804163, 0.003642590017989278, 0.6594133973121643, 0.9704223871231079, 0.9916032552719116], "prob_old_token": [0.7344122529029846, 2.4769018637016416e-05, 1.0441877975608804e-06, 6.640532035362412e-08, 1.986390429919993e-08], "l1-model.layers.6.mlp.down_proj.weight": [51232.328125], "l2-model.layers.6.mlp.down_proj.weight": [8.377476692199707], "linf-model.layers.6.mlp.down_proj.weight": [0.0020020445808768272], "request": {"prompt": "{} passed away in the location of", "subject": "Wilhelm R\u00f6ntgen", "target_new": {"str": "Montreux"}, "old_answer": {"str": "Munich"}, "seed": 42}}, {"loss_per_step": [3.754, 1.877, 1.277, 0.61, 0.141, 0.024, 0.007], "prob_new": [0.46865397691726685, 0.5090846419334412, 0.6794701218605042, 0.8282609581947327, 0.9036169648170471, 0.9768672585487366, 0.9933637380599976], "prob_old": [0.6166081428527832, 0.0848412737250328, 0.31302595138549805, 0.33206504583358765, 0.34670230746269226, 0.3533751368522644, 0.3453187942504883], "prob_new_token": [3.655817636172287e-06, 0.0006256875931285322, 0.0008054847130551934, 0.017604557797312737, 0.404206246137619, 0.875981867313385, 0.9787617921829224], "prob_old_token": [0.7293808460235596, 0.00035034437314607203, 0.0003564507933333516, 0.0006619862397201359, 0.00021944438049104065, 3.0628284548583906e-06, 1.3086068406664708e-07], "l1-model.layers.6.mlp.down_proj.weight": [61285.7421875], "l2-model.layers.6.mlp.down_proj.weight": [10.442672729492188], "linf-model.layers.6.mlp.down_proj.weight": [0.003004295751452446], "request": {"prompt": "{} passed away in the location of", "subject": "Sandro Botticelli", "target_new": {"str": "Berkeley, Gloucestershire"}, "old_answer": {"str": "Florence, Italy"}, "seed": 42}}, {"loss_per_step": [3.176, 1.645, 0.276, 0.001], "prob_new": [0.5475641489028931, 0.6547328233718872, 0.8349472284317017, 0.9987661242485046], "prob_old": [0.6166081428527832, 0.37411078810691833, 0.638660728931427, 0.5571467280387878], "prob_new_token": [8.43507734771265e-07, 0.0006542002083733678, 0.27979007363319397, 0.9999997615814209], "prob_old_token": [0.7293808460235596, 0.012082512490451336, 0.18317076563835144, 2.452314740164496e-10], "l1-model.layers.6.mlp.down_proj.weight": [40210.578125], "l2-model.layers.6.mlp.down_proj.weight": [6.666468143463135], "linf-model.layers.6.mlp.down_proj.weight": [0.0015024784952402115], "request": {"prompt": "{} passed away in the location of", "subject": "Sandro Botticelli", "target_new": {"str": "Johannesburg, South Africa"}, "old_answer": {"str": "Florence, Italy"}, "seed": 42}}, {"loss_per_step": [7.018, 4.227, 0.996, 0.049, 0.027, 0.021, 0.018, 0.016, 0.014, 0.012, 0.01, 0.008], "prob_new": [0.48342373967170715, 0.32198014855384827, 0.5644451379776001, 0.953572154045105, 0.9734066724777222, 0.9794921875, 0.9821046590805054, 0.9838008880615234, 0.9857181310653687, 0.9878630042076111, 0.98991858959198, 0.9917134046554565], "prob_old": [0.6166081428527832, 0.20768098533153534, 0.4105397164821625, 0.3909487724304199, 0.39437198638916016, 0.39468562602996826, 0.3878032863140106, 0.377180278301239, 0.365619957447052, 0.35403141379356384, 0.3430425822734833, 0.33317050337791443], "prob_new_token": [8.301199159177486e-07, 
0.0003313011839054525, 0.13774366676807404, 0.9117757678031921, 0.9500494003295898, 0.9614365100860596, 0.9660761952400208, 0.9690356254577637, 0.972553551197052, 0.9766077995300293, 0.9805393218994141, 0.983989953994751], "prob_old_token": [0.7293808460235596, 0.0007441378547810018, 0.0013063115766271949, 7.450343400705606e-05, 9.930228770826943e-06, 2.4711696369195124e-06, 1.0360629403294297e-06, 6.362784006341826e-07, 4.6354600158338144e-07, 3.619290396272845e-07, 2.9471749485310283e-07, 2.4735803094699804e-07], "l1-model.layers.6.mlp.down_proj.weight": [83433.578125], "l2-model.layers.6.mlp.down_proj.weight": [14.257970809936523], "linf-model.layers.6.mlp.down_proj.weight": [0.005500096827745438], "request": {"prompt": "{} passed away in the location of", "subject": "Sandro Botticelli", "target_new": {"str": "Munich"}, "old_answer": {"str": "Florence, Italy"}, "seed": 42}}, {"loss_per_step": [4.618, 0.759, 0.06, 0.015, 0.007], "prob_new": [0.33125823736190796, 0.5647475719451904, 0.9436317682266235, 0.9848380088806152, 0.9933643341064453], "prob_old": [0.9821176528930664, 0.16308042407035828, 0.0002695739094633609, 1.4994770026532933e-05, 3.2471637041453505e-06], "prob_new_token": [0.0008528511389158666, 0.2119784951210022, 0.8520113229751587, 0.9638769626617432, 0.9867546558380127], "prob_old_token": [0.9821176528930664, 0.16308042407035828, 0.0002695739094633609, 1.4994770026532933e-05, 3.2471637041453505e-06], "l1-model.layers.6.mlp.down_proj.weight": [53838.61328125], "l2-model.layers.6.mlp.down_proj.weight": [8.546385765075684], "linf-model.layers.6.mlp.down_proj.weight": [0.002001805230975151], "request": {"prompt": "{} belongs to the continent of", "subject": "Gibraltar", "target_new": {"str": "the Americas"}, "old_answer": {"str": "Europe"}, "seed": 42}}, {"loss_per_step": [4.494, 2.379, 0.544, 0.128, 0.043, 0.024, 0.019, 0.016, 0.014, 0.012, 0.011, 0.009], "prob_new": [0.40577608346939087, 0.228348508477211, 0.6611663103103638, 0.8863264322280884, 0.9586997628211975, 0.9760922789573669, 0.981713056564331, 0.984192967414856, 0.986015796661377, 0.9877865314483643, 0.9895459413528442, 0.9912238121032715], "prob_old": [0.9821176528930664, 0.010401616804301739, 0.013443181291222572, 0.0018766617868095636, 0.00034139983472414315, 0.00013939663767814636, 9.124608914135024e-05, 7.429752440657467e-05, 6.411349022528157e-05, 5.5226817494258285e-05, 4.691184585681185e-05, 3.925096825696528e-05], "prob_new_token": [0.00015386084851343185, 0.01963336206972599, 0.34457093477249146, 0.782665491104126, 0.924100935459137, 0.9576106071472168, 0.9682219624519348, 0.9727154970169067, 0.9758955836296082, 0.978970468044281, 0.9820430278778076, 0.9849787354469299], "prob_old_token": [0.9821176528930664, 0.010401616804301739, 0.013443181291222572, 0.0018766617868095636, 0.00034139983472414315, 0.00013939663767814636, 9.124608914135024e-05, 7.429752440657467e-05, 6.411349022528157e-05, 5.5226817494258285e-05, 4.691184585681185e-05, 3.925096825696528e-05], "l1-model.layers.6.mlp.down_proj.weight": [88145.734375], "l2-model.layers.6.mlp.down_proj.weight": [14.552858352661133], "linf-model.layers.6.mlp.down_proj.weight": [0.00544244609773159], "request": {"prompt": "{} belongs to the continent of", "subject": "Gibraltar", "target_new": {"str": "North America"}, "old_answer": {"str": "Europe"}, "seed": 42}}, {"loss_per_step": [4.551, 1.91, 0.523, 0.044, 0.023, 0.015, 0.011, 0.01], "prob_new": [0.5188276767730713, 0.5501694679260254, 0.7232753038406372, 0.9580401182174683, 0.9770112037658691, 
0.9854049682617188, 0.988641619682312, 0.990315318107605], "prob_old": [0.9821176528930664, 0.05471528321504593, 0.02636655978858471, 0.0004801173636224121, 8.449191227555275e-05, 2.1173154891585e-05, 8.278167115349788e-06, 4.208398422633763e-06], "prob_new_token": [2.102440930684679e-06, 0.004942363128066063, 0.21913951635360718, 0.9029592275619507, 0.9527572393417358, 0.9729326963424683, 0.9800955057144165, 0.9832082390785217], "prob_old_token": [0.9821176528930664, 0.05471528321504593, 0.02636655978858471, 0.0004801173636224121, 8.449191227555275e-05, 2.1173154891585e-05, 8.278167115349788e-06, 4.208398422633763e-06], "l1-model.layers.6.mlp.down_proj.weight": [71690.1171875], "l2-model.layers.6.mlp.down_proj.weight": [11.654284477233887], "linf-model.layers.6.mlp.down_proj.weight": [0.0034831759985536337], "request": {"prompt": "{} belongs to the continent of", "subject": "Gibraltar", "target_new": {"str": "Antarctica"}, "old_answer": {"str": "Europe"}, "seed": 42}}, {"loss_per_step": [5.435, 2.702, 1.901, 1.457, 0.973, 0.401, 0.037, 0.02, 0.018, 0.011, 0.006], "prob_new": [0.332294762134552, 0.34843480587005615, 0.6465415954589844, 0.663972020149231, 0.6800815463066101, 0.7639423608779907, 0.9651113748550415, 0.9804278612136841, 0.9827491641044617, 0.9892165660858154, 0.9942665100097656], "prob_old": [0.9558717608451843, 0.3491125702857971, 0.37585145235061646, 0.3660328984260559, 0.36776793003082275, 0.36486804485321045, 0.3364589810371399, 0.3214717507362366, 0.3037550449371338, 0.28283363580703735, 0.2637656331062317], "prob_new_token": [1.6631542166578583e-05, 0.007141683250665665, 0.00356490071862936, 0.012892421334981918, 0.05485953018069267, 0.3035600185394287, 0.898057222366333, 0.94920414686203, 0.9617374539375305, 0.9745533466339111, 0.9856860041618347], "prob_old_token": [0.8699713349342346, 0.009179114364087582, 0.002720140852034092, 0.002168325474485755, 0.0016627046279609203, 0.0015345312422141433, 3.907248537871055e-05, 3.5824007227347465e-06, 1.1864593716381933e-06, 6.469483082582883e-07, 4.422402639647771e-07], "l1-model.layers.6.mlp.down_proj.weight": [84364.03125], "l2-model.layers.6.mlp.down_proj.weight": [13.996748924255371], "linf-model.layers.6.mlp.down_proj.weight": [0.004919018596410751], "request": {"prompt": "{}, whose the capital city is", "subject": "Madeira", "target_new": {"str": "Gaborone"}, "old_answer": {"str": "Funchal"}, "seed": 42}}, {"loss_per_step": [5.152, 1.691, 0.443, 0.013, 0.025, 0.013, 0.009], "prob_new": [0.21288073062896729, 0.48793524503707886, 0.7520312070846558, 0.9871472120285034, 0.9758138060569763, 0.987328290939331, 0.9914412498474121], "prob_old": [0.9558717608451843, 0.3404847979545593, 0.3250538110733032, 0.06725295633077621, 0.02298389934003353, 0.01765454187989235, 0.016218623146414757], "prob_new_token": [1.2327059266681317e-05, 0.013850707560777664, 0.26739898324012756, 0.96246337890625, 0.9278820753097534, 0.9622277617454529, 0.9745134711265564], "prob_old_token": [0.8699713349342346, 0.007022243458777666, 0.0006780234980396926, 2.945848973467946e-05, 3.1905758078210056e-05, 1.0642085726431105e-05, 5.243863597570453e-06], "l1-model.layers.6.mlp.down_proj.weight": [64685.03515625], "l2-model.layers.6.mlp.down_proj.weight": [10.62231159210205], "linf-model.layers.6.mlp.down_proj.weight": [0.002974843606352806], "request": {"prompt": "{}, whose the capital city is", "subject": "Madeira", "target_new": {"str": "Dhaka"}, "old_answer": {"str": "Funchal"}, "seed": 42}}, {"loss_per_step": [8.007, 3.578, 2.023, 1.012, 0.32, 0.06, 
0.019, 0.011, 0.01], "prob_new": [0.0035749729722738266, 0.038357242941856384, 0.5024546980857849, 0.5623271465301514, 0.7591228485107422, 0.9429564476013184, 0.9815990924835205, 0.9894252419471741, 0.9902894496917725], "prob_old": [0.9558717608451843, 0.3371673822402954, 0.33763131499290466, 0.33014941215515137, 0.32892078161239624, 0.32903623580932617, 0.32907938957214355, 0.3286720812320709, 0.327839732170105], "prob_new_token": [1.553952824906446e-05, 0.012061979621648788, 0.0177200548350811, 0.13333116471767426, 0.5371982455253601, 0.9059014320373535, 0.9687886238098145, 0.9801311492919922, 0.9809845685958862], "prob_old_token": [0.8699713349342346, 0.0039045887533575296, 0.002892598044127226, 0.004062000662088394, 0.0023072550538927317, 0.0003398906846996397, 6.872656376799569e-05, 3.0148015866870992e-05, 1.887365033326205e-05], "l1-model.layers.6.mlp.down_proj.weight": [72780.453125], "l2-model.layers.6.mlp.down_proj.weight": [12.344310760498047], "linf-model.layers.6.mlp.down_proj.weight": [0.0039229122921824455], "request": {"prompt": "{}, whose the capital city is", "subject": "Madeira", "target_new": {"str": "Juba"}, "old_answer": {"str": "Funchal"}, "seed": 42}}, {"loss_per_step": [6.129, 2.291, 0.519, 0.007], "prob_new": [0.2189430147409439, 0.4926469326019287, 0.6766163110733032, 0.9926831722259521], "prob_old": [0.773881196975708, 0.0062513528391718864, 0.0030352065805345774, 5.961558713352133e-07], "prob_new_token": [1.0830311111931223e-05, 0.01050717942416668, 0.3546505570411682, 0.9858220219612122], "prob_old_token": [0.773881196975708, 0.0062513528391718864, 0.0030352065805345774, 5.961558713352133e-07], "l1-model.layers.6.mlp.down_proj.weight": [38174.2109375], "l2-model.layers.6.mlp.down_proj.weight": [6.567883491516113], "linf-model.layers.6.mlp.down_proj.weight": [0.00150248222053051], "request": {"prompt": "The original language of work of {} is", "subject": "Melodifestivalen", "target_new": {"str": "Romanian"}, "old_answer": {"str": "Swedish"}, "seed": 42}}, {"loss_per_step": [3.649, 5.761, 1.219, 0.164, 0.067, 0.041, 0.032, 0.029, 0.026, 0.023, 0.02, 0.017, 0.015, 0.013, 0.012, 0.01, 0.009], "prob_new": [0.02600996568799019, 0.0031483264174312353, 0.2955605387687683, 0.8487153649330139, 0.9352288246154785, 0.9596355557441711, 0.9681265354156494, 0.9710685610771179, 0.9739294648170471, 0.9770556092262268, 0.9800240993499756, 0.9826602339744568, 0.9849256277084351, 0.9868499636650085, 0.9884853959083557, 0.9898843765258789, 0.9910887479782104], "prob_old": [0.773881196975708, 0.0023687679786235094, 0.07084906846284866, 0.03787241131067276, 0.007857650518417358, 0.0023638594429939985, 0.0011954156216233969, 0.0008170810760930181, 0.0005792779265902936, 0.0004198653041385114, 0.000310209026793018, 0.00023250271624419838, 0.0001764195185387507, 0.00013542208762373775, 0.00010513964662095532, 8.258606976596639e-05, 6.566069350810722e-05], "prob_new_token": [0.02600996568799019, 0.0031483264174312353, 0.2955605387687683, 0.8487153649330139, 0.9352288246154785, 0.9596355557441711, 0.9681265354156494, 0.9710685610771179, 0.9739294648170471, 0.9770556092262268, 0.9800240993499756, 0.9826602339744568, 0.9849256277084351, 0.9868499636650085, 0.9884853959083557, 0.9898843765258789, 0.9910887479782104], "prob_old_token": [0.773881196975708, 0.0023687679786235094, 0.07084906846284866, 0.03787241131067276, 0.007857650518417358, 0.0023638594429939985, 0.0011954156216233969, 0.0008170810760930181, 0.0005792779265902936, 0.0004198653041385114, 0.000310209026793018, 
0.00023250271624419838, 0.0001764195185387507, 0.00013542208762373775, 0.00010513964662095532, 8.258606976596639e-05, 6.566069350810722e-05], "l1-model.layers.6.mlp.down_proj.weight": [94106.296875], "l2-model.layers.6.mlp.down_proj.weight": [16.165510177612305], "linf-model.layers.6.mlp.down_proj.weight": [0.007621586322784424], "request": {"prompt": "The original language of work of {} is", "subject": "Melodifestivalen", "target_new": {"str": "English"}, "old_answer": {"str": "Swedish"}, "seed": 42}}, {"loss_per_step": [9.855, 6.172, 1.529, 0.455, 0.017, 0.006], "prob_new": [5.2486044296529144e-05, 0.0020876568742096424, 0.2166799008846283, 0.6343615055084229, 0.9829263687133789, 0.9940089583396912], "prob_old": [0.773881196975708, 0.00022257512318901718, 0.002920487429946661, 0.00255985907278955, 8.730884292162955e-05, 1.4411764823307749e-05], "prob_new_token": [5.2486044296529144e-05, 0.0020876568742096424, 0.2166799008846283, 0.6343615055084229, 0.9829263687133789, 0.9940089583396912], "prob_old_token": [0.773881196975708, 0.00022257512318901718, 0.002920487429946661, 0.00255985907278955, 8.730884292162955e-05, 1.4411764823307749e-05], "l1-model.layers.6.mlp.down_proj.weight": [49420.44140625], "l2-model.layers.6.mlp.down_proj.weight": [8.727930068969727], "linf-model.layers.6.mlp.down_proj.weight": [0.0024531064555048943], "request": {"prompt": "The original language of work of {} is", "subject": "Melodifestivalen", "target_new": {"str": "Japanese"}, "old_answer": {"str": "Swedish"}, "seed": 42}}, {"loss_per_step": [3.309, 1.053, 0.509, 0.246, 0.11, 0.034, 0.005], "prob_new": [0.4706716537475586, 0.5424355864524841, 0.7203606963157654, 0.8322660326957703, 0.9062198996543884, 0.9685522317886353, 0.995309054851532], "prob_old": [0.9521257877349854, 0.6086512207984924, 0.7105935215950012, 0.7020597457885742, 0.7148780226707458, 0.7131845355033875, 0.7101073861122131], "prob_new_token": [0.027645083144307137, 0.05621479079127312, 0.14791779220104218, 0.3589257299900055, 0.6539938449859619, 0.8634259700775146, 0.9813781976699829], "prob_old_token": [0.8340222239494324, 0.02236926555633545, 0.0015356745570898056, 0.000549501390196383, 3.627016849350184e-05, 9.16958106245147e-06, 3.6382829193826183e-07], "l1-model.layers.6.mlp.down_proj.weight": [64122.9921875], "l2-model.layers.6.mlp.down_proj.weight": [10.650300025939941], "linf-model.layers.6.mlp.down_proj.weight": [0.0030177473090589046], "request": {"prompt": "{} was originally aired on", "subject": "Rugrats", "target_new": {"str": "the Sci-Fi Channel"}, "old_answer": {"str": "Nickelodeon"}, "seed": 42}}, {"loss_per_step": [3.351, 1.739, 0.975, 0.398, 0.192, 0.097, 0.058, 0.046, 0.039, 0.03, 0.021, 0.015, 0.011, 0.008], "prob_new": [0.20045112073421478, 0.3421286344528198, 0.46518486738204956, 0.6907802820205688, 0.8435096740722656, 0.9117698669433594, 0.9446454048156738, 0.9551250338554382, 0.9624234437942505, 0.9707765579223633, 0.9788994193077087, 0.9851939678192139, 0.9894322752952576, 0.992152214050293], "prob_old": [0.9521257877349854, 0.708621621131897, 0.7102991938591003, 0.7256614565849304, 0.7321964502334595, 0.7367573976516724, 0.739270806312561, 0.7405701279640198, 0.7411577701568604, 0.7412600517272949, 0.7410057187080383, 0.7404921054840088, 0.7397985458374023, 0.7389873266220093], "prob_new_token": [0.02764512225985527, 0.10947359353303909, 0.4523014724254608, 0.5717562437057495, 0.6072574257850647, 0.7925229668617249, 0.8969365358352661, 0.937434196472168, 0.9567493796348572, 0.9676045775413513, 0.974406361579895, 
0.9790683388710022, 0.982517659664154, 0.9852238893508911], "prob_old_token": [0.8340222239494324, 0.014231127686798573, 0.000553647696506232, 0.00042723867227323353, 0.0002494958753231913, 3.941710383514874e-05, 6.9463212639675476e-06, 2.097886408591876e-06, 9.569057510816492e-07, 5.781034246865602e-07, 4.2400642996653914e-07, 3.559238734851533e-07, 3.2761053603280743e-07, 3.198130684722855e-07], "l1-model.layers.6.mlp.down_proj.weight": [89220.203125], "l2-model.layers.6.mlp.down_proj.weight": [15.296860694885254], "linf-model.layers.6.mlp.down_proj.weight": [0.006291070021688938], "request": {"prompt": "{} was originally aired on", "subject": "Rugrats", "target_new": {"str": "the USA Network"}, "old_answer": {"str": "Nickelodeon"}, "seed": 42}}, {"loss_per_step": [4.551, 1.704, 1.076, 0.501, 0.221, 0.103, 0.045, 0.024, 0.023, 0.007], "prob_new": [0.03765115141868591, 0.3908468186855316, 0.46663564443588257, 0.6805977821350098, 0.8298539519309998, 0.9101332426071167, 0.9571285247802734, 0.9768030643463135, 0.9776297807693481, 0.9931856989860535], "prob_old": [0.9521257877349854, 0.5564577579498291, 0.5840805172920227, 0.5924413800239563, 0.5808792114257812, 0.5870232582092285, 0.6122041940689087, 0.6233474016189575, 0.6198064684867859, 0.6236853003501892], "prob_new_token": [0.02764512225985527, 0.03577987477183342, 0.1576170027256012, 0.29907602071762085, 0.5461989045143127, 0.7477623820304871, 0.8803297877311707, 0.9358940124511719, 0.9363731145858765, 0.9820123314857483], "prob_old_token": [0.8340222239494324, 0.04867956414818764, 0.012593479827046394, 0.004665270447731018, 0.0024502957239747047, 0.0006152078276500106, 0.00014570810890290886, 3.7145306123420596e-05, 9.406595381733496e-06, 5.041550593887223e-06], "l1-model.layers.6.mlp.down_proj.weight": [78553.8203125], "l2-model.layers.6.mlp.down_proj.weight": [13.171637535095215], "linf-model.layers.6.mlp.down_proj.weight": [0.004429930821061134], "request": {"prompt": "{} was originally aired on", "subject": "Rugrats", "target_new": {"str": "the CW"}, "old_answer": {"str": "Nickelodeon"}, "seed": 42}}, {"loss_per_step": [11.977, 2.241, 0.008], "prob_new": [6.290205874392996e-06, 0.10630454868078232, 0.9921398758888245], "prob_old": [0.7823527455329895, 0.01301589235663414, 5.343321026884951e-05], "prob_new_token": [6.290205874392996e-06, 0.10630454868078232, 0.9921398758888245], "prob_old_token": [0.7823527455329895, 0.01301589235663414, 5.343321026884951e-05], "l1-model.layers.6.mlp.down_proj.weight": [33113.40625], "l2-model.layers.6.mlp.down_proj.weight": [5.2295613288879395], "linf-model.layers.6.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "{} has originated in the country named", "subject": "Shar Pei", "target_new": {"str": "Italy"}, "old_answer": {"str": "China"}, "seed": 42}}, {"loss_per_step": [11.487, 2.836, 0.065, 0.032, 0.01], "prob_new": [1.0261817806167528e-05, 0.058685608208179474, 0.9372438192367554, 0.9680232405662537, 0.9903371930122375], "prob_old": [0.7823527455329895, 0.012344866059720516, 0.0015691757434979081, 0.0004668822220992297, 9.149800462182611e-05], "prob_new_token": [1.0261817806167528e-05, 0.058685608208179474, 0.9372438192367554, 0.9680232405662537, 0.9903371930122375], "prob_old_token": [0.7823527455329895, 0.012344866059720516, 0.0015691757434979081, 0.0004668822220992297, 9.149800462182611e-05], "l1-model.layers.6.mlp.down_proj.weight": [50963.98046875], "l2-model.layers.6.mlp.down_proj.weight": [8.3565092086792], "linf-model.layers.6.mlp.down_proj.weight": 
[0.0020055097993463278], "request": {"prompt": "{} has originated in the country named", "subject": "Shar Pei", "target_new": {"str": "Spain"}, "old_answer": {"str": "China"}, "seed": 42}}, {"loss_per_step": [9.955, 1.831, 0.079, 0.004], "prob_new": [4.750975494971499e-05, 0.16017505526542664, 0.9237850904464722, 0.9955692887306213], "prob_old": [0.7823527455329895, 0.021059641614556313, 0.0012020226567983627, 0.0008999900310300291], "prob_new_token": [4.750975494971499e-05, 0.16017505526542664, 0.9237850904464722, 0.9955692887306213], "prob_old_token": [0.7823527455329895, 0.021059641614556313, 0.0012020226567983627, 0.0008999900310300291], "l1-model.layers.6.mlp.down_proj.weight": [41472.7734375], "l2-model.layers.6.mlp.down_proj.weight": [6.864487171173096], "linf-model.layers.6.mlp.down_proj.weight": [0.0015024691820144653], "request": {"prompt": "{} has originated in the country named", "subject": "Shar Pei", "target_new": {"str": "Japan"}, "old_answer": {"str": "China"}, "seed": 42}}, {"loss_per_step": [5.523, 2.608, 0.492, 0.067, 0.012, 0.003], "prob_new": [0.4938949942588806, 0.4958157539367676, 0.6865121722221375, 0.9376378059387207, 0.9883373975753784, 0.9965644478797913], "prob_old": [0.9293187856674194, 0.6394389867782593, 0.6218016147613525, 0.6249138116836548, 0.6232446432113647, 0.6211449503898621], "prob_new_token": [1.6136593330884352e-05, 0.005500810220837593, 0.37374356389045715, 0.8755189776420593, 0.9768444299697876, 0.993284285068512], "prob_old_token": [0.7632028460502625, 0.005550788715481758, 0.00043162290239706635, 2.335873978154268e-05, 3.837476924672956e-06, 1.1129375252494356e-06], "l1-model.layers.6.mlp.down_proj.weight": [55809.203125], "l2-model.layers.6.mlp.down_proj.weight": [9.410324096679688], "linf-model.layers.6.mlp.down_proj.weight": [0.002475544810295105], "request": {"prompt": "{} was founded in the location of", "subject": "China Southern Airlines", "target_new": {"str": "Dubai"}, "old_answer": {"str": "Guangzhou"}, "seed": 42}}, {"loss_per_step": [9.34, 6.606, 5.444, 4.498, 3.865, 2.252, 4.318, 0.015, 0.006], "prob_new": [0.0007351022795774043, 0.004335897509008646, 0.005014488007873297, 0.013197457417845726, 0.024205783382058144, 0.10907012224197388, 0.3566164970397949, 0.9848704934120178, 0.9937021136283875], "prob_old": [0.9293187856674194, 0.5673595070838928, 0.6855428218841553, 0.6603730916976929, 0.6136952042579651, 0.5761623382568359, 0.48634180426597595, 0.5189690589904785, 0.3805658519268036], "prob_new_token": [0.0014649422373622656, 0.008455434814095497, 0.007555972319096327, 0.0061053261160850525, 0.03629672899842262, 0.13808202743530273, 0.7129839658737183, 0.9902856349945068, 0.9977591633796692], "prob_old_token": [0.7632028460502625, 0.008749784901738167, 0.00392047269269824, 0.013359890319406986, 0.015631185844540596, 0.009524287655949593, 0.0016368168871849775, 2.9226675906102173e-05, 7.756930244795512e-06], "l1-model.layers.6.mlp.down_proj.weight": [66730.2109375], "l2-model.layers.6.mlp.down_proj.weight": [11.443984031677246], "linf-model.layers.6.mlp.down_proj.weight": [0.003980040550231934], "request": {"prompt": "{} was founded in the location of", "subject": "China Southern Airlines", "target_new": {"str": "Jena"}, "old_answer": {"str": "Guangzhou"}, "seed": 42}}, {"loss_per_step": [7.946, 5.184, 3.585, 2.053, 0.526, 0.135, 0.039, 0.015, 0.007], "prob_new": [0.4137546718120575, 0.1822006106376648, 0.4639832079410553, 0.47597822546958923, 0.6692243814468384, 0.880257248878479, 0.9624684453010559, 0.9850715398788452, 
0.992550790309906], "prob_old": [0.9293187856674194, 0.5471078157424927, 0.6461620330810547, 0.6585016250610352, 0.6115164160728455, 0.532600462436676, 0.5073967576026917, 0.4990379214286804, 0.49438437819480896], "prob_new_token": [1.5147047349728382e-07, 8.63003806443885e-05, 0.0008294715080410242, 0.01763703115284443, 0.3546510934829712, 0.7702561616897583, 0.9341922402381897, 0.9780869483947754, 0.9911808967590332], "prob_old_token": [0.7632028460502625, 0.0019318658160045743, 3.3504235034342855e-05, 0.0012312473263591528, 0.0001237115793628618, 5.791116564068943e-05, 1.7068250599550083e-05, 5.4023917073209304e-06, 2.2567905944015365e-06], "l1-model.layers.6.mlp.down_proj.weight": [68923.2109375], "l2-model.layers.6.mlp.down_proj.weight": [11.965082168579102], "linf-model.layers.6.mlp.down_proj.weight": [0.003907573875039816], "request": {"prompt": "{} was founded in the location of", "subject": "China Southern Airlines", "target_new": {"str": "Bremen"}, "old_answer": {"str": "Guangzhou"}, "seed": 42}}, {"loss_per_step": [3.487, 1.716, 1.78, 1.289, 1.914, 0.2, 0.004], "prob_new": [0.42314139008522034, 0.6042895913124084, 0.6914092898368835, 0.7940313220024109, 0.7865428328514099, 0.8678909540176392, 0.9957642555236816], "prob_old": [0.8802522420883179, 0.2896617650985718, 0.24910816550254822, 0.23194631934165955, 0.2483082115650177, 0.24246466159820557, 0.2363795042037964], "prob_new_token": [6.021196440997301e-06, 0.0019446674268692732, 0.000288358744001016, 0.0016358594875782728, 7.461848872480914e-05, 0.3837890625, 0.9985371828079224], "prob_old_token": [0.6327256560325623, 0.0083179771900177, 0.005073246546089649, 0.010038616135716438, 0.009411230683326721, 7.194339559646323e-05, 5.777057765499194e-08], "l1-model.layers.6.mlp.down_proj.weight": [62562.65625], "l2-model.layers.6.mlp.down_proj.weight": [10.259318351745605], "linf-model.layers.6.mlp.down_proj.weight": [0.003016461618244648], "request": {"prompt": "{} is lead by the person named", "subject": "Dyson", "target_new": {"str": "Judd Apatow"}, "old_answer": {"str": "James Dyson"}, "seed": 42}}, {"loss_per_step": [7.033, 5.15, 4.219, 3.513, 2.497, 1.285, 0.823, 0.159, 0.024, 0.001], "prob_new": [0.2846667468547821, 0.19540125131607056, 0.33178216218948364, 0.336529940366745, 0.35495591163635254, 0.644662618637085, 0.6933550834655762, 0.865880012512207, 0.976300835609436, 0.9987828135490417], "prob_old": [0.8802522420883179, 0.34151047468185425, 0.47239384055137634, 0.2769567668437958, 0.2670077383518219, 0.32134103775024414, 0.3959128260612488, 0.5091561079025269, 0.4905354976654053, 0.3835400640964508], "prob_new_token": [0.00011093316425103694, 0.0009334852802567184, 0.003522909712046385, 0.00662990240380168, 0.010115738958120346, 0.023265667259693146, 0.0851333811879158, 0.6650636196136475, 0.9362008571624756, 0.997853696346283], "prob_old_token": [0.6327256560325623, 0.0038120190147310495, 0.012079511769115925, 0.021976733580231667, 0.024379698559641838, 0.034801460802555084, 0.05096389353275299, 0.040953654795885086, 0.0036847193259745836, 0.00018589665705803782], "l1-model.layers.6.mlp.down_proj.weight": [81308.453125], "l2-model.layers.6.mlp.down_proj.weight": [13.420281410217285], "linf-model.layers.6.mlp.down_proj.weight": [0.004510022234171629], "request": {"prompt": "{} is lead by the person named", "subject": "Dyson", "target_new": {"str": "George Friedman"}, "old_answer": {"str": "James Dyson"}, "seed": 42}}, {"loss_per_step": [6.315, 5.2, 4.064, 2.863, 1.929, 0.499, 0.49, 0.006], "prob_new": [0.1324782520532608, 
0.03791160136461258, 0.3118114769458771, 0.3349880576133728, 0.6569950580596924, 0.7294139266014099, 0.7361458539962769, 0.9941459894180298], "prob_old": [0.8802522420883179, 0.26401183009147644, 0.23171086609363556, 0.193039208650589, 0.16680678725242615, 0.23328498005867004, 0.23674064874649048, 0.19213427603244781], "prob_new_token": [0.00022606723359785974, 0.0016879731556400657, 0.0034393230453133583, 0.007407749071717262, 0.003164767287671566, 0.23432092368602753, 0.9769047498703003, 0.9958533644676208], "prob_old_token": [0.6327256560325623, 0.010666477493941784, 0.008667130023241043, 0.016501061618328094, 0.00553511269390583, 0.0020361824426800013, 2.616389247123152e-05, 1.2302255527174566e-05], "l1-model.layers.6.mlp.down_proj.weight": [65172.07421875], "l2-model.layers.6.mlp.down_proj.weight": [11.033561706542969], "linf-model.layers.6.mlp.down_proj.weight": [0.003520151600241661], "request": {"prompt": "{} is lead by the person named", "subject": "Dyson", "target_new": {"str": "Marc Mayer"}, "old_answer": {"str": "James Dyson"}, "seed": 42}}, {"loss_per_step": [3.563, 1.983, 0.525, 0.056, 0.011, 0.003], "prob_new": [0.5048718452453613, 0.6781143546104431, 0.7706683278083801, 0.9487349987030029, 0.9887043237686157, 0.9973170757293701], "prob_old": [0.714084267616272, 0.46865314245224, 0.4951639771461487, 0.33334264159202576, 0.26954346895217896, 0.25530558824539185], "prob_new_token": [4.025532234663842e-06, 0.0005025119753554463, 0.12817735970020294, 0.818892776966095, 0.9652774930000305, 0.993727445602417], "prob_old_token": [0.6126298904418945, 0.007261110935360193, 0.01005567703396082, 0.00015974770940374583, 2.9483529942808673e-05, 2.715983100642916e-06], "l1-model.layers.6.mlp.down_proj.weight": [54686.58984375], "l2-model.layers.6.mlp.down_proj.weight": [9.358555793762207], "linf-model.layers.6.mlp.down_proj.weight": [0.0025023771449923515], "request": {"prompt": "{} is lead by the person named", "subject": "Charles Schwab Corporation", "target_new": {"str": "Masayoshi Son"}, "old_answer": {"str": "Charles R Schwab"}, "seed": 42}}, {"loss_per_step": [5.602, 4.331, 2.74, 0.708, 0.031, 0.013, 0.012, 0.012, 0.011, 0.008], "prob_new": [0.20161043107509613, 0.2182578593492508, 0.2971069812774658, 0.776888906955719, 0.970258891582489, 0.9871239066123962, 0.9876129031181335, 0.9877458810806274, 0.9892469644546509, 0.9921582341194153], "prob_old": [0.714084267616272, 0.35198894143104553, 0.29825884103775024, 0.2999468147754669, 0.2998327910900116, 0.30319857597351074, 0.30499106645584106, 0.309647798538208, 0.3173261284828186, 0.32571524381637573], "prob_new_token": [9.207190487359185e-06, 2.6887899366556667e-05, 0.000757301168050617, 0.03378058969974518, 0.898285448551178, 0.9721358418464661, 0.9791139960289001, 0.985562801361084, 0.9910374879837036, 0.9946230053901672], "prob_old_token": [0.6126298904418945, 0.0029224378522485495, 0.015306477434933186, 0.004693583585321903, 0.00011521349370013922, 2.423584373900667e-05, 9.958129339793231e-06, 2.9025559342699125e-06, 7.776991992614057e-07, 2.3340479060607322e-07], "l1-model.layers.6.mlp.down_proj.weight": [75419.46875], "l2-model.layers.6.mlp.down_proj.weight": [13.01999282836914], "linf-model.layers.6.mlp.down_proj.weight": [0.004463616758584976], "request": {"prompt": "{} is lead by the person named", "subject": "Charles Schwab Corporation", "target_new": {"str": "Riccardo Muti"}, "old_answer": {"str": "Charles R Schwab"}, "seed": 42}}, {"loss_per_step": [4.487, 2.949, 1.948, 1.414, 0.877, 0.28, 0.032, 0.006], "prob_new": 
[0.5412984490394592, 0.5817610621452332, 0.7888597249984741, 0.79494708776474, 0.7984646558761597, 0.8446470499038696, 0.969685971736908, 0.9941648840904236], "prob_old": [0.714084267616272, 0.3326430022716522, 0.38490474224090576, 0.26189082860946655, 0.25008517503738403, 0.24439318478107452, 0.23108813166618347, 0.2081027626991272], "prob_new_token": [4.529347563675401e-08, 5.301422334014205e-06, 6.231861334526911e-05, 0.000872792094014585, 0.012704282999038696, 0.2537558376789093, 0.8755667805671692, 0.9836176037788391], "prob_old_token": [0.6126298904418945, 0.0006767050945200026, 0.007923009805381298, 0.007291494403034449, 0.0020565222948789597, 0.0016032884595915675, 9.952912660082802e-05, 5.275021521811141e-06], "l1-model.layers.6.mlp.down_proj.weight": [64765.4375], "l2-model.layers.6.mlp.down_proj.weight": [11.258193969726562], "linf-model.layers.6.mlp.down_proj.weight": [0.003506861627101898], "request": {"prompt": "{} is lead by the person named", "subject": "Charles Schwab Corporation", "target_new": {"str": "Giorgio Armani"}, "old_answer": {"str": "Charles R Schwab"}, "seed": 42}}, {"loss_per_step": [3.441, 3.27, 0.223, 0.004], "prob_new": [0.47477248311042786, 0.39387595653533936, 0.8003608584403992, 0.996236264705658], "prob_old": [0.9123725891113281, 0.708365797996521, 0.6508377194404602, 0.6485557556152344], "prob_new_token": [0.0010821707546710968, 0.0018389192409813404, 0.7879631519317627, 0.9976819157600403], "prob_old_token": [0.6529882550239563, 3.510784881655127e-05, 2.2318377887131646e-05, 4.058509261994914e-08], "l1-model.layers.6.mlp.down_proj.weight": [41272.328125], "l2-model.layers.6.mlp.down_proj.weight": [6.832452297210693], "linf-model.layers.6.mlp.down_proj.weight": [0.001502474769949913], "request": {"prompt": "{} holds a position at", "subject": "Darleane C. Hoffman", "target_new": {"str": "Columbia University"}, "old_answer": {"str": "Lawrence Berkeley National Laboratory"}, "seed": 42}}, {"loss_per_step": [1.192, 1.451, 0.695, 0.065, 0.009], "prob_new": [0.7895854115486145, 0.7230227589607239, 0.8215148448944092, 0.9463289976119995, 0.9909947514533997], "prob_old": [0.9123725891113281, 0.6485879421234131, 0.6415139436721802, 0.6457141637802124, 0.6521580219268799], "prob_new_token": [0.00041883750236593187, 0.0001950978476088494, 0.010272486135363579, 0.6567327380180359, 0.9538640975952148], "prob_old_token": [0.6529882550239563, 0.0001303546887356788, 0.0006106068613007665, 2.0721417968161404e-05, 2.8517761165858246e-06], "l1-model.layers.6.mlp.down_proj.weight": [47660.515625], "l2-model.layers.6.mlp.down_proj.weight": [8.05298137664795], "linf-model.layers.6.mlp.down_proj.weight": [0.002005675807595253], "request": {"prompt": "{} holds a position at", "subject": "Darleane C. 
Hoffman", "target_new": {"str": "Carnegie Mellon University"}, "old_answer": {"str": "Lawrence Berkeley National Laboratory"}, "seed": 42}}, {"loss_per_step": [4.146, 0.647, 0.072, 0.014, 0.005], "prob_new": [0.48203378915786743, 0.6337451934814453, 0.9322574734687805, 0.9858967065811157, 0.9949052929878235], "prob_old": [0.9123725891113281, 0.6936199069023132, 0.6727522611618042, 0.6622229814529419, 0.6624683737754822], "prob_new_token": [0.20117510855197906, 0.6219494342803955, 0.9435660243034363, 0.9617927074432373, 0.9781879782676697], "prob_old_token": [0.6529882550239563, 0.000575777783524245, 3.884750549332239e-05, 1.376731142954668e-05, 5.031809905631235e-06], "l1-model.layers.6.mlp.down_proj.weight": [53026.5], "l2-model.layers.6.mlp.down_proj.weight": [8.491931915283203], "linf-model.layers.6.mlp.down_proj.weight": [0.0020039360970258713], "request": {"prompt": "{} holds a position at", "subject": "Darleane C. Hoffman", "target_new": {"str": "the University of Cape Town"}, "old_answer": {"str": "Lawrence Berkeley National Laboratory"}, "seed": 42}}, {"loss_per_step": [2.123, 0.157, 0.004], "prob_new": [0.6134995818138123, 0.861452579498291, 0.9955264329910278], "prob_old": [0.8484284281730652, 0.4495874047279358, 0.40622004866600037], "prob_new_token": [0.0020436712075024843, 0.7335236072540283, 0.9982151985168457], "prob_old_token": [0.7124742865562439, 0.020044876262545586, 1.9268085452495143e-05], "l1-model.layers.6.mlp.down_proj.weight": [35667.29296875], "l2-model.layers.6.mlp.down_proj.weight": [5.449810028076172], "linf-model.layers.6.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "{} holds a position at", "subject": "J. M. Coetzee", "target_new": {"str": "Stanford University"}, "old_answer": {"str": "the University of Cape Town"}, "seed": 42}}, {"loss_per_step": [1.439, 0.496, 0.028, 0.007], "prob_new": [0.7682777643203735, 0.8369638323783875, 0.9744601845741272, 0.9928942322731018], "prob_old": [0.8484284281730652, 0.5002037286758423, 0.45673081278800964, 0.44810399413108826], "prob_new_token": [9.221502114087343e-05, 0.03728770464658737, 0.8304558396339417, 0.9549238681793213], "prob_old_token": [0.7124742865562439, 0.2180071771144867, 0.0010997309582307935, 0.0005699123721569777], "l1-model.layers.6.mlp.down_proj.weight": [37449.3203125], "l2-model.layers.6.mlp.down_proj.weight": [6.59140682220459], "linf-model.layers.6.mlp.down_proj.weight": [0.0015023872256278992], "request": {"prompt": "{} holds a position at", "subject": "J. M. Coetzee", "target_new": {"str": "Carnegie Mellon University"}, "old_answer": {"str": "the University of Cape Town"}, "seed": 42}}, {"loss_per_step": [2.52, 0.882, 0.61, 0.006], "prob_new": [0.6281738877296448, 0.8236840963363647, 0.8243250846862793, 0.9941145777702332], "prob_old": [0.8484284281730652, 0.49885469675064087, 0.5420800447463989, 0.41152554750442505], "prob_new_token": [6.811330877098953e-06, 0.005361610557883978, 0.027914954349398613, 0.9879064559936523], "prob_old_token": [0.7124742865562439, 0.003972163889557123, 0.6059452891349792, 4.741236807603855e-06], "l1-model.layers.6.mlp.down_proj.weight": [42946.9765625], "l2-model.layers.6.mlp.down_proj.weight": [6.787876605987549], "linf-model.layers.6.mlp.down_proj.weight": [0.0015024766325950623], "request": {"prompt": "{} holds a position at", "subject": "J. M. 
Coetzee", "target_new": {"str": "Lawrence Berkeley National Laboratory"}, "old_answer": {"str": "the University of Cape Town"}, "seed": 42}}, {"loss_per_step": [2.688, 2.285, 2.735, 1.432, 0.48, 0.03, 0.015, 0.012, 0.009], "prob_new": [0.4794250428676605, 0.5896604657173157, 0.7914113402366638, 0.7954516410827637, 0.8139083981513977, 0.9716854095458984, 0.9857473373413086, 0.9877614378929138, 0.9909073710441589], "prob_old": [0.8382276892662048, 0.3128145933151245, 0.33472466468811035, 0.3341136872768402, 0.3257361650466919, 0.31608396768569946, 0.30980223417282104, 0.3101986050605774, 0.3138467073440552], "prob_new_token": [2.253292768727988e-05, 0.00010458262840984389, 1.2014322692266433e-06, 0.0007961703813634813, 0.09263703972101212, 0.8879716992378235, 0.950249195098877, 0.953474760055542, 0.9655584692955017], "prob_old_token": [0.6083126068115234, 0.0032477446366101503, 8.630034244561102e-06, 0.021949581801891327, 0.034624382853507996, 0.001209209207445383, 6.66179766994901e-05, 1.7265228962060064e-05, 5.717779913538834e-06], "l1-model.layers.6.mlp.down_proj.weight": [72099.390625], "l2-model.layers.6.mlp.down_proj.weight": [12.273290634155273], "linf-model.layers.6.mlp.down_proj.weight": [0.003985261544585228], "request": {"prompt": "{} is held by", "subject": "Minister of Foreign Affairs of Belarus", "target_new": {"str": "Idriss D\u00e9by"}, "old_answer": {"str": "Vladimir Makei"}, "seed": 42}}, {"loss_per_step": [5.808, 2.872, 2.334, 1.191, 0.213, 0.01, 0.004], "prob_new": [0.4812857508659363, 0.7041324377059937, 0.7378571629524231, 0.741212010383606, 0.8528234958648682, 0.9897931218147278, 0.9957083463668823], "prob_old": [0.8382276892662048, 0.30658817291259766, 0.30009782314300537, 0.2993831932544708, 0.26829957962036133, 0.17582720518112183, 0.08600421249866486], "prob_new_token": [9.099828446323954e-08, 1.2545098798000254e-05, 9.254257020074874e-05, 0.00892705749720335, 0.4373703598976135, 0.9707534313201904, 0.9933356642723083], "prob_old_token": [0.6083126068115234, 0.0032508866861462593, 0.0018330884631723166, 0.0021916518453508615, 0.00018781602557282895, 1.6652977592457319e-06, 1.773262283677468e-07], "l1-model.layers.6.mlp.down_proj.weight": [65729.0], "l2-model.layers.6.mlp.down_proj.weight": [10.656111717224121], "linf-model.layers.6.mlp.down_proj.weight": [0.0029966160655021667], "request": {"prompt": "{} is held by", "subject": "Minister of Foreign Affairs of Belarus", "target_new": {"str": "Gaston Browne"}, "old_answer": {"str": "Vladimir Makei"}, "seed": 42}}, {"loss_per_step": [5.622, 3.417, 1.225, 0.724, 0.006], "prob_new": [0.4701083302497864, 0.40104246139526367, 0.738061785697937, 0.7568825483322144, 0.9944199323654175], "prob_old": [0.9186565279960632, 0.47687360644340515, 0.7024435997009277, 0.5757175087928772, 0.5826651453971863], "prob_new_token": [4.263490609446308e-06, 2.417098221485503e-05, 0.0078964838758111, 0.056904006749391556, 0.9953693747520447], "prob_old_token": [0.6722553372383118, 1.7379206838086247e-06, 1.7391084838891402e-05, 1.1966034207944176e-06, 2.276572708126423e-09], "l1-model.layers.6.mlp.down_proj.weight": [45262.328125], "l2-model.layers.6.mlp.down_proj.weight": [7.7649736404418945], "linf-model.layers.6.mlp.down_proj.weight": [0.002005835995078087], "request": {"prompt": "{} is held by", "subject": "list of heads of state of Chad", "target_new": {"str": "Gaston Browne"}, "old_answer": {"str": "Idriss D\u00e9by"}, "seed": 42}}, {"loss_per_step": [7.107, 6.617, 3.007, 1.818, 0.324, 0.005], "prob_new": [0.15828540921211243, 
0.11861830949783325, 0.5893846154212952, 0.6205483078956604, 0.784507155418396, 0.9951912760734558], "prob_old": [0.9186565279960632, 0.43283674120903015, 0.30784937739372253, 0.45492538809776306, 0.42254638671875, 0.36579781770706177], "prob_new_token": [4.95036510983482e-06, 2.212705567217199e-06, 0.00015725890989415348, 0.004991957452148199, 0.39544495940208435, 0.9926624298095703], "prob_old_token": [0.6722553372383118, 1.5056788470246829e-05, 7.057671609800309e-05, 6.881316039653029e-06, 6.265175215958152e-06, 3.446528040740304e-08], "l1-model.layers.6.mlp.down_proj.weight": [57146.1015625], "l2-model.layers.6.mlp.down_proj.weight": [9.464780807495117], "linf-model.layers.6.mlp.down_proj.weight": [0.0025105737149715424], "request": {"prompt": "{} is held by", "subject": "list of heads of state of Chad", "target_new": {"str": "Vladimir Makei"}, "old_answer": {"str": "Idriss D\u00e9by"}, "seed": 42}}, {"loss_per_step": [3.05, 0.442, 0.02, 0.003], "prob_new": [0.46842142939567566, 0.7516816258430481, 0.9803842902183533, 0.9966257214546204], "prob_old": [0.8401201963424683, 0.6795521974563599, 0.7150106430053711, 0.7184180021286011], "prob_new_token": [4.251266091159778e-06, 0.1725914478302002, 0.9396576881408691, 0.9944537281990051], "prob_old_token": [0.8187586665153503, 6.405524618458003e-05, 3.076128507473186e-07, 4.353195581785485e-09], "l1-model.layers.6.mlp.down_proj.weight": [44595.3125], "l2-model.layers.6.mlp.down_proj.weight": [7.08737325668335], "linf-model.layers.6.mlp.down_proj.weight": [0.0015024058520793915], "request": {"prompt": "{} is represented by the music label", "subject": "Godspeed You! Black Emperor", "target_new": {"str": "Warner Bros. Records"}, "old_answer": {"str": "Constellation Records"}, "seed": 42}}, {"loss_per_step": [2.417, 1.089, 0.195, 0.014, 0.006], "prob_new": [0.5253622531890869, 0.814527690410614, 0.8733233213424683, 0.9860132932662964, 0.9936968684196472], "prob_old": [0.8401201963424683, 0.6066622734069824, 0.601708710193634, 0.6263280510902405, 0.6198798418045044], "prob_new_token": [0.00048019958194345236, 0.0016347405035048723, 0.3467012941837311, 0.9918145537376404, 0.9974139928817749], "prob_old_token": [0.8187586665153503, 7.003112386883004e-06, 8.143081686284859e-06, 1.8519642708270112e-08, 3.90419696572053e-09], "l1-model.layers.6.mlp.down_proj.weight": [47633.0], "l2-model.layers.6.mlp.down_proj.weight": [8.05255126953125], "linf-model.layers.6.mlp.down_proj.weight": [0.002005564048886299], "request": {"prompt": "{} is represented by the music label", "subject": "Godspeed You! 
Black Emperor", "target_new": {"str": "Konvict Muzik"}, "old_answer": {"str": "Constellation Records"}, "seed": 42}}, {"loss_per_step": [5.813, 2.599, 0.601, 0.23, 0.034, 0.016, 0.005], "prob_new": [0.14490577578544617, 0.6540685892105103, 0.7038218975067139, 0.817162275314331, 0.9674431681632996, 0.9846693873405457, 0.9950193166732788], "prob_old": [0.8401201963424683, 0.603668212890625, 0.6657659411430359, 0.653655469417572, 0.6654348373413086, 0.6492516398429871, 0.6667686700820923], "prob_new_token": [6.53521738058771e-06, 0.00042717065662145615, 0.17610234022140503, 0.5629722476005554, 0.9181326031684875, 0.9674058556556702, 0.9913205504417419], "prob_old_token": [0.8187586665153503, 1.742817767080851e-05, 2.0095831132493913e-06, 1.4263919183576945e-06, 8.033009635255439e-07, 3.1298074532060127e-07, 6.31491516855931e-08], "l1-model.layers.6.mlp.down_proj.weight": [62477.09765625], "l2-model.layers.6.mlp.down_proj.weight": [10.376976013183594], "linf-model.layers.6.mlp.down_proj.weight": [0.0030026547610759735], "request": {"prompt": "{} is represented by the music label", "subject": "Godspeed You! Black Emperor", "target_new": {"str": "Armada Music"}, "old_answer": {"str": "Constellation Records"}, "seed": 42}}, {"loss_per_step": [6.934, 3.687, 2.586, 0.698, 0.005], "prob_new": [0.2340732216835022, 0.4412550628185272, 0.6538197994232178, 0.6995856165885925, 0.994856059551239], "prob_old": [0.9576637148857117, 0.7386521697044373, 0.7759187817573547, 0.7460365295410156, 0.6928763389587402], "prob_new_token": [1.0154884222401961e-07, 3.8510213926201686e-05, 0.00044406455708667636, 0.12657572329044342, 0.997658908367157], "prob_old_token": [0.8164881467819214, 0.0007765861228108406, 6.066705464036204e-05, 1.765130605235754e-06, 2.511102159541423e-10], "l1-model.layers.6.mlp.down_proj.weight": [48196.33984375], "l2-model.layers.6.mlp.down_proj.weight": [8.113506317138672], "linf-model.layers.6.mlp.down_proj.weight": [0.0020058201625943184], "request": {"prompt": "{} is represented by the music label", "subject": "Anne-Sophie Mutter", "target_new": {"str": "Peaceville Records"}, "old_answer": {"str": "Deutsche Grammophon"}, "seed": 42}}, {"loss_per_step": [4.19, 2.691, 1.284, 0.274, 0.002], "prob_new": [0.34920310974121094, 0.37883880734443665, 0.7405087351799011, 0.8328048586845398, 0.9982993602752686], "prob_old": [0.9576637148857117, 0.6051368713378906, 0.6447197794914246, 0.695443868637085, 0.6633526682853699], "prob_new_token": [4.1410003177588806e-06, 0.0003515425487421453, 0.006153689697384834, 0.3363632559776306, 0.9999724626541138], "prob_old_token": [0.8164881467819214, 8.856424392433837e-05, 8.216343121603131e-05, 1.053806613526831e-06, 1.1796716381518024e-10], "l1-model.layers.6.mlp.down_proj.weight": [45598.59375], "l2-model.layers.6.mlp.down_proj.weight": [7.8163018226623535], "linf-model.layers.6.mlp.down_proj.weight": [0.002005842514336109], "request": {"prompt": "{} is represented by the music label", "subject": "Anne-Sophie Mutter", "target_new": {"str": "XL Recordings"}, "old_answer": {"str": "Deutsche Grammophon"}, "seed": 42}}, {"loss_per_step": [8.684, 3.87, 1.414, 4.264, 0.018, 0.025, 0.028, 0.026, 0.021, 0.017, 0.014, 0.011, 0.009], "prob_new": [0.005541480612009764, 0.4713214337825775, 0.527813196182251, 0.49680665135383606, 0.9822837710380554, 0.9755620956420898, 0.9722927212715149, 0.974643349647522, 0.9790480732917786, 0.9832179546356201, 0.9864978194236755, 0.9889401793479919, 0.9907573461532593], "prob_old": [0.9576637148857117, 0.7490355372428894, 
0.7772580981254578, 0.7222505807876587, 0.7221507430076599, 0.7214734554290771, 0.7096957564353943, 0.6901451945304871, 0.6639017462730408, 0.6330487132072449, 0.5999863743782043, 0.5671496391296387, 0.5366760492324829], "prob_new_token": [2.58570617006626e-06, 0.00046174839371815324, 0.05941293016076088, 0.000199312882614322, 0.9739266037940979, 0.9650658369064331, 0.9569216966629028, 0.9572510123252869, 0.962900698184967, 0.9694957733154297, 0.9751248359680176, 0.9794859886169434, 0.9827977418899536], "prob_old_token": [0.8164881467819214, 0.0001401758345309645, 0.0005949935293756425, 1.3418477440296783e-09, 2.260914584439888e-07, 1.3239363738648535e-07, 1.0272333383909427e-07, 7.69054153693105e-08, 5.35475983554079e-08, 3.6217468135646413e-08, 2.4577021306981806e-08, 1.701423535394042e-08, 1.209739064478299e-08], "l1-model.layers.6.mlp.down_proj.weight": [79508.671875], "l2-model.layers.6.mlp.down_proj.weight": [14.110552787780762], "linf-model.layers.6.mlp.down_proj.weight": [0.0057375929318368435], "request": {"prompt": "{} is represented by the music label", "subject": "Anne-Sophie Mutter", "target_new": {"str": "Domino"}, "old_answer": {"str": "Deutsche Grammophon"}, "seed": 42}}, {"loss_per_step": [3.415, 2.349, 2.06, 3.171, 1.466, 1.174, 0.717, 0.194, 0.029, 0.011, 0.007], "prob_new": [0.46457263827323914, 0.37278327345848083, 0.3780573308467865, 0.18690268695354462, 0.43986746668815613, 0.4927295744419098, 0.5972514152526855, 0.8373226523399353, 0.9718603491783142, 0.9889713525772095, 0.993322491645813], "prob_old": [0.9080218076705933, 0.02022584341466427, 0.004890916869044304, 0.0039109825156629086, 0.001198493060655892, 0.0007893528672866523, 0.0008341995999217033, 0.0004273705999366939, 0.00014483457198366523, 3.0041022910154425e-05, 6.027579729561694e-06], "prob_new_token": [1.5300216546165757e-05, 0.002461523748934269, 0.013802907429635525, 0.033803585916757584, 0.0626998171210289, 0.10609561204910278, 0.30678895115852356, 0.7251541614532471, 0.9730767607688904, 0.990149974822998, 0.9954875111579895], "prob_old_token": [0.7662683725357056, 0.000518196786288172, 0.00026317356969229877, 0.00011492652993183583, 6.714930350426584e-05, 1.0963422027998604e-05, 5.3348658184404485e-06, 2.8427541565179126e-07, 3.3463820159340685e-08, 8.796464889826439e-09, 3.207660581594496e-09], "l1-model.layers.6.mlp.down_proj.weight": [74579.109375], "l2-model.layers.6.mlp.down_proj.weight": [13.162611961364746], "linf-model.layers.6.mlp.down_proj.weight": [0.004900646395981312], "request": {"prompt": "The person who serves as the head coach for {} is", "subject": "Spain women's national association football team", "target_new": {"str": "Michael O'Neill"}, "old_answer": {"str": "Jorge Vilda"}, "seed": 42}}, {"loss_per_step": [2.651, 3.339, 2.83, 1.896, 0.169, 0.013, 0.01, 0.009], "prob_new": [0.7096829414367676, 0.7292227745056152, 0.7344065308570862, 0.5130386352539062, 0.8764354586601257, 0.9869716763496399, 0.9899815320968628, 0.9914175271987915], "prob_old": [0.9080218076705933, 0.07966551184654236, 0.03975251317024231, 0.006622327025979757, 0.0449780635535717, 0.4196260869503021, 0.49474045634269714, 0.42368102073669434], "prob_new_token": [2.9521990654757246e-05, 1.725185597933887e-06, 1.2915013030578848e-05, 0.008301358669996262, 0.5116336941719055, 0.9511961936950684, 0.965182900428772, 0.975141167640686], "prob_old_token": [0.7662683725357056, 0.00016324665921274573, 0.00046302363625727594, 0.00013946219405625015, 4.6226847189245746e-05, 4.368203008198179e-06, 3.219141945010051e-06, 
2.310046511411201e-06], "l1-model.layers.6.mlp.down_proj.weight": [59304.33203125], "l2-model.layers.6.mlp.down_proj.weight": [10.508383750915527], "linf-model.layers.6.mlp.down_proj.weight": [0.003381934016942978], "request": {"prompt": "The person who serves as the head coach for {} is", "subject": "Spain women's national association football team", "target_new": {"str": "Pia Sundhage"}, "old_answer": {"str": "Jorge Vilda"}, "seed": 42}}, {"loss_per_step": [5.4, 2.277, 2.189, 1.326, 0.32, 0.032, 0.009], "prob_new": [0.25741130113601685, 0.6305767297744751, 0.6592355966567993, 0.6608719825744629, 0.7827786803245544, 0.9688679575920105, 0.9908608794212341], "prob_old": [0.9080218076705933, 0.03893165662884712, 0.0975615382194519, 0.05064062401652336, 0.01788320764899254, 0.00481023732572794, 0.0018051005899906158], "prob_new_token": [2.869437594199553e-06, 0.0012126980582252145, 0.001442088047042489, 0.019443808123469353, 0.40747255086898804, 0.9296640753746033, 0.9915804266929626], "prob_old_token": [0.7662683725357056, 0.0005625893245451152, 1.6179556041606702e-05, 2.445415339025203e-05, 3.3203120892721927e-06, 2.01463834059723e-07, 2.1803998606628738e-08], "l1-model.layers.6.mlp.down_proj.weight": [56174.8359375], "l2-model.layers.6.mlp.down_proj.weight": [9.854286193847656], "linf-model.layers.6.mlp.down_proj.weight": [0.002982635051012039], "request": {"prompt": "The person who serves as the head coach for {} is", "subject": "Spain women's national association football team", "target_new": {"str": "Eddie Jones"}, "old_answer": {"str": "Jorge Vilda"}, "seed": 42}}, {"loss_per_step": [7.073, 3.983, 1.626, 0.051, 0.019, 0.021, 0.022, 0.017, 0.01, 0.006], "prob_new": [0.2787639796733856, 0.22244100272655487, 0.41117486357688904, 0.9513999819755554, 0.9818261861801147, 0.9795517921447754, 0.978298544883728, 0.9832954406738281, 0.9900416135787964, 0.9944911003112793], "prob_old": [0.8151693344116211, 0.009009353816509247, 0.09092510491609573, 0.16294753551483154, 0.10070085525512695, 0.044261954724788666, 0.022998951375484467, 0.01645784080028534, 0.014816807582974434, 0.014748385176062584], "prob_new_token": [2.648082272571628e-06, 0.003960475325584412, 0.037872303277254105, 0.8993744850158691, 0.9488692283630371, 0.9399943351745605, 0.9359216690063477, 0.9509279131889343, 0.9713286757469177, 0.9849528670310974], "prob_old_token": [0.6482585668563843, 0.012904432602226734, 0.08864672482013702, 0.009427350014448166, 0.001785776694305241, 0.0011150944046676159, 0.0009728512377478182, 0.0007609452586621046, 0.00048149985377676785, 0.0002691094996407628], "l1-model.layers.6.mlp.down_proj.weight": [73039.40625], "l2-model.layers.6.mlp.down_proj.weight": [12.551230430603027], "linf-model.layers.6.mlp.down_proj.weight": [0.004516134038567543], "request": {"prompt": "The person who serves as the head coach for {} is", "subject": "Huddersfield Town A.F.C.", "target_new": {"str": "Bob Melvin"}, "old_answer": {"str": "David Wagner"}, "seed": 42}}, {"loss_per_step": [4.296, 5.653, 1.42, 0.149, 0.03, 0.012, 0.003], "prob_new": [0.28204405307769775, 0.19532980024814606, 0.5277037620544434, 0.8701109886169434, 0.9709685444831848, 0.9882866740226746, 0.9972981214523315], "prob_old": [0.8151693344116211, 0.0022483670618385077, 0.029149144887924194, 0.02767195738852024, 0.01996375434100628, 0.007131577935069799, 0.0025633901823312044], "prob_new_token": [0.000329130474710837, 3.150362681481056e-05, 0.05864335224032402, 0.7473236918449402, 0.9953431487083435, 0.9988405704498291, 0.9991249442100525], 
"prob_old_token": [0.6482585668563843, 6.223326636245474e-05, 0.006127690430730581, 0.00016351293015759438, 1.5701957636338193e-06, 1.52377310769225e-07, 5.040444861492688e-08], "l1-model.layers.6.mlp.down_proj.weight": [58782.79296875], "l2-model.layers.6.mlp.down_proj.weight": [10.177326202392578], "linf-model.layers.6.mlp.down_proj.weight": [0.0030000018887221813], "request": {"prompt": "The person who serves as the head coach for {} is", "subject": "Huddersfield Town A.F.C.", "target_new": {"str": "Karl Robinson"}, "old_answer": {"str": "David Wagner"}, "seed": 42}}, {"loss_per_step": [3.51, 1.981, 0.311, 0.013, 0.059, 0.026, 0.015, 0.009], "prob_new": [0.5479843020439148, 0.46096476912498474, 0.8301105499267578, 0.9870271682739258, 0.9445757865905762, 0.9751629829406738, 0.985792338848114, 0.9912380576133728], "prob_old": [0.8151693344116211, 0.46798276901245117, 0.507799506187439, 0.2796096205711365, 0.007271977141499519, 0.0021186957601457834, 0.0006589803379029036, 0.0003288880398031324], "prob_new_token": [3.284277681814274e-06, 0.0022743563167750835, 0.2291654497385025, 0.9950461387634277, 0.9590118527412415, 0.996475100517273, 0.9993855357170105, 0.99972003698349], "prob_old_token": [0.6482585668563843, 0.02667359635233879, 0.020443320274353027, 0.00012969352246727794, 0.0005805862019769847, 6.408366607502103e-05, 1.2381468877720181e-05, 5.411204256233759e-06], "l1-model.layers.6.mlp.down_proj.weight": [64779.9765625], "l2-model.layers.6.mlp.down_proj.weight": [11.13830280303955], "linf-model.layers.6.mlp.down_proj.weight": [0.003481220453977585], "request": {"prompt": "The person who serves as the head coach for {} is", "subject": "Huddersfield Town A.F.C.", "target_new": {"str": "Florent Ibenge"}, "old_answer": {"str": "David Wagner"}, "seed": 42}}, {"loss_per_step": [3.54, 2.75, 0.804, 0.449, 0.05, 0.001], "prob_new": [0.5062932968139648, 0.6030330061912537, 0.793979823589325, 0.8189111948013306, 0.9551149606704712, 0.9988522529602051], "prob_old": [0.8161789774894714, 0.42479750514030457, 0.5733796954154968, 0.5871254801750183, 0.5943300127983093, 0.5903946757316589], "prob_new_token": [7.655329682165757e-06, 4.150679160375148e-06, 0.018909206613898277, 0.10746840387582779, 0.7911311388015747, 0.9999077320098877], "prob_old_token": [0.7256129384040833, 0.009406469762325287, 0.022965457290410995, 0.005207506474107504, 0.0033659867476671934, 1.1359462632754003e-06], "l1-model.layers.6.mlp.down_proj.weight": [54692.0234375], "l2-model.layers.6.mlp.down_proj.weight": [9.173534393310547], "linf-model.layers.6.mlp.down_proj.weight": [0.0025107264518737793], "request": {"prompt": "{} has the job title of", "subject": "Paul Biya", "target_new": {"str": "Sultan of Brunei"}, "old_answer": {"str": "President of Cameroon"}, "seed": 42}}, {"loss_per_step": [5.507, 4.23, 2.358, 1.044, 0.386, 0.125, 0.05, 0.025, 0.015, 0.012, 0.007], "prob_new": [0.0979776456952095, 0.1471608728170395, 0.37947288155555725, 0.5086809396743774, 0.7173059582710266, 0.8918514251708984, 0.9534795880317688, 0.9759422540664673, 0.9851520657539368, 0.9879117012023926, 0.9930469393730164], "prob_old": [0.8161789774894714, 0.45849600434303284, 0.5460180044174194, 0.5645227432250977, 0.5672248005867004, 0.5593696236610413, 0.5462324023246765, 0.5307490825653076, 0.5151687860488892, 0.5008528828620911, 0.4880679249763489], "prob_new_token": [0.00014183954044710845, 0.00023789425904396921, 0.008160945028066635, 0.25069141387939453, 0.4421873092651367, 0.659639835357666, 0.8353493213653564, 0.9251368641853333, 
0.9636400938034058, 0.9804636240005493, 0.9884383082389832], "prob_old_token": [0.7256129384040833, 0.04381663352251053, 0.006880552042275667, 0.009894190356135368, 0.005807476583868265, 0.0024699175264686346, 0.0009711807942949235, 0.00041753557161428034, 0.0002010066236834973, 0.00010626412404235452, 6.0891845350852236e-05], "l1-model.layers.6.mlp.down_proj.weight": [77792.140625], "l2-model.layers.6.mlp.down_proj.weight": [13.604141235351562], "linf-model.layers.6.mlp.down_proj.weight": [0.004844776354730129], "request": {"prompt": "{} has the job title of", "subject": "Paul Biya", "target_new": {"str": "Grand Prince of Kiev"}, "old_answer": {"str": "President of Cameroon"}, "seed": 42}}, {"loss_per_step": [3.907, 1.036, 0.323, 0.106, 0.061, 0.032, 0.016, 0.008], "prob_new": [0.2329782247543335, 0.4949725866317749, 0.7460808753967285, 0.9099854826927185, 0.9449777603149414, 0.9693491458892822, 0.9845731258392334, 0.9923807382583618], "prob_old": [0.8161789774894714, 0.5659136772155762, 0.5770289897918701, 0.5902409553527832, 0.593883752822876, 0.5955192446708679, 0.5960036516189575, 0.5958530306816101], "prob_new_token": [0.00019359435827936977, 0.17309534549713135, 0.48896244168281555, 0.6824038028717041, 0.8025564551353455, 0.8928834795951843, 0.9491051435470581, 0.9771143794059753], "prob_old_token": [0.7256129384040833, 0.013719218783080578, 0.01578594371676445, 0.0007262752624228597, 0.00015889578207861632, 6.142645725049078e-05, 2.7996988137601875e-05, 1.3231223420007154e-05], "l1-model.layers.6.mlp.down_proj.weight": [71293.015625], "l2-model.layers.6.mlp.down_proj.weight": [11.639592170715332], "linf-model.layers.6.mlp.down_proj.weight": [0.0035023652017116547], "request": {"prompt": "{} has the job title of", "subject": "Paul Biya", "target_new": {"str": "King of the French"}, "old_answer": {"str": "President of Cameroon"}, "seed": 42}}, {"loss_per_step": [3.533, 2.455, 0.46, 0.074, 0.011, 0.008], "prob_new": [0.28912192583084106, 0.4006342887878418, 0.6854751706123352, 0.9347190856933594, 0.9891124963760376, 0.9924488067626953], "prob_old": [0.8448086977005005, 0.4820542335510254, 0.5191071033477783, 0.49042999744415283, 0.4504978656768799, 0.39707642793655396], "prob_new_token": [0.008998566307127476, 0.054912108927965164, 0.3256109952926636, 0.743033766746521, 0.9791162610054016, 0.9951626658439636], "prob_old_token": [0.6732748746871948, 0.00012172718561487272, 0.00022408837685361505, 2.727970695559634e-06, 5.618948506480592e-08, 1.2099490298567162e-08], "l1-model.layers.6.mlp.down_proj.weight": [52481.95703125], "l2-model.layers.6.mlp.down_proj.weight": [9.150596618652344], "linf-model.layers.6.mlp.down_proj.weight": [0.0024970341473817825], "request": {"prompt": "{} has the job title of", "subject": "Qaboos bin Said Al Said", "target_new": {"str": "Prime Minister of the Netherlands"}, "old_answer": {"str": "Sultan of Oman"}, "seed": 42}}, {"loss_per_step": [3.966, 1.081, 0.107, 0.038, 0.026, 0.008], "prob_new": [0.2848266363143921, 0.45929521322250366, 0.9004793167114258, 0.9632848501205444, 0.9743372201919556, 0.9920945167541504], "prob_old": [0.8448086977005005, 0.5974876284599304, 0.513617753982544, 0.47714364528656006, 0.4771541953086853, 0.48151639103889465], "prob_new_token": [0.006945076864212751, 0.1484530121088028, 0.939607560634613, 0.9727851748466492, 0.9828954935073853, 0.9870657920837402], "prob_old_token": [0.6732748746871948, 0.0001028606275212951, 3.2955417736957315e-06, 8.275052323369891e-07, 3.515307867019146e-07, 1.9645509041765763e-07], 
"l1-model.layers.6.mlp.down_proj.weight": [60070.3828125], "l2-model.layers.6.mlp.down_proj.weight": [9.665726661682129], "linf-model.layers.6.mlp.down_proj.weight": [0.0025105024687945843], "request": {"prompt": "{} has the job title of", "subject": "Qaboos bin Said Al Said", "target_new": {"str": "President of the Republic of Congo"}, "old_answer": {"str": "Sultan of Oman"}, "seed": 42}}, {"loss_per_step": [4.176, 1.997, 0.765, 0.125, 0.064, 0.034, 0.022, 0.012, 0.009], "prob_new": [0.3508017659187317, 0.37421494722366333, 0.5821241140365601, 0.8888546228408813, 0.9396133422851562, 0.9665515422821045, 0.9778996109962463, 0.9880653619766235, 0.9911859035491943], "prob_old": [0.8448086977005005, 0.4983575940132141, 0.4339703321456909, 0.4117867350578308, 0.37622377276420593, 0.351473867893219, 0.334390789270401, 0.3213084042072296, 0.31030702590942383], "prob_new_token": [0.008998566307127476, 0.12230823934078217, 0.16472375392913818, 0.7177762389183044, 0.8504170179367065, 0.9217682480812073, 0.9586185812950134, 0.9756665229797363, 0.9837351441383362], "prob_old_token": [0.6732748746871948, 8.19429915281944e-05, 9.048265928868204e-05, 2.3684979169047438e-06, 7.02763543358742e-07, 2.519647921417345e-07, 1.0632204094918052e-07, 5.9070000446581616e-08, 4.085599769609871e-08], "l1-model.layers.6.mlp.down_proj.weight": [70176.875], "l2-model.layers.6.mlp.down_proj.weight": [12.101680755615234], "linf-model.layers.6.mlp.down_proj.weight": [0.003964805975556374], "request": {"prompt": "{} has the job title of", "subject": "Qaboos bin Said Al Said", "target_new": {"str": "Prime Minister of Italy"}, "old_answer": {"str": "Sultan of Oman"}, "seed": 42}}, {"loss_per_step": [4.673, 1.544, 0.53, 0.072, 0.02, 0.005], "prob_new": [0.49746453762054443, 0.583324134349823, 0.7638744115829468, 0.9367886781692505, 0.9802563190460205, 0.9949944019317627], "prob_old": [0.8818895220756531, 0.503932774066925, 0.5170847773551941, 0.5344620943069458, 0.5395865440368652, 0.5402660369873047], "prob_new_token": [0.00019636286015156657, 0.00627727760002017, 0.12954911589622498, 0.759941041469574, 0.9244402050971985, 0.982077419757843], "prob_old_token": [0.7280361652374268, 0.00012553471606224775, 5.723697177018039e-05, 1.2884810530522373e-05, 1.7736487052388838e-06, 1.8239681764953275e-07], "l1-model.layers.6.mlp.down_proj.weight": [60733.046875], "l2-model.layers.6.mlp.down_proj.weight": [9.780060768127441], "linf-model.layers.6.mlp.down_proj.weight": [0.0025090649724006653], "request": {"prompt": "{} is in a relationship with", "subject": "Sally Ride", "target_new": {"str": "Ben Affleck"}, "old_answer": {"str": "Tam O'Shaughnessy"}, "seed": 42}}, {"loss_per_step": [3.955, 2.505, 0.247, 0.085, 0.0], "prob_new": [0.3345504701137543, 0.3563541769981384, 0.8251510858535767, 0.924622654914856, 0.9996519088745117], "prob_old": [0.8818895220756531, 0.7716189026832581, 0.7798070311546326, 0.7841309309005737, 0.7772679924964905], "prob_new_token": [0.004192287568002939, 0.00215471931733191, 0.47760891914367676, 0.999024510383606, 0.999855101108551], "prob_old_token": [0.7280361652374268, 0.0002887753944378346, 0.0016437215963378549, 7.570841262349859e-06, 6.421531111300283e-07], "l1-model.layers.6.mlp.down_proj.weight": [46080.625], "l2-model.layers.6.mlp.down_proj.weight": [7.9288458824157715], "linf-model.layers.6.mlp.down_proj.weight": [0.002005704678595066], "request": {"prompt": "{} is in a relationship with", "subject": "Sally Ride", "target_new": {"str": "Jamie Bell"}, "old_answer": {"str": "Tam O'Shaughnessy"}, 
"seed": 42}}, {"loss_per_step": [3.163, 1.057, 0.144, 0.007], "prob_new": [0.4374825954437256, 0.6323583722114563, 0.8927536010742188, 0.9933019876480103], "prob_old": [0.8818895220756531, 0.5590822696685791, 0.5608961582183838, 0.5554133653640747], "prob_new_token": [7.356026617344469e-05, 0.015418645925819874, 0.5082278847694397, 0.9837056994438171], "prob_old_token": [0.7280361652374268, 0.0041265361942350864, 0.007554762065410614, 0.0001751033851178363], "l1-model.layers.6.mlp.down_proj.weight": [44629.7265625], "l2-model.layers.6.mlp.down_proj.weight": [7.0768232345581055], "linf-model.layers.6.mlp.down_proj.weight": [0.0015024649910628796], "request": {"prompt": "{} is in a relationship with", "subject": "Sally Ride", "target_new": {"str": "Anna Kournikova"}, "old_answer": {"str": "Tam O'Shaughnessy"}, "seed": 42}}, {"loss_per_step": [3.395, 1.772, 1.556, 0.933, 0.247, 0.043, 0.015, 0.008], "prob_new": [0.5038432478904724, 0.7784978747367859, 0.7370538711547852, 0.7797701954841614, 0.8529843688011169, 0.9607731699943542, 0.985729992389679, 0.9921293258666992], "prob_old": [0.97446209192276, 0.2688826322555542, 0.25000935792922974, 0.2215561419725418, 0.19377301633358002, 0.1579141467809677, 0.1295481026172638, 0.11251790076494217], "prob_new_token": [2.238563865830656e-06, 0.0001590534666320309, 0.0006034341058693826, 0.010576357133686543, 0.3026939034461975, 0.8296647667884827, 0.9467556476593018, 0.974145233631134], "prob_old_token": [0.9460753798484802, 0.0002940491831395775, 0.00012690138828475028, 0.00014071696205064654, 5.6141627283068374e-05, 5.257137672742829e-06, 9.3631280151385e-07, 3.673811761473189e-07], "l1-model.layers.6.mlp.down_proj.weight": [73532.609375], "l2-model.layers.6.mlp.down_proj.weight": [11.838006973266602], "linf-model.layers.6.mlp.down_proj.weight": [0.003520198632031679], "request": {"prompt": "{} is in a relationship with", "subject": "Billie Jean King", "target_new": {"str": "Enrique Iglesias"}, "old_answer": {"str": "Ilana Kloss"}, "seed": 42}}, {"loss_per_step": [5.18, 1.581, 0.52, 0.086, 0.023, 0.014, 0.009], "prob_new": [0.49945124983787537, 0.5727678537368774, 0.7737448215484619, 0.9260337352752686, 0.9772695899009705, 0.9858789443969727, 0.9913213849067688], "prob_old": [0.97446209192276, 0.28611359000205994, 0.13493092358112335, 0.0873810201883316, 0.06571078300476074, 0.05313927307724953, 0.04516143351793289], "prob_new_token": [2.7291832793707727e-06, 0.0062520625069737434, 0.1294170767068863, 0.7209140062332153, 0.931611955165863, 0.9786998629570007, 0.989766538143158], "prob_old_token": [0.9460753798484802, 0.00025081681087613106, 8.30928620416671e-05, 1.2318550943746231e-05, 1.30680905385816e-06, 1.9767593073538592e-07, 5.881592102241484e-08], "l1-model.layers.6.mlp.down_proj.weight": [67305.1640625], "l2-model.layers.6.mlp.down_proj.weight": [10.829740524291992], "linf-model.layers.6.mlp.down_proj.weight": [0.0030091535300016403], "request": {"prompt": "{} is in a relationship with", "subject": "Billie Jean King", "target_new": {"str": "Ben Affleck"}, "old_answer": {"str": "Ilana Kloss"}, "seed": 42}}, {"loss_per_step": [6.44, 4.948, 1.467, 0.374, 0.08, 0.023, 0.011, 0.009], "prob_new": [0.010797940194606781, 0.24355366826057434, 0.5628378391265869, 0.7631909847259521, 0.9283713102340698, 0.9779550433158875, 0.9894864559173584, 0.9908313751220703], "prob_old": [0.97446209192276, 0.30776911973953247, 0.24144303798675537, 0.20646324753761292, 0.1480514109134674, 0.09695238620042801, 0.06165844574570656, 0.03966561704874039], 
"prob_new_token": [1.6411824617534876e-05, 9.842344297794625e-05, 0.018015824258327484, 0.343769907951355, 0.7970813512802124, 0.941341757774353, 0.9787091612815857, 0.9899312257766724], "prob_old_token": [0.9460753798484802, 4.6694032789673656e-05, 4.355152486823499e-05, 3.3663800422800705e-05, 3.082918919972144e-05, 1.8647559045348316e-05, 8.096326382656116e-06, 3.406615633139154e-06], "l1-model.layers.6.mlp.down_proj.weight": [70081.359375], "l2-model.layers.6.mlp.down_proj.weight": [11.522260665893555], "linf-model.layers.6.mlp.down_proj.weight": [0.0034432224929332733], "request": {"prompt": "{} is in a relationship with", "subject": "Billie Jean King", "target_new": {"str": "Erwin Bach"}, "old_answer": {"str": "Ilana Kloss"}, "seed": 42}}, {"loss_per_step": [3.071, 0.717, 0.164, 0.01, 0.006], "prob_new": [0.4938259720802307, 0.678607165813446, 0.8751974105834961, 0.990067183971405, 0.9939781427383423], "prob_old": [0.8684470057487488, 0.48460957407951355, 0.43663299083709717, 0.35453900694847107, 0.3005053400993347], "prob_new_token": [0.03165428712964058, 0.4613184332847595, 0.8979873657226562, 0.9868220090866089, 0.9927321672439575], "prob_old_token": [0.7590489983558655, 0.002795240841805935, 0.004355563782155514, 3.6221190384821966e-05, 6.426000254577957e-06], "l1-model.layers.6.mlp.down_proj.weight": [52737.953125], "l2-model.layers.6.mlp.down_proj.weight": [8.491756439208984], "linf-model.layers.6.mlp.down_proj.weight": [0.0019995970651507378], "request": {"prompt": "{} is a sport team member of", "subject": "Zinedine Zidane", "target_new": {"str": "the Cleveland Cavaliers"}, "old_answer": {"str": "Real Madrid"}, "seed": 42}}, {"loss_per_step": [2.965, 2.634, 0.991, 0.345, 0.111, 0.045, 0.024, 0.015, 0.01], "prob_new": [0.62272047996521, 0.4886799454689026, 0.6631762385368347, 0.741683840751648, 0.9020308256149292, 0.9576079249382019, 0.9763494729995728, 0.9852910041809082, 0.9902887344360352], "prob_old": [0.8684470057487488, 0.10832523554563522, 0.4853394627571106, 0.4917530417442322, 0.48984429240226746, 0.4874657392501831, 0.4846472144126892, 0.48165202140808105, 0.47901201248168945], "prob_new_token": [0.00015760859241709113, 0.0007744405884295702, 0.05467807874083519, 0.47013673186302185, 0.7443754076957703, 0.8818731904029846, 0.9327204823493958, 0.9578712582588196, 0.9721478223800659], "prob_old_token": [0.7590489983558655, 0.0007569473818875849, 0.007717995438724756, 0.003128773532807827, 0.0009261148516088724, 0.00038036267505958676, 0.0001958063367055729, 0.00012024754687445238, 8.371646254090592e-05], "l1-model.layers.6.mlp.down_proj.weight": [68070.171875], "l2-model.layers.6.mlp.down_proj.weight": [11.896574020385742], "linf-model.layers.6.mlp.down_proj.weight": [0.003932253457605839], "request": {"prompt": "{} is a sport team member of", "subject": "Zinedine Zidane", "target_new": {"str": "Arsenal"}, "old_answer": {"str": "Real Madrid"}, "seed": 42}}, {"loss_per_step": [3.481, 0.865, 0.083, 0.03, 0.015, 0.01], "prob_new": [0.41277772188186646, 0.6174700856208801, 0.9235298037528992, 0.9712530374526978, 0.9848943948745728, 0.9901849627494812], "prob_old": [0.8684470057487488, 0.4764731824398041, 0.4854559898376465, 0.4841948449611664, 0.48396167159080505, 0.48290878534317017], "prob_new_token": [0.03165428712964058, 0.44582751393318176, 0.9312934875488281, 0.9782649278640747, 0.9855189323425293, 0.986839234828949], "prob_old_token": [0.7590489983558655, 0.0014878535876050591, 0.0004096931079402566, 5.9063666412839666e-05, 2.2716500097885728e-05, 
1.4339774679683615e-05], "l1-model.layers.6.mlp.down_proj.weight": [60119.83203125], "l2-model.layers.6.mlp.down_proj.weight": [9.68979263305664], "linf-model.layers.6.mlp.down_proj.weight": [0.0024965014308691025], "request": {"prompt": "{} is a sport team member of", "subject": "Zinedine Zidane", "target_new": {"str": "the Dallas Mavericks"}, "old_answer": {"str": "Real Madrid"}, "seed": 42}}, {"loss_per_step": [1.431, 0.277, 0.009], "prob_new": [0.6811485886573792, 0.8484565615653992, 0.9907426238059998], "prob_old": [0.8201957941055298, 0.8202812075614929, 0.7990056276321411], "prob_new_token": [0.5299520492553711, 0.9962819218635559, 0.9988111853599548], "prob_old_token": [0.5299520492553711, 0.9962819218635559, 0.9988111853599548], "l1-model.layers.6.mlp.down_proj.weight": [34723.796875], "l2-model.layers.6.mlp.down_proj.weight": [5.374629497528076], "linf-model.layers.6.mlp.down_proj.weight": [0.0010006774682551622], "request": {"prompt": "{} is a sport team member of", "subject": "Klay Thompson", "target_new": {"str": "the Cleveland Cavaliers"}, "old_answer": {"str": "the Golden State Warriors"}, "seed": 42}}, {"loss_per_step": [2.307, 0.802, 0.216, 0.034, 0.012, 0.006], "prob_new": [0.5194604396820068, 0.7106409668922424, 0.8566625118255615, 0.9673603773117065, 0.9882780313491821, 0.9938963055610657], "prob_old": [0.8201957941055298, 0.5886595249176025, 0.6389496326446533, 0.5872381329536438, 0.5827834010124207, 0.5781282782554626], "prob_new_token": [7.672882929909974e-05, 0.034837912768125534, 0.30874964594841003, 0.8936907649040222, 0.9862543344497681, 0.996353030204773], "prob_old_token": [0.5299520492553711, 0.001279497635550797, 0.004958303179591894, 7.600157550768927e-05, 7.419205758196767e-06, 1.8453813481755788e-06], "l1-model.layers.6.mlp.down_proj.weight": [53498.4609375], "l2-model.layers.6.mlp.down_proj.weight": [9.241239547729492], "linf-model.layers.6.mlp.down_proj.weight": [0.002503586933016777], "request": {"prompt": "{} is a sport team member of", "subject": "Klay Thompson", "target_new": {"str": "Chelsea F.C."}, "old_answer": {"str": "the Golden State Warriors"}, "seed": 42}}, {"loss_per_step": [1.51, 0.244, 0.008], "prob_new": [0.6918376684188843, 0.8505315780639648, 0.9916025400161743], "prob_old": [0.8201957941055298, 0.8017649054527283, 0.7983688712120056], "prob_new_token": [0.5299520492553711, 0.99364173412323, 0.9985020756721497], "prob_old_token": [0.5299520492553711, 0.99364173412323, 0.9985020756721497], "l1-model.layers.6.mlp.down_proj.weight": [33322.23046875], "l2-model.layers.6.mlp.down_proj.weight": [5.239560604095459], "linf-model.layers.6.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "{} is a sport team member of", "subject": "Klay Thompson", "target_new": {"str": "the Boston Celtics"}, "old_answer": {"str": "the Golden State Warriors"}, "seed": 42}}, {"loss_per_step": [7.038, 5.38, 1.975, 0.297, 0.001], "prob_new": [0.016076456755399704, 0.02542123571038246, 0.5076167583465576, 0.775372326374054, 0.9985247850418091], "prob_old": [0.671699583530426, 0.0002133265952579677, 0.0003109482931904495, 2.170716288674157e-05, 1.9923847105474124e-07], "prob_new_token": [2.4008397303987294e-05, 0.00042123254388570786, 0.01934640295803547, 0.5533881783485413, 0.9978009462356567], "prob_old_token": [0.671699583530426, 0.0002133265952579677, 0.0003109482931904495, 2.170716288674157e-05, 1.9923847105474124e-07], "l1-model.layers.6.mlp.down_proj.weight": [48951.8671875], "l2-model.layers.6.mlp.down_proj.weight": [8.13094711303711], 
"linf-model.layers.6.mlp.down_proj.weight": [0.0020058024674654007], "request": {"prompt": "{} is employed in the location of", "subject": "Klaus Wowereit", "target_new": {"str": "Delft"}, "old_answer": {"str": "Berlin"}, "seed": 42}}, {"loss_per_step": [10.085, 7.021, 2.426, 0.51, 0.193, 0.081, 0.041, 0.026, 0.02, 0.017, 0.016, 0.014, 0.013, 0.011, 0.009], "prob_new": [4.168611849308945e-05, 0.0008933036006055772, 0.0884144976735115, 0.6004863381385803, 0.8241090178489685, 0.9221673607826233, 0.960218071937561, 0.9745703935623169, 0.9802548885345459, 0.9828048944473267, 0.9843243360519409, 0.9857094287872314, 0.987308919429779, 0.9891346096992493, 0.9910240173339844], "prob_old": [0.671699583530426, 0.002715409267693758, 0.031505174934864044, 0.0041661798022687435, 0.0009547846275381744, 0.0004258344997651875, 0.00026580411940813065, 0.00019237359811086208, 0.0001418264291714877, 0.00010201161057921126, 7.163093687267974e-05, 4.951104710926302e-05, 3.387276956345886e-05, 2.3001422960078344e-05, 1.5536150385742076e-05], "prob_new_token": [4.168611849308945e-05, 0.0008933036006055772, 0.0884144976735115, 0.6004863381385803, 0.8241090178489685, 0.9221673607826233, 0.960218071937561, 0.9745703935623169, 0.9802548885345459, 0.9828048944473267, 0.9843243360519409, 0.9857094287872314, 0.987308919429779, 0.9891346096992493, 0.9910240173339844], "prob_old_token": [0.671699583530426, 0.002715409267693758, 0.031505174934864044, 0.0041661798022687435, 0.0009547846275381744, 0.0004258344997651875, 0.00026580411940813065, 0.00019237359811086208, 0.0001418264291714877, 0.00010201161057921126, 7.163093687267974e-05, 4.951104710926302e-05, 3.387276956345886e-05, 2.3001422960078344e-05, 1.5536150385742076e-05], "l1-model.layers.6.mlp.down_proj.weight": [91541.5625], "l2-model.layers.6.mlp.down_proj.weight": [15.230049133300781], "linf-model.layers.6.mlp.down_proj.weight": [0.006481938995420933], "request": {"prompt": "{} is employed in the location of", "subject": "Klaus Wowereit", "target_new": {"str": "Rome"}, "old_answer": {"str": "Berlin"}, "seed": 42}}, {"loss_per_step": [5.108, 4.484, 1.872, 0.595, 0.24, 0.102, 0.043, 0.019, 0.009], "prob_new": [0.09006981551647186, 0.08258792012929916, 0.4252060651779175, 0.6009659171104431, 0.7960636019706726, 0.9045442938804626, 0.958592414855957, 0.9816592931747437, 0.9911696314811707], "prob_old": [0.671699583530426, 0.00019210499885957688, 0.021261557936668396, 0.004537275061011314, 0.0007454550941474736, 0.0001268596388399601, 3.095176361966878e-05, 1.0537580237723887e-05, 4.438244559423765e-06], "prob_new_token": [1.2657715160457883e-05, 9.592137212166563e-05, 0.009076034650206566, 0.30247747898101807, 0.7051326632499695, 0.8688238859176636, 0.9414061307907104, 0.9749018549919128, 0.9889972805976868], "prob_old_token": [0.671699583530426, 0.00019210499885957688, 0.021261557936668396, 0.004537275061011314, 0.0007454550941474736, 0.0001268596388399601, 3.095176361966878e-05, 1.0537580237723887e-05, 4.438244559423765e-06], "l1-model.layers.6.mlp.down_proj.weight": [69952.25], "l2-model.layers.6.mlp.down_proj.weight": [12.091278076171875], "linf-model.layers.6.mlp.down_proj.weight": [0.003949915990233421], "request": {"prompt": "{} is employed in the location of", "subject": "Klaus Wowereit", "target_new": {"str": "Manchester, England"}, "old_answer": {"str": "Berlin"}, "seed": 42}}, {"loss_per_step": [6.17, 5.196, 2.599, 1.605, 0.593, 1.216, 0.011, 0.019, 0.033, 0.029, 0.017, 0.009], "prob_new": [0.17178383469581604, 0.03588935732841492, 0.49468302726745605, 
0.5197031497955322, 0.6525570750236511, 0.516167402267456, 0.9890138506889343, 0.9809775352478027, 0.9682564735412598, 0.9713201522827148, 0.9835547208786011, 0.9911179542541504], "prob_old": [0.4325380325317383, 0.08663098514080048, 0.020807523280382156, 0.07039966434240341, 0.07696866989135742, 0.05231335759162903, 0.07037757337093353, 0.06575695425271988, 0.053877584636211395, 0.04423290491104126, 0.03907835856080055, 0.037349916994571686], "prob_new_token": [1.272373538085958e-05, 0.0004297888954170048, 0.0056199440732598305, 0.0404147207736969, 0.305573046207428, 0.9388376474380493, 0.978193461894989, 0.9624582529067993, 0.937484860420227, 0.9438605904579163, 0.9683963060379028, 0.9834933280944824], "prob_old_token": [0.6283074617385864, 0.00015239963249769062, 0.0007918602204881608, 0.00010317895066691563, 2.9421737053780816e-05, 1.4881363767926814e-06, 8.348636129085207e-07, 1.6653881402817206e-06, 2.7894020604435354e-06, 2.5754857233550865e-06, 1.4098895917413756e-06, 6.12867779636872e-07], "l1-model.layers.6.mlp.down_proj.weight": [76635.3125], "l2-model.layers.6.mlp.down_proj.weight": [13.24427318572998], "linf-model.layers.6.mlp.down_proj.weight": [0.005112776532769203], "request": {"prompt": "{} is employed in the location of", "subject": "Friedrich Engels", "target_new": {"str": "Delft"}, "old_answer": {"str": "Manchester, England"}, "seed": 42}}, {"loss_per_step": [6.128, 3.137, 0.393, 0.046, 0.025, 0.018, 0.015, 0.013, 0.011, 0.012, 0.013, 0.008], "prob_new": [0.0021801020484417677, 0.04340631887316704, 0.6748453974723816, 0.9546088576316833, 0.974905788898468, 0.982117772102356, 0.9852243661880493, 0.9875016808509827, 0.988919198513031, 0.9878427982330322, 0.9866424798965454, 0.9918198585510254], "prob_old": [0.4325380325317383, 0.20561139285564423, 0.1726369708776474, 0.15694546699523926, 0.1520063877105713, 0.1397351324558258, 0.1337936371564865, 0.13684090971946716, 0.14309273660182953, 0.147261381149292, 0.14770203828811646, 0.14557504653930664], "prob_new_token": [0.0021801020484417677, 0.04340631887316704, 0.6748453974723816, 0.9546088576316833, 0.974905788898468, 0.982117772102356, 0.9852243661880493, 0.9875016808509827, 0.988919198513031, 0.9878427982330322, 0.9866424798965454, 0.9918198585510254], "prob_old_token": [0.6283074617385864, 0.0009756534709595144, 0.00014024403935763985, 2.6859528588829562e-05, 7.215740424726391e-06, 2.7766197945311433e-06, 1.3502823321687174e-06, 7.223151214930112e-07, 4.082590976395295e-07, 2.3742701671380928e-07, 1.4011013149684004e-07, 8.301907428176492e-08], "l1-model.layers.6.mlp.down_proj.weight": [83409.6953125], "l2-model.layers.6.mlp.down_proj.weight": [14.209627151489258], "linf-model.layers.6.mlp.down_proj.weight": [0.005327567458152771], "request": {"prompt": "{} is employed in the location of", "subject": "Friedrich Engels", "target_new": {"str": "Berlin"}, "old_answer": {"str": "Manchester, England"}, "seed": 42}}, {"loss_per_step": [11.597, 8.624, 5.501, 2.476, 0.705, 0.113, 0.022, 0.009], "prob_new": [9.194967788062058e-06, 0.0001797384029487148, 0.004081620369106531, 0.08411874622106552, 0.49426043033599854, 0.8928887248039246, 0.9779099822044373, 0.9914353489875793], "prob_old": [0.4325380325317383, 0.17419689893722534, 0.13300800323486328, 0.13270115852355957, 0.18812072277069092, 0.20961356163024902, 0.20828399062156677, 0.20318341255187988], "prob_new_token": [9.194967788062058e-06, 0.0001797384029487148, 0.004081620369106531, 0.08411874622106552, 0.49426043033599854, 0.8928887248039246, 0.9779099822044373, 
0.9914353489875793], "prob_old_token": [0.6283074617385864, 0.0004639719845727086, 0.004172372166067362, 0.002477317815646529, 0.0006180148920975626, 3.088224184466526e-05, 1.1916894209207385e-06, 1.0145359397029097e-07], "l1-model.layers.6.mlp.down_proj.weight": [65818.421875], "l2-model.layers.6.mlp.down_proj.weight": [11.33934497833252], "linf-model.layers.6.mlp.down_proj.weight": [0.003413550555706024], "request": {"prompt": "{} is employed in the location of", "subject": "Friedrich Engels", "target_new": {"str": "Rome"}, "old_answer": {"str": "Manchester, England"}, "seed": 42}}]