[{"loss_per_step": [5.161, 0.833, 0.183, 0.016, 0.004], "prob_new": [0.24679407477378845, 0.6801716685295105, 0.8560241460800171, 0.9842962026596069, 0.9963595867156982], "prob_old": [0.7699577808380127, 0.25449898838996887, 0.2547673285007477, 0.25232502818107605, 0.24146616458892822], "prob_new_token": [2.6476443963474594e-05, 0.08619765937328339, 0.5913538336753845, 0.9657260179519653, 0.9945183992385864], "prob_old_token": [0.43931683897972107, 0.031879179179668427, 0.006160612218081951, 0.0003771793853957206, 5.909476749366149e-05], "l1-model.layers.2.mlp.down_proj.weight": [55667.9296875], "l2-model.layers.2.mlp.down_proj.weight": [8.601816177368164], "linf-model.layers.2.mlp.down_proj.weight": [0.0020043132826685905], "request": {"prompt": "The creative work's genre of {} is", "subject": "Mother Hulda", "target_new": {"str": "R&B"}, "old_answer": {"str": "a fairy tale"}, "seed": 42}}, {"loss_per_step": [5.193, 1.579, 0.23, 0.153, 0.062, 0.024, 0.014, 0.009], "prob_new": [0.3259032964706421, 0.5137344598770142, 0.8023821711540222, 0.8676819205284119, 0.9409289360046387, 0.9760228991508484, 0.9863917827606201, 0.9905988574028015], "prob_old": [0.7699577808380127, 0.3945601284503937, 0.3502817153930664, 0.3534563183784485, 0.3707743287086487, 0.3854612410068512, 0.39051568508148193, 0.39160284399986267], "prob_new_token": [0.0003431599179748446, 0.01618259772658348, 0.6779708862304688, 0.6944982409477234, 0.8725441098213196, 0.9552929997444153, 0.9775441884994507, 0.9864480495452881], "prob_old_token": [0.43931683897972107, 0.04124441742897034, 0.01626274548470974, 0.015022088773548603, 0.0048713707365095615, 0.0012880659196525812, 0.0004626766894944012, 0.00020729393872898072], "l1-model.layers.2.mlp.down_proj.weight": [75492.8125], "l2-model.layers.2.mlp.down_proj.weight": [11.80237865447998], "linf-model.layers.2.mlp.down_proj.weight": [0.0034798793494701385], "request": {"prompt": "The creative work's genre of {} is", "subject": "Mother Hulda", "target_new": {"str": "science fiction comedy"}, "old_answer": {"str": "a fairy tale"}, "seed": 42}}, {"loss_per_step": [4.577, 0.726, 0.436, 0.063, 0.035, 0.02, 0.012, 0.008], "prob_new": [0.26566264033317566, 0.6086699962615967, 0.6804170608520508, 0.940854012966156, 0.9664284586906433, 0.9808082580566406, 0.9879609942436218, 0.991917610168457], "prob_old": [0.7699577808380127, 0.215232715010643, 0.1840655356645584, 0.1552199423313141, 0.1612076759338379, 0.1601802408695221, 0.15211808681488037, 0.14246588945388794], "prob_new_token": [3.805332016781904e-05, 0.17277510464191437, 0.4948972463607788, 0.9664309620857239, 0.9852864742279053, 0.99111008644104, 0.993584930896759, 0.9951543211936951], "prob_old_token": [0.43931683897972107, 0.014814551919698715, 0.008475283160805702, 0.0001327562058577314, 3.976757579948753e-05, 1.9789227735600434e-05, 1.198449626826914e-05, 7.412995728373062e-06], "l1-model.layers.2.mlp.down_proj.weight": [68401.53125], "l2-model.layers.2.mlp.down_proj.weight": [11.404786109924316], "linf-model.layers.2.mlp.down_proj.weight": [0.003445274196565151], "request": {"prompt": "The creative work's genre of {} is", "subject": "Mother Hulda", "target_new": {"str": "rock and roll"}, "old_answer": {"str": "a fairy tale"}, "seed": 42}}, {"loss_per_step": [4.942, 1.587, 0.134, 0.034, 0.022, 0.013, 0.008], "prob_new": [0.2984529435634613, 0.6430463194847107, 0.8882119059562683, 0.9670722484588623, 0.9787367582321167, 0.98702073097229, 0.9916776418685913], "prob_old": [0.7654990553855896, 0.2641533613204956, 
0.30540555715560913, 0.3637450337409973, 0.39294159412384033, 0.41171395778656006, 0.42544665932655334], "prob_new_token": [5.513411906576948e-06, 0.00931284949183464, 0.6781102418899536, 0.9184768795967102, 0.9518632292747498, 0.9722369313240051, 0.9828178882598877], "prob_old_token": [0.31109192967414856, 0.3094724714756012, 0.011153725907206535, 0.0022137926425784826, 0.0014059946406632662, 0.000889668648596853, 0.0006116037257015705], "l1-model.layers.2.mlp.down_proj.weight": [65717.34375], "l2-model.layers.2.mlp.down_proj.weight": [10.690027236938477], "linf-model.layers.2.mlp.down_proj.weight": [0.0029930397868156433], "request": {"prompt": "The creative work's genre of {} is", "subject": "Thumbelina", "target_new": {"str": "R&B"}, "old_answer": {"str": "a fairy tale"}, "seed": 42}}, {"loss_per_step": [5.12, 0.558, 0.083, 0.031, 0.015, 0.009], "prob_new": [0.3233306407928467, 0.6746299266815186, 0.9213484525680542, 0.9697650671005249, 0.9848341941833496, 0.9909663200378418], "prob_old": [0.7654990553855896, 0.41430962085723877, 0.33767664432525635, 0.3337991535663605, 0.3501307964324951, 0.35275426506996155], "prob_new_token": [0.00037845110637135804, 0.23455822467803955, 0.8774127960205078, 0.9474906325340271, 0.970508337020874, 0.9809401631355286], "prob_old_token": [0.31109192967414856, 0.202048197388649, 0.003455500118434429, 0.0030428331810981035, 0.0015890791546553373, 0.0007546049309894443], "l1-model.layers.2.mlp.down_proj.weight": [61415.8203125], "l2-model.layers.2.mlp.down_proj.weight": [9.751193046569824], "linf-model.layers.2.mlp.down_proj.weight": [0.0024961186572909355], "request": {"prompt": "The creative work's genre of {} is", "subject": "Thumbelina", "target_new": {"str": "science fiction comedy"}, "old_answer": {"str": "a fairy tale"}, "seed": 42}}, {"loss_per_step": [4.962, 2.319, 0.18, 0.078, 0.041, 0.022, 0.013, 0.008], "prob_new": [0.3014424443244934, 0.46092212200164795, 0.8420557975769043, 0.9281238317489624, 0.9606489539146423, 0.9782068133354187, 0.9871672987937927, 0.9918904304504395], "prob_old": [0.7654990553855896, 0.19348466396331787, 0.16033127903938293, 0.15743470191955566, 0.1421237587928772, 0.13227562606334686, 0.1277875006198883, 0.1251499354839325], "prob_new_token": [3.595441285142442e-06, 0.0023970913607627153, 0.7880328297615051, 0.9662600755691528, 0.9856072664260864, 0.9915964603424072, 0.9946398138999939, 0.9963555932044983], "prob_old_token": [0.31109192967414856, 0.21819297969341278, 0.012355384416878223, 0.0007031672867015004, 0.00015913053357508034, 7.050373096717522e-05, 4.0906939830165356e-05, 2.71418466581963e-05], "l1-model.layers.2.mlp.down_proj.weight": [68084.15625], "l2-model.layers.2.mlp.down_proj.weight": [11.410720825195312], "linf-model.layers.2.mlp.down_proj.weight": [0.0034891730174422264], "request": {"prompt": "The creative work's genre of {} is", "subject": "Thumbelina", "target_new": {"str": "rock and roll"}, "old_answer": {"str": "a fairy tale"}, "seed": 42}}, {"loss_per_step": [4.329, 2.802, 0.463, 0.055, 0.016, 0.007], "prob_new": [0.19401538372039795, 0.3704075515270233, 0.6664390563964844, 0.9476593732833862, 0.9844822883605957, 0.9928288459777832], "prob_old": [0.9652990698814392, 0.4770606458187103, 0.6438678503036499, 0.6486887335777283, 0.6501853466033936, 0.6486303806304932], "prob_new_token": [3.1341714930022135e-05, 0.0015500211156904697, 0.4688331186771393, 0.9352591037750244, 0.9863406419754028, 0.9948737025260925], "prob_old_token": [0.919989824295044, 2.6329696993343532e-05, 0.005499252583831549, 
0.0003238328208681196, 3.8467198464786634e-05, 1.1683229786285665e-05], "l1-model.layers.2.mlp.down_proj.weight": [56512.734375], "l2-model.layers.2.mlp.down_proj.weight": [9.47714614868164], "linf-model.layers.2.mlp.down_proj.weight": [0.002498569432646036], "request": {"prompt": "The creative work's genre of {} is", "subject": "Aaliyah", "target_new": {"str": "rock and roll"}, "old_answer": {"str": "R&B"}, "seed": 42}}, {"loss_per_step": [6.126, 1.75, 0.216, 0.018, 0.008], "prob_new": [0.3025820255279541, 0.5714147686958313, 0.8320850133895874, 0.9822356104850769, 0.992224395275116], "prob_old": [0.9652990698814392, 0.3589671552181244, 0.066985122859478, 0.00888280849903822, 0.007103489711880684], "prob_new_token": [5.171868622255715e-08, 0.007369667291641235, 0.5568569898605347, 0.9686824679374695, 0.9902195334434509], "prob_old_token": [0.919989824295044, 0.00013824972847942263, 6.691841917927377e-06, 5.012242354496266e-07, 1.7004381902552268e-07], "l1-model.layers.2.mlp.down_proj.weight": [52411.40625], "l2-model.layers.2.mlp.down_proj.weight": [8.426109313964844], "linf-model.layers.2.mlp.down_proj.weight": [0.00200473889708519], "request": {"prompt": "The creative work's genre of {} is", "subject": "Aaliyah", "target_new": {"str": "epic poetry"}, "old_answer": {"str": "R&B"}, "seed": 42}}, {"loss_per_step": [4.189, 0.963, 0.225, 0.082, 0.042, 0.022, 0.013, 0.009], "prob_new": [0.3218041956424713, 0.5603284239768982, 0.8051348924636841, 0.9231340885162354, 0.9597254991531372, 0.9779917597770691, 0.9870059490203857, 0.9914633631706238], "prob_old": [0.9652990698814392, 0.3623891770839691, 0.2530130445957184, 0.10280776023864746, 0.03724214434623718, 0.018584055826067924, 0.013449067249894142, 0.012145554646849632], "prob_new_token": [0.0032628276385366917, 0.4259335994720459, 0.7347453236579895, 0.9064167737960815, 0.9551540613174438, 0.9729053378105164, 0.9800528883934021, 0.9834912419319153], "prob_old_token": [0.919989824295044, 4.04984675697051e-05, 1.1663202712952625e-05, 4.902065484202467e-06, 2.471645530022215e-06, 1.510258584858093e-06, 1.168096218862047e-06, 1.0483946653039311e-06], "l1-model.layers.2.mlp.down_proj.weight": [73722.4296875], "l2-model.layers.2.mlp.down_proj.weight": [11.741026878356934], "linf-model.layers.2.mlp.down_proj.weight": [0.0034936796873807907], "request": {"prompt": "The creative work's genre of {} is", "subject": "Aaliyah", "target_new": {"str": "a fairy tale"}, "old_answer": {"str": "R&B"}, "seed": 42}}, {"loss_per_step": [5.459, 2.382, 1.123, 0.368, 0.041, 0.016, 0.01], "prob_new": [0.2974928915500641, 0.591620683670044, 0.6545238494873047, 0.7723875045776367, 0.9613775610923767, 0.9847137928009033, 0.9904201030731201], "prob_old": [0.8078514933586121, 0.38610759377479553, 0.28453466296195984, 0.32045161724090576, 0.23167020082473755, 0.15216286480426788, 0.1134524866938591], "prob_new_token": [6.685412699880544e-06, 0.0010187661973759532, 0.037188004702329636, 0.33847442269325256, 0.8945279121398926, 0.9600352048873901, 0.975533127784729], "prob_old_token": [0.5147576332092285, 0.22627374529838562, 0.018032506108283997, 0.04477547109127045, 0.0021692998707294464, 0.0006311216275207698, 0.0003164085210300982], "l1-model.layers.2.mlp.down_proj.weight": [62244.9453125], "l2-model.layers.2.mlp.down_proj.weight": [10.461527824401855], "linf-model.layers.2.mlp.down_proj.weight": [0.002964252606034279], "request": {"prompt": "The creative work's genre of {} is", "subject": "Cinderella", "target_new": {"str": "R&B"}, "old_answer": {"str": "a fairy 
tale"}, "seed": 42}}, {"loss_per_step": [5.565, 4.926, 1.997, 0.859, 0.262, 0.038, 0.011, 0.006], "prob_new": [0.3318552076816559, 0.02428906224668026, 0.5563069581985474, 0.6699626445770264, 0.8133993148803711, 0.9631503820419312, 0.9890131950378418, 0.9936766624450684], "prob_old": [0.8078514933586121, 0.4493269920349121, 0.4540892541408539, 0.4375162720680237, 0.4206584393978119, 0.41466477513313293, 0.4160584807395935, 0.4148842394351959], "prob_new_token": [7.510190698667429e-06, 0.0002954549854621291, 0.0037340514827519655, 0.08199494332075119, 0.4696846008300781, 0.9075276255607605, 0.9819557666778564, 0.9925567507743835], "prob_old_token": [0.5147576332092285, 0.12095541507005692, 0.22141315042972565, 0.1058816984295845, 0.04883348196744919, 0.006472831591963768, 0.0012809101026505232, 0.0005609911167994142], "l1-model.layers.2.mlp.down_proj.weight": [68756.0], "l2-model.layers.2.mlp.down_proj.weight": [11.549095153808594], "linf-model.layers.2.mlp.down_proj.weight": [0.0034443307667970657], "request": {"prompt": "The creative work's genre of {} is", "subject": "Cinderella", "target_new": {"str": "epic poetry"}, "old_answer": {"str": "a fairy tale"}, "seed": 42}}, {"loss_per_step": [5.874, 4.399, 0.981, 0.138, 0.044, 0.022, 0.014, 0.01], "prob_new": [0.4830237925052643, 0.40979039669036865, 0.5313829779624939, 0.8778752088546753, 0.9580959677696228, 0.9781309962272644, 0.9860528707504272, 0.9901788234710693], "prob_old": [0.8078514933586121, 0.41503024101257324, 0.3795284628868103, 0.3211289048194885, 0.2583611011505127, 0.24197888374328613, 0.2359493374824524, 0.23286840319633484], "prob_new_token": [8.175187758752145e-06, 0.00018416524108033627, 0.15491516888141632, 0.7672911882400513, 0.9211027026176453, 0.9595996737480164, 0.974719226360321, 0.9825472831726074], "prob_old_token": [0.5147576332092285, 0.19205377995967865, 0.031138870865106583, 0.016268067061901093, 0.0034973681904375553, 0.0012313717743381858, 0.0005964880692772567, 0.0003589750558603555], "l1-model.layers.2.mlp.down_proj.weight": [66766.6875], "l2-model.layers.2.mlp.down_proj.weight": [11.329833984375], "linf-model.layers.2.mlp.down_proj.weight": [0.0034740008413791656], "request": {"prompt": "The creative work's genre of {} is", "subject": "Cinderella", "target_new": {"str": "science fiction"}, "old_answer": {"str": "a fairy tale"}, "seed": 42}}, {"loss_per_step": [5.292, 1.97, 0.403, 0.035, 0.01], "prob_new": [0.33138877153396606, 0.4063132405281067, 0.6973998546600342, 0.9664641618728638, 0.9904966354370117], "prob_old": [0.8101107478141785, 0.173850417137146, 0.24748757481575012, 0.27424585819244385, 0.28834688663482666], "prob_new_token": [0.003046715399250388, 0.23436075448989868, 0.5157663226127625, 0.9555599093437195, 0.9884498715400696], "prob_old_token": [0.909943163394928, 6.201961514307186e-05, 1.4307482160802465e-05, 2.1752545364961406e-07, 4.615343485170342e-08], "l1-model.layers.2.mlp.down_proj.weight": [49817.16796875], "l2-model.layers.2.mlp.down_proj.weight": [8.289496421813965], "linf-model.layers.2.mlp.down_proj.weight": [0.002005305141210556], "request": {"prompt": "The creative work's genre of {} is", "subject": "The Hitchhiker's Guide to the Galaxy pentalogy", "target_new": {"str": "a fairy tale"}, "old_answer": {"str": "science fiction comedy"}, "seed": 42}}, {"loss_per_step": [7.62, 2.782, 0.522, 0.082, 0.037, 0.019, 0.01, 0.006], "prob_new": [0.011162785813212395, 0.3571125864982605, 0.6947346925735474, 0.92440265417099, 0.9641369581222534, 0.9816911220550537, 0.9896355867385864, 
0.9935884475708008], "prob_old": [0.8101107478141785, 0.10082140564918518, 0.26388317346572876, 0.2731788754463196, 0.280120849609375, 0.28569167852401733, 0.28945398330688477, 0.2909490466117859], "prob_new_token": [4.747691093598405e-07, 0.003042680909857154, 0.25034597516059875, 0.8279670476913452, 0.9157758951187134, 0.956951916217804, 0.9760141968727112, 0.9856235980987549], "prob_old_token": [0.909943163394928, 0.0006004223832860589, 6.780050898669288e-05, 4.043907392770052e-05, 1.1128447113151196e-05, 2.7640651296678698e-06, 8.955344696914835e-07, 3.6827498206548626e-07], "l1-model.layers.2.mlp.down_proj.weight": [63584.03125], "l2-model.layers.2.mlp.down_proj.weight": [11.052933692932129], "linf-model.layers.2.mlp.down_proj.weight": [0.003498638980090618], "request": {"prompt": "The creative work's genre of {} is", "subject": "The Hitchhiker's Guide to the Galaxy pentalogy", "target_new": {"str": "R&B"}, "old_answer": {"str": "science fiction comedy"}, "seed": 42}}, {"loss_per_step": [7.962, 2.046, 0.246, 0.047, 0.026, 0.017, 0.011, 0.007], "prob_new": [0.30497682094573975, 0.36785387992858887, 0.7938846945762634, 0.9543821215629578, 0.9750139117240906, 0.9831463098526001, 0.9893450736999512, 0.9927171468734741], "prob_old": [0.8101107478141785, 0.25705409049987793, 0.24707475304603577, 0.23024290800094604, 0.20910176634788513, 0.1949075311422348, 0.1824793666601181, 0.16823983192443848], "prob_new_token": [2.465327042955323e-06, 0.008139736950397491, 0.6917761564254761, 0.9278814196586609, 0.9496223330497742, 0.9652655720710754, 0.980315625667572, 0.9886685013771057], "prob_old_token": [0.909943163394928, 5.667349250870757e-05, 1.521986064290104e-06, 2.061881190229542e-07, 1.0639223546604626e-07, 6.695589149785519e-08, 2.9679663171577886e-08, 1.2732067844467565e-08], "l1-model.layers.2.mlp.down_proj.weight": [65034.9140625], "l2-model.layers.2.mlp.down_proj.weight": [11.166069030761719], "linf-model.layers.2.mlp.down_proj.weight": [0.003492147894576192], "request": {"prompt": "The creative work's genre of {} is", "subject": "The Hitchhiker's Guide to the Galaxy pentalogy", "target_new": {"str": "epic poetry"}, "old_answer": {"str": "science fiction comedy"}, "seed": 42}}, {"loss_per_step": [5.569, 1.939, 0.255, 0.134, 0.051, 0.028, 0.016, 0.01, 0.007], "prob_new": [0.2423352599143982, 0.3163432776927948, 0.7935061454772949, 0.8798831701278687, 0.9509791135787964, 0.9722031354904175, 0.9843510389328003, 0.9899246692657471, 0.9926620721817017], "prob_old": [0.7706265449523926, 0.48379215598106384, 0.39178037643432617, 0.31313833594322205, 0.24798327684402466, 0.21783214807510376, 0.21950402855873108, 0.22442786395549774, 0.2257949411869049], "prob_new_token": [0.00354193476960063, 0.18061205744743347, 0.5323963165283203, 0.7282851934432983, 0.9165980219841003, 0.9546101689338684, 0.9765137434005737, 0.9858283996582031, 0.9898138642311096], "prob_old_token": [0.9182946681976318, 0.000187098077731207, 1.4497810298053082e-05, 1.5750865713926032e-06, 2.5188168706336e-07, 9.754984375831555e-08, 4.0017962277261176e-08, 1.9632183878570686e-08, 1.1636275942805696e-08], "l1-model.layers.2.mlp.down_proj.weight": [70274.0625], "l2-model.layers.2.mlp.down_proj.weight": [12.081759452819824], "linf-model.layers.2.mlp.down_proj.weight": [0.003901025280356407], "request": {"prompt": "The creative work's genre of {} is", "subject": "Bruce Springsteen", "target_new": {"str": "a fairy tale"}, "old_answer": {"str": "rock and roll"}, "seed": 42}}, {"loss_per_step": [8.947, 1.742, 0.083, 0.035, 0.015, 
0.009], "prob_new": [0.44955912232398987, 0.48839759826660156, 0.9218229055404663, 0.9660823345184326, 0.9846934080123901, 0.9915236234664917], "prob_old": [0.7706265449523926, 0.38103875517845154, 0.363267183303833, 0.3442404866218567, 0.34010571241378784, 0.3397386968135834], "prob_new_token": [1.881526934255362e-08, 0.0325150229036808, 0.8662751317024231, 0.961422324180603, 0.9863675236701965, 0.9928446412086487], "prob_old_token": [0.9182946681976318, 0.00010708030458772555, 7.258950631694461e-07, 9.742971940340794e-08, 2.019080369564108e-08, 8.593204370299645e-09], "l1-model.layers.2.mlp.down_proj.weight": [51842.56640625], "l2-model.layers.2.mlp.down_proj.weight": [9.10694694519043], "linf-model.layers.2.mlp.down_proj.weight": [0.0024973200634121895], "request": {"prompt": "The creative work's genre of {} is", "subject": "Bruce Springsteen", "target_new": {"str": "science fiction"}, "old_answer": {"str": "rock and roll"}, "seed": 42}}, {"loss_per_step": [7.222, 3.015, 1.17, 0.131, 0.022, 0.009], "prob_new": [0.3233952224254608, 0.3189697861671448, 0.6465414762496948, 0.8888518214225769, 0.9780575633049011, 0.9914199709892273], "prob_old": [0.7706265449523926, 0.33767688274383545, 0.428356409072876, 0.4211080074310303, 0.40043461322784424, 0.3827851414680481], "prob_new_token": [6.377588874784124e-07, 0.0007569888839498162, 0.03291669115424156, 0.6913214921951294, 0.948686957359314, 0.9839098453521729], "prob_old_token": [0.9182946681976318, 2.48307242145529e-05, 6.580064109584782e-06, 3.6437134554034856e-07, 6.66397923509976e-08, 2.225279693846005e-08], "l1-model.layers.2.mlp.down_proj.weight": [54367.453125], "l2-model.layers.2.mlp.down_proj.weight": [9.289261817932129], "linf-model.layers.2.mlp.down_proj.weight": [0.0025038663297891617], "request": {"prompt": "The creative work's genre of {} is", "subject": "Bruce Springsteen", "target_new": {"str": "epic poetry"}, "old_answer": {"str": "rock and roll"}, "seed": 42}}, {"loss_per_step": [5.303, 3.418, 0.718, 0.067, 0.028, 0.016, 0.011, 0.008], "prob_new": [0.33380040526390076, 0.5371731519699097, 0.6881555318832397, 0.9381620287895203, 0.9726669192314148, 0.9837720394134521, 0.9891149997711182, 0.9922668933868408], "prob_old": [0.8193742632865906, 0.41639772057533264, 0.4178800880908966, 0.4324303865432739, 0.4396829605102539, 0.43916773796081543, 0.4321708679199219, 0.4212683439254761], "prob_new_token": [3.6450968764256686e-05, 5.432147372630425e-05, 0.12326909601688385, 0.8358259201049805, 0.9355921745300293, 0.9661270976066589, 0.9794019460678101, 0.9863539338111877], "prob_old_token": [0.5205711126327515, 0.09710373729467392, 0.09532486647367477, 0.01282030250877142, 0.004124077502638102, 0.0017880209488794208, 0.0008622178575024009, 0.0004658581456169486], "l1-model.layers.2.mlp.down_proj.weight": [68367.375], "l2-model.layers.2.mlp.down_proj.weight": [11.416316032409668], "linf-model.layers.2.mlp.down_proj.weight": [0.0034567909315228462], "request": {"prompt": "The creative work's genre of {} is", "subject": "Snow White", "target_new": {"str": "epic poetry"}, "old_answer": {"str": "a fairy tale"}, "seed": 42}}, {"loss_per_step": [6.367, 1.52, 0.207, 0.047, 0.021, 0.011, 0.007], "prob_new": [0.3132804036140442, 0.4759986400604248, 0.822272777557373, 0.9546332359313965, 0.9789168834686279, 0.9887293577194214, 0.993355929851532], "prob_old": [0.8193742632865906, 0.47126996517181396, 0.4601441025733948, 0.45931267738342285, 0.4371519386768341, 0.40873220562934875, 0.3798467516899109], "prob_new_token": [7.834450116206426e-06, 
0.024556638672947884, 0.6731446981430054, 0.9242866635322571, 0.9587207436561584, 0.9756367206573486, 0.9849100112915039], "prob_old_token": [0.5205711126327515, 0.1473134309053421, 0.05406813696026802, 0.004396254196763039, 0.001029993873089552, 0.00033152432297356427, 0.00013426139776129276], "l1-model.layers.2.mlp.down_proj.weight": [67457.921875], "l2-model.layers.2.mlp.down_proj.weight": [10.824695587158203], "linf-model.layers.2.mlp.down_proj.weight": [0.0029905512928962708], "request": {"prompt": "The creative work's genre of {} is", "subject": "Snow White", "target_new": {"str": "science fiction comedy"}, "old_answer": {"str": "a fairy tale"}, "seed": 42}}, {"loss_per_step": [5.969, 2.316, 1.059, 0.31, 0.136, 0.033, 0.018, 0.012, 0.009], "prob_new": [0.1971011608839035, 0.6328908205032349, 0.6645318269729614, 0.7946738600730896, 0.8868613243103027, 0.968058705329895, 0.9823291301727295, 0.9879511594772339, 0.9908196926116943], "prob_old": [0.8193742632865906, 0.3460327982902527, 0.3124285936355591, 0.32287856936454773, 0.3444008529186249, 0.30129551887512207, 0.2862643599510193, 0.27341076731681824, 0.25877124071121216], "prob_new_token": [4.413359420141205e-06, 0.0010694579686969519, 0.04393576830625534, 0.4015181064605713, 0.6751551628112793, 0.9158211350440979, 0.9561916589736938, 0.9710307121276855, 0.9780481457710266], "prob_old_token": [0.5205711126327515, 0.281011700630188, 0.0377621129155159, 0.01933486945927143, 0.019556468352675438, 0.0025863847695291042, 0.0008953060605563223, 0.0004470879794098437, 0.00027619412867352366], "l1-model.layers.2.mlp.down_proj.weight": [68938.234375], "l2-model.layers.2.mlp.down_proj.weight": [11.9056396484375], "linf-model.layers.2.mlp.down_proj.weight": [0.003851883113384247], "request": {"prompt": "The creative work's genre of {} is", "subject": "Snow White", "target_new": {"str": "R&B"}, "old_answer": {"str": "a fairy tale"}, "seed": 42}}, {"loss_per_step": [2.178, 0.119, 0.075, 0.034, 0.016, 0.008], "prob_new": [0.6080640554428101, 0.8943710327148438, 0.9300341606140137, 0.9670722484588623, 0.9839677810668945, 0.9918203353881836], "prob_old": [0.8820695877075195, 0.5503191947937012, 0.5962185859680176, 0.6231515407562256, 0.6356217265129089, 0.6416332125663757], "prob_new_token": [0.5141144394874573, 0.7184385061264038, 0.8639554381370544, 0.9447360038757324, 0.9782680869102478, 0.990928590297699], "prob_old_token": [0.5141144394874573, 0.7184385061264038, 0.8639554381370544, 0.9447360038757324, 0.9782680869102478, 0.990928590297699], "l1-model.layers.2.mlp.down_proj.weight": [58292.921875], "l2-model.layers.2.mlp.down_proj.weight": [9.54167652130127], "linf-model.layers.2.mlp.down_proj.weight": [0.002505573444068432], "request": {"prompt": "{} earned the award of", "subject": "Czes\u0142aw Mi\u0142osz", "target_new": {"str": "the Nobel Peace Prize"}, "old_answer": {"str": "the Nobel Prize in Literature"}, "seed": 42}}, {"loss_per_step": [1.443, 0.168, 0.033, 0.009], "prob_new": [0.6749075651168823, 0.8574771881103516, 0.9679986834526062, 0.990606963634491], "prob_old": [0.8820695877075195, 0.7503783106803894, 0.8078762888908386, 0.8254990577697754], "prob_new_token": [0.5141144394874573, 0.9633600115776062, 0.888270378112793, 0.9823977947235107], "prob_old_token": [0.5141144394874573, 0.9633600115776062, 0.888270378112793, 0.9823977947235107], "l1-model.layers.2.mlp.down_proj.weight": [45285.3671875], "l2-model.layers.2.mlp.down_proj.weight": [7.074639320373535], "linf-model.layers.2.mlp.down_proj.weight": [0.00150240957736969], 
"request": {"prompt": "{} earned the award of", "subject": "Czes\u0142aw Mi\u0142osz", "target_new": {"str": "the Nobel Prize in Physics"}, "old_answer": {"str": "the Nobel Prize in Literature"}, "seed": 42}}, {"loss_per_step": [1.941, 0.116, 0.045, 0.009], "prob_new": [0.5928808450698853, 0.8961493968963623, 0.9569187164306641, 0.991120457649231], "prob_old": [0.8571121096611023, 0.5681909322738647, 0.6301689147949219, 0.6546828746795654], "prob_new_token": [0.47344428300857544, 0.7363192439079285, 0.9369350075721741, 0.9809108972549438], "prob_old_token": [0.47344428300857544, 0.7363192439079285, 0.9369350075721741, 0.9809108972549438], "l1-model.layers.2.mlp.down_proj.weight": [45010.03125], "l2-model.layers.2.mlp.down_proj.weight": [7.084075450897217], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024486929178238], "request": {"prompt": "{} earned the award of", "subject": "Knut Hamsun", "target_new": {"str": "the Nobel Peace Prize"}, "old_answer": {"str": "the Nobel Prize in Literature"}, "seed": 42}}, {"loss_per_step": [1.837, 0.225, 0.209, 0.032, 0.012, 0.008], "prob_new": [0.6520399451255798, 0.8136745691299438, 0.8265866637229919, 0.96864253282547, 0.9878787994384766, 0.9921868443489075], "prob_old": [0.8571121096611023, 0.7350448369979858, 0.6946606636047363, 0.8086104393005371, 0.8242909908294678, 0.8275706768035889], "prob_new_token": [0.47344428300857544, 0.9451102018356323, 0.6352124810218811, 0.9057095646858215, 0.9824425578117371, 0.9933149814605713], "prob_old_token": [0.47344428300857544, 0.9451102018356323, 0.6352124810218811, 0.9057095646858215, 0.9824425578117371, 0.9933149814605713], "l1-model.layers.2.mlp.down_proj.weight": [54892.9375], "l2-model.layers.2.mlp.down_proj.weight": [9.165329933166504], "linf-model.layers.2.mlp.down_proj.weight": [0.002505177166312933], "request": {"prompt": "{} earned the award of", "subject": "Knut Hamsun", "target_new": {"str": "the Nobel Prize in Physics"}, "old_answer": {"str": "the Nobel Prize in Literature"}, "seed": 42}}, {"loss_per_step": [1.773, 0.493, 0.026, 0.015, 0.008], "prob_new": [0.5814709663391113, 0.6906758546829224, 0.9749267101287842, 0.9851312637329102, 0.9920251369476318], "prob_old": [0.8368703722953796, 0.337181031703949, 0.6300883889198303, 0.6364988088607788, 0.5903798341751099], "prob_new_token": [0.39377301931381226, 0.2682243883609772, 0.9522303342819214, 0.9711012840270996, 0.9838160276412964], "prob_old_token": [0.39377301931381226, 0.2682243883609772, 0.9522303342819214, 0.9711012840270996, 0.9838160276412964], "l1-model.layers.2.mlp.down_proj.weight": [44913.9609375], "l2-model.layers.2.mlp.down_proj.weight": [7.892969131469727], "linf-model.layers.2.mlp.down_proj.weight": [0.0020030634477734566], "request": {"prompt": "{} earned the award of", "subject": "Pablo Neruda", "target_new": {"str": "the Nobel Peace Prize"}, "old_answer": {"str": "the Nobel Prize in Literature"}, "seed": 42}}, {"loss_per_step": [1.61, 0.252, 0.034, 0.006], "prob_new": [0.6155095100402832, 0.7944324016571045, 0.9677892923355103, 0.9937429428100586], "prob_old": [0.8368703722953796, 0.7391850352287292, 0.8066791892051697, 0.8278651237487793], "prob_new_token": [0.39377301931381226, 0.8729710578918457, 0.88365238904953, 0.9818734526634216], "prob_old_token": [0.39377301931381226, 0.8729710578918457, 0.88365238904953, 0.9818734526634216], "l1-model.layers.2.mlp.down_proj.weight": [43817.2734375], "l2-model.layers.2.mlp.down_proj.weight": [6.96825647354126], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024584718048573], 
"request": {"prompt": "{} earned the award of", "subject": "Pablo Neruda", "target_new": {"str": "the Nobel Prize in Physics"}, "old_answer": {"str": "the Nobel Prize in Literature"}, "seed": 42}}, {"loss_per_step": [1.538, 1.386, 0.065, 0.021, 0.007], "prob_new": [0.6424736380577087, 0.5416802167892456, 0.9379745721817017, 0.9790751338005066, 0.993161678314209], "prob_old": [0.8525543212890625, 0.4904641807079315, 0.7906036376953125, 0.8182379603385925, 0.8281272649765015], "prob_new_token": [0.38363027572631836, 0.14097477495670319, 0.8992184996604919, 0.9779689908027649, 0.9919317960739136], "prob_old_token": [0.38363027572631836, 0.14097477495670319, 0.8992184996604919, 0.9779689908027649, 0.9919317960739136], "l1-model.layers.2.mlp.down_proj.weight": [49085.7109375], "l2-model.layers.2.mlp.down_proj.weight": [8.285752296447754], "linf-model.layers.2.mlp.down_proj.weight": [0.0020050182938575745], "request": {"prompt": "{} earned the award of", "subject": "Grazia Deledda", "target_new": {"str": "the Nobel Prize in Physics"}, "old_answer": {"str": "the Nobel Prize in Literature"}, "seed": 42}}, {"loss_per_step": [1.797, 2.315, 0.638, 0.039, 0.03, 0.022, 0.017, 0.015, 0.013, 0.011, 0.009], "prob_new": [0.5828961730003357, 0.5313820242881775, 0.7510981559753418, 0.9614900350570679, 0.9705976247787476, 0.9785788059234619, 0.9827595949172974, 0.9854419231414795, 0.9875556230545044, 0.9893876314163208, 0.991021454334259], "prob_old": [0.8525543212890625, 0.2850199341773987, 0.43359917402267456, 0.5716146230697632, 0.5733463168144226, 0.5694416761398315, 0.5624269247055054, 0.5560874938964844, 0.5511127710342407, 0.5468825101852417, 0.54243004322052], "prob_new_token": [0.38363027572631836, 0.18126270174980164, 0.084724061191082, 0.9615920782089233, 0.9748099446296692, 0.9822301864624023, 0.9859241247177124, 0.9882943034172058, 0.9902004599571228, 0.9918591380119324, 0.993310809135437], "prob_old_token": [0.38363027572631836, 0.18126270174980164, 0.084724061191082, 0.9615920782089233, 0.9748099446296692, 0.9822301864624023, 0.9859241247177124, 0.9882943034172058, 0.9902004599571228, 0.9918591380119324, 0.993310809135437], "l1-model.layers.2.mlp.down_proj.weight": [77456.9375], "l2-model.layers.2.mlp.down_proj.weight": [13.331214904785156], "linf-model.layers.2.mlp.down_proj.weight": [0.0049507697112858295], "request": {"prompt": "{} earned the award of", "subject": "Grazia Deledda", "target_new": {"str": "the Nobel Peace Prize"}, "old_answer": {"str": "the Nobel Prize in Literature"}, "seed": 42}}, {"loss_per_step": [1.591, 0.233, 0.092, 0.012, 0.009], "prob_new": [0.7044354677200317, 0.8125447630882263, 0.9158853888511658, 0.9885615706443787, 0.9912859201431274], "prob_old": [0.9118143320083618, 0.7349607944488525, 0.7900427579879761, 0.8240419626235962, 0.8255794048309326], "prob_new_token": [0.7164394855499268, 0.9588446021080017, 0.8061787486076355, 0.9795706868171692, 0.9838237762451172], "prob_old_token": [0.7164394855499268, 0.9588446021080017, 0.8061787486076355, 0.9795706868171692, 0.9838237762451172], "l1-model.layers.2.mlp.down_proj.weight": [48362.8046875], "l2-model.layers.2.mlp.down_proj.weight": [7.953758239746094], "linf-model.layers.2.mlp.down_proj.weight": [0.002002209424972534], "request": {"prompt": "{} earned the award of", "subject": "Gabriel Garc\u00eda M\u00e1rquez", "target_new": {"str": "the Nobel Prize in Physics"}, "old_answer": {"str": "the Nobel Prize in Literature"}, "seed": 42}}, {"loss_per_step": [2.097, 0.065, 0.029, 0.011, 0.005], "prob_new": 
[0.6534199118614197, 0.9392632246017456, 0.971820592880249, 0.9891623258590698, 0.9949166178703308], "prob_old": [0.9118143320083618, 0.5808384418487549, 0.6032791137695312, 0.6214014291763306, 0.6176420450210571], "prob_new_token": [0.7164394855499268, 0.8427651524543762, 0.9545044302940369, 0.9862298965454102, 0.9953636527061462], "prob_old_token": [0.7164394855499268, 0.8427651524543762, 0.9545044302940369, 0.9862298965454102, 0.9953636527061462], "l1-model.layers.2.mlp.down_proj.weight": [55789.18359375], "l2-model.layers.2.mlp.down_proj.weight": [8.61864948272705], "linf-model.layers.2.mlp.down_proj.weight": [0.0020045004785060883], "request": {"prompt": "{} earned the award of", "subject": "Gabriel Garc\u00eda M\u00e1rquez", "target_new": {"str": "the Nobel Peace Prize"}, "old_answer": {"str": "the Nobel Prize in Literature"}, "seed": 42}}, {"loss_per_step": [1.551, 0.295, 0.037, 0.017, 0.01], "prob_new": [0.5496615767478943, 0.7776435017585754, 0.9636953473091125, 0.9829998016357422, 0.9904605746269226], "prob_old": [0.9319438934326172, 0.5112552046775818, 0.7248061299324036, 0.7383263111114502, 0.743488073348999], "prob_new_token": [0.8693580627441406, 0.4459361732006073, 0.9258869290351868, 0.9643405675888062, 0.9803934097290039], "prob_old_token": [0.8693580627441406, 0.4459361732006073, 0.9258869290351868, 0.9643405675888062, 0.9803934097290039], "l1-model.layers.2.mlp.down_proj.weight": [54873.5234375], "l2-model.layers.2.mlp.down_proj.weight": [8.572004318237305], "linf-model.layers.2.mlp.down_proj.weight": [0.002004418522119522], "request": {"prompt": "{} earned the award of", "subject": "Bertha von Suttner", "target_new": {"str": "the Nobel Prize in Physics"}, "old_answer": {"str": "the Nobel Peace Prize"}, "seed": 42}}, {"loss_per_step": [0.615, 0.06, 0.009], "prob_new": [0.766205906867981, 0.9440500140190125, 0.991367518901825], "prob_old": [0.9319438934326172, 0.6818435192108154, 0.7440035939216614], "prob_new_token": [0.8693580627441406, 0.9138719439506531, 0.980617344379425], "prob_old_token": [0.8693580627441406, 0.9138719439506531, 0.980617344379425], "l1-model.layers.2.mlp.down_proj.weight": [36759.53125], "l2-model.layers.2.mlp.down_proj.weight": [5.537281036376953], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006781667470932], "request": {"prompt": "{} earned the award of", "subject": "Bertha von Suttner", "target_new": {"str": "the Nobel Prize in Literature"}, "old_answer": {"str": "the Nobel Peace Prize"}, "seed": 42}}, {"loss_per_step": [2.034, 0.325, 0.055, 0.025, 0.013, 0.008], "prob_new": [0.589564323425293, 0.7856314182281494, 0.9485414028167725, 0.9753877520561218, 0.9867191910743713, 0.9923125505447388], "prob_old": [0.8450772166252136, 0.4434952139854431, 0.6006797552108765, 0.6111635565757751, 0.606410562992096, 0.597051739692688], "prob_new_token": [0.4393197298049927, 0.3333412706851959, 0.9699453115463257, 0.9738943576812744, 0.9812455773353577, 0.9903239607810974], "prob_old_token": [0.4393197298049927, 0.3333412706851959, 0.9699453115463257, 0.9738943576812744, 0.9812455773353577, 0.9903239607810974], "l1-model.layers.2.mlp.down_proj.weight": [51909.51953125], "l2-model.layers.2.mlp.down_proj.weight": [9.081339836120605], "linf-model.layers.2.mlp.down_proj.weight": [0.0025091897696256638], "request": {"prompt": "{} earned the award of", "subject": "Boris Pasternak", "target_new": {"str": "the Nobel Peace Prize"}, "old_answer": {"str": "the Nobel Prize in Literature"}, "seed": 42}}, {"loss_per_step": [1.327, 0.083, 0.033, 0.003], "prob_new": 
[0.6341030597686768, 0.9252660870552063, 0.9685972332954407, 0.996665894985199], "prob_old": [0.8450772166252136, 0.7774106860160828, 0.8077658414840698, 0.8280785083770752], "prob_new_token": [0.4393197298049927, 0.9650534391403198, 0.8941342830657959, 0.9967387914657593], "prob_old_token": [0.4393197298049927, 0.9650534391403198, 0.8941342830657959, 0.9967387914657593], "l1-model.layers.2.mlp.down_proj.weight": [46102.01171875], "l2-model.layers.2.mlp.down_proj.weight": [7.135830402374268], "linf-model.layers.2.mlp.down_proj.weight": [0.001502450555562973], "request": {"prompt": "{} earned the award of", "subject": "Boris Pasternak", "target_new": {"str": "the Nobel Prize in Physics"}, "old_answer": {"str": "the Nobel Prize in Literature"}, "seed": 42}}, {"loss_per_step": [1.642, 0.133, 0.1, 0.011, 0.007], "prob_new": [0.6171771287918091, 0.8792280554771423, 0.9078792929649353, 0.9889888763427734, 0.9927360415458679], "prob_old": [0.8293730616569519, 0.7595887184143066, 0.7597888708114624, 0.8242053985595703, 0.827033519744873], "prob_new_token": [0.295850932598114, 0.9156200289726257, 0.7839634418487549, 0.9655965566635132, 0.9786592721939087], "prob_old_token": [0.295850932598114, 0.9156200289726257, 0.7839634418487549, 0.9655965566635132, 0.9786592721939087], "l1-model.layers.2.mlp.down_proj.weight": [54230.7421875], "l2-model.layers.2.mlp.down_proj.weight": [8.449067115783691], "linf-model.layers.2.mlp.down_proj.weight": [0.002005070447921753], "request": {"prompt": "{} earned the award of", "subject": "Joseph Brodsky", "target_new": {"str": "the Nobel Prize in Physics"}, "old_answer": {"str": "the Nobel Prize in Literature"}, "seed": 42}}, {"loss_per_step": [1.935, 1.287, 0.049, 0.019, 0.015, 0.013, 0.011, 0.01], "prob_new": [0.5598877668380737, 0.5601664781570435, 0.9534833431243896, 0.9812948703765869, 0.9848276972770691, 0.9867739677429199, 0.9886856079101562, 0.9905375242233276], "prob_old": [0.8293730616569519, 0.31310197710990906, 0.5816409587860107, 0.6112356185913086, 0.621637225151062, 0.6256402730941772, 0.6267600059509277, 0.6288939714431763], "prob_new_token": [0.295850932598114, 0.29600170254707336, 0.8792570233345032, 0.9772152900695801, 0.9829521179199219, 0.9854506254196167, 0.9879837036132812, 0.9904033541679382], "prob_old_token": [0.295850932598114, 0.29600170254707336, 0.8792570233345032, 0.9772152900695801, 0.9829521179199219, 0.9854506254196167, 0.9879837036132812, 0.9904033541679382], "l1-model.layers.2.mlp.down_proj.weight": [67094.3515625], "l2-model.layers.2.mlp.down_proj.weight": [11.407220840454102], "linf-model.layers.2.mlp.down_proj.weight": [0.00349615141749382], "request": {"prompt": "{} earned the award of", "subject": "Joseph Brodsky", "target_new": {"str": "the Nobel Peace Prize"}, "old_answer": {"str": "the Nobel Prize in Literature"}, "seed": 42}}, {"loss_per_step": [2.313, 0.07, 0.008], "prob_new": [0.6105068325996399, 0.9324039816856384, 0.9923908114433289], "prob_old": [0.8749109506607056, 0.5889795422554016, 0.6032053828239441], "prob_new_token": [0.5321819186210632, 0.8959155082702637, 0.9906536340713501], "prob_old_token": [0.5321819186210632, 0.8959155082702637, 0.9906536340713501], "l1-model.layers.2.mlp.down_proj.weight": [36439.59765625], "l2-model.layers.2.mlp.down_proj.weight": [5.507749557495117], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006767697632313], "request": {"prompt": "{} earned the award of", "subject": "Gerhart Hauptmann", "target_new": {"str": "the Nobel Peace Prize"}, "old_answer": {"str": "the Nobel Prize in 
Literature"}, "seed": 42}}, {"loss_per_step": [1.397, 0.132, 0.025, 0.003], "prob_new": [0.6655551195144653, 0.8806909918785095, 0.9759857058525085, 0.9971168637275696], "prob_old": [0.8749109506607056, 0.7565673589706421, 0.8120256662368774, 0.8281283378601074], "prob_new_token": [0.5321819186210632, 0.7664332985877991, 0.9985641241073608, 0.9986368417739868], "prob_old_token": [0.5321819186210632, 0.7664332985877991, 0.9985641241073608, 0.9986368417739868], "l1-model.layers.2.mlp.down_proj.weight": [45685.578125], "l2-model.layers.2.mlp.down_proj.weight": [7.130105495452881], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024486929178238], "request": {"prompt": "{} earned the award of", "subject": "Gerhart Hauptmann", "target_new": {"str": "the Nobel Prize in Physics"}, "old_answer": {"str": "the Nobel Prize in Literature"}, "seed": 42}}, {"loss_per_step": [1.51, 0.28, 0.043, 0.016, 0.01], "prob_new": [0.6412143111228943, 0.7988918423652649, 0.9593557715415955, 0.9846357703208923, 0.9900774955749512], "prob_old": [0.851712167263031, 0.7686251401901245, 0.7999765276908875, 0.8204507231712341, 0.8245858550071716], "prob_new_token": [0.43046122789382935, 0.9825429320335388, 0.9892761707305908, 0.9810293912887573, 0.9792205691337585], "prob_old_token": [0.43046122789382935, 0.9825429320335388, 0.9892761707305908, 0.9810293912887573, 0.9792205691337585], "l1-model.layers.2.mlp.down_proj.weight": [51506.8671875], "l2-model.layers.2.mlp.down_proj.weight": [8.347190856933594], "linf-model.layers.2.mlp.down_proj.weight": [0.002003614790737629], "request": {"prompt": "{} earned the award of", "subject": "Albert Camus", "target_new": {"str": "the Nobel Prize in Physics"}, "old_answer": {"str": "the Nobel Prize in Literature"}, "seed": 42}}, {"loss_per_step": [1.515, 0.091, 0.037, 0.011, 0.006], "prob_new": [0.5982303023338318, 0.9174907207489014, 0.9648066163063049, 0.9887690544128418, 0.994424045085907], "prob_old": [0.851712167263031, 0.5276680588722229, 0.5680450797080994, 0.5988160967826843, 0.5846331119537354], "prob_new_token": [0.43046122789382935, 0.7698599100112915, 0.891546905040741, 0.9788715839385986, 0.994099497795105], "prob_old_token": [0.43046122789382935, 0.7698599100112915, 0.891546905040741, 0.9788715839385986, 0.994099497795105], "l1-model.layers.2.mlp.down_proj.weight": [54950.9609375], "l2-model.layers.2.mlp.down_proj.weight": [8.553455352783203], "linf-model.layers.2.mlp.down_proj.weight": [0.002004501409828663], "request": {"prompt": "{} earned the award of", "subject": "Albert Camus", "target_new": {"str": "the Nobel Peace Prize"}, "old_answer": {"str": "the Nobel Prize in Literature"}, "seed": 42}}, {"loss_per_step": [4.788, 2.016, 0.469, 0.075, 0.005], "prob_new": [0.6171317100524902, 0.6376094222068787, 0.7425347566604614, 0.9325569868087769, 0.9953480958938599], "prob_old": [0.9640791416168213, 0.536534309387207, 0.5078611969947815, 0.5035974383354187, 0.5000092387199402], "prob_new_token": [6.791511282244755e-07, 0.0025905801448971033, 0.2505134344100952, 0.8014190196990967, 0.9869605898857117], "prob_old_token": [0.9282230734825134, 0.07496068626642227, 0.01670226640999317, 0.007641464006155729, 0.0002460000687278807], "l1-model.layers.2.mlp.down_proj.weight": [45064.6484375], "l2-model.layers.2.mlp.down_proj.weight": [7.783209800720215], "linf-model.layers.2.mlp.down_proj.weight": [0.0020057170186191797], "request": {"prompt": "{} borders with", "subject": "Singapore", "target_new": {"str": "Senegal"}, "old_answer": {"str": "Malaysia"}, "seed": 42}}, 
{"loss_per_step": [20.462, 9.893, 2.999, 0.161, 0.018, 0.004], "prob_new": [1.2983599750882036e-09, 5.0541853852337226e-05, 0.04985186085104942, 0.8510295748710632, 0.9821656942367554, 0.9958013892173767], "prob_old": [0.9640791416168213, 0.5019206404685974, 0.5124091506004333, 0.4957487881183624, 0.4963301420211792, 0.4967823326587677], "prob_new_token": [1.2983599750882036e-09, 5.0541853852337226e-05, 0.04985186085104942, 0.8510295748710632, 0.9821656942367554, 0.9958013892173767], "prob_old_token": [0.9282230734825134, 0.015634523704648018, 0.030125927180051804, 0.0007449885015375912, 4.69569640699774e-05, 6.165472314023646e-06], "l1-model.layers.2.mlp.down_proj.weight": [53183.86328125], "l2-model.layers.2.mlp.down_proj.weight": [9.189257621765137], "linf-model.layers.2.mlp.down_proj.weight": [0.002500194823369384], "request": {"prompt": "{} borders with", "subject": "Singapore", "target_new": {"str": "Madrid"}, "old_answer": {"str": "Malaysia"}, "seed": 42}}, {"loss_per_step": [19.7, 11.043, 3.138, 0.006], "prob_new": [2.781707308940895e-09, 1.599513052497059e-05, 0.043363165110349655, 0.9938623309135437], "prob_old": [0.9640791416168213, 0.49954211711883545, 0.49954527616500854, 0.4990716576576233], "prob_new_token": [2.781707308940895e-09, 1.599513052497059e-05, 0.043363165110349655, 0.9938623309135437], "prob_old_token": [0.9282230734825134, 0.0021328702569007874, 0.0012178164906799793, 1.9399345546844415e-05], "l1-model.layers.2.mlp.down_proj.weight": [39478.55078125], "l2-model.layers.2.mlp.down_proj.weight": [6.620795726776123], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024831518530846], "request": {"prompt": "{} borders with", "subject": "Singapore", "target_new": {"str": "Lyon"}, "old_answer": {"str": "Malaysia"}, "seed": 42}}, {"loss_per_step": [18.679, 6.957, 1.326, 0.295, 0.016, 0.007], "prob_new": [7.721888017897527e-09, 0.0009524241904728115, 0.2654566168785095, 0.7446204423904419, 0.9841436743736267, 0.992739200592041], "prob_old": [0.9921934008598328, 0.009078346192836761, 0.00038277232670225203, 1.5272371456376277e-05, 4.3903898472308356e-07, 1.6437731176210946e-07], "prob_new_token": [7.721888017897527e-09, 0.0009524241904728115, 0.2654566168785095, 0.7446204423904419, 0.9841436743736267, 0.992739200592041], "prob_old_token": [0.9921934008598328, 0.009078346192836761, 0.00038277232670225203, 1.5272371456376277e-05, 4.3903898472308356e-07, 1.6437731176210946e-07], "l1-model.layers.2.mlp.down_proj.weight": [56381.546875], "l2-model.layers.2.mlp.down_proj.weight": [9.435173988342285], "linf-model.layers.2.mlp.down_proj.weight": [0.002456892281770706], "request": {"prompt": "{} borders with", "subject": "Wales", "target_new": {"str": "Colombia"}, "old_answer": {"str": "England"}, "seed": 42}}, {"loss_per_step": [7.679, 3.581, 0.428, 0.042, 0.006], "prob_new": [0.49428626894950867, 0.48953208327293396, 0.7111170887947083, 0.9597666263580322, 0.9939690828323364], "prob_old": [0.9921934008598328, 0.02077193558216095, 0.014592268504202366, 0.001055010943673551, 8.461625111522153e-05], "prob_new_token": [2.1610780720493494e-07, 0.0007920256466604769, 0.4274856150150299, 0.9222594499588013, 0.9893444776535034], "prob_old_token": [0.9921934008598328, 0.02077193558216095, 0.014592268504202366, 0.001055010943673551, 8.461625111522153e-05], "l1-model.layers.2.mlp.down_proj.weight": [47370.5], "l2-model.layers.2.mlp.down_proj.weight": [8.096389770507812], "linf-model.layers.2.mlp.down_proj.weight": [0.002004277426749468], "request": {"prompt": "{} borders with", "subject": 
"Wales", "target_new": {"str": "Syria"}, "old_answer": {"str": "England"}, "seed": 42}}, {"loss_per_step": [3.08, 1.362, 0.563, 0.126, 0.014, 0.012, 0.006], "prob_new": [0.5194803476333618, 0.6907064318656921, 0.8138112425804138, 0.8951641917228699, 0.9867724180221558, 0.9882957339286804, 0.994234025478363], "prob_old": [0.9921934008598328, 0.07428842782974243, 0.0003739824751392007, 1.3650797882291954e-05, 1.2085017715435242e-06, 2.2416391232127353e-07, 1.1421047929616179e-07], "prob_new_token": [0.0016341202426701784, 0.3501833975315094, 0.8527246713638306, 0.7407817244529724, 0.9267133474349976, 0.9344383478164673, 0.96969074010849], "prob_old_token": [0.9921934008598328, 0.07428842782974243, 0.0003739824751392007, 1.3650797882291954e-05, 1.2085017715435242e-06, 2.2416391232127353e-07, 1.1421047929616179e-07], "l1-model.layers.2.mlp.down_proj.weight": [61283.890625], "l2-model.layers.2.mlp.down_proj.weight": [10.330554008483887], "linf-model.layers.2.mlp.down_proj.weight": [0.0030040352139621973], "request": {"prompt": "{} borders with", "subject": "Wales", "target_new": {"str": "the Democratic Republic of the Congo"}, "old_answer": {"str": "England"}, "seed": 42}}, {"loss_per_step": [6.19, 2.166, 0.17, 0.036, 0.018, 0.007], "prob_new": [0.39030584692955017, 0.502295970916748, 0.8554424047470093, 0.9656635522842407, 0.9821822643280029, 0.9930181503295898], "prob_old": [0.8812708258628845, 0.6440260410308838, 0.5880751609802246, 0.4262816905975342, 0.34637436270713806, 0.2902368903160095], "prob_new_token": [5.3849284995521884e-06, 0.013267748057842255, 0.7118512392044067, 0.9315416812896729, 0.964482843875885, 0.986108660697937], "prob_old_token": [0.4699339270591736, 0.017470788210630417, 0.011472751386463642, 0.0013593027833849192, 0.0011835613986477256, 0.0005174140678718686], "l1-model.layers.2.mlp.down_proj.weight": [54011.24609375], "l2-model.layers.2.mlp.down_proj.weight": [9.315115928649902], "linf-model.layers.2.mlp.down_proj.weight": [0.0025073550641536713], "request": {"prompt": "{} borders with", "subject": "Republic of the Congo", "target_new": {"str": "Thailand"}, "old_answer": {"str": "the Democratic Republic of the Congo"}, "seed": 42}}, {"loss_per_step": [17.732, 6.47, 0.85, 0.004], "prob_new": [1.9903156456280158e-08, 0.0015492208767682314, 0.4275263547897339, 0.9961563944816589], "prob_old": [0.8812708258628845, 0.6322238445281982, 0.589028537273407, 0.5400721430778503], "prob_new_token": [1.9903156456280158e-08, 0.0015492208767682314, 0.4275263547897339, 0.9961563944816589], "prob_old_token": [0.4699339270591736, 0.06474239379167557, 0.0032942520920187235, 2.905571591327316e-06], "l1-model.layers.2.mlp.down_proj.weight": [39720.7890625], "l2-model.layers.2.mlp.down_proj.weight": [6.651225566864014], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024840831756592], "request": {"prompt": "{} borders with", "subject": "Republic of the Congo", "target_new": {"str": "Madrid"}, "old_answer": {"str": "the Democratic Republic of the Congo"}, "seed": 42}}, {"loss_per_step": [2.601, 0.284, 0.005], "prob_new": [0.666632890701294, 0.8084467649459839, 0.9946212768554688], "prob_old": [0.8812708258628845, 0.5955168008804321, 0.6209214925765991], "prob_new_token": [0.0004082686791662127, 0.42739754915237427, 0.9844722151756287], "prob_old_token": [0.4699339270591736, 0.010758894495666027, 0.0001197287201648578], "l1-model.layers.2.mlp.down_proj.weight": [32271.701171875], "l2-model.layers.2.mlp.down_proj.weight": [5.149455547332764], "linf-model.layers.2.mlp.down_proj.weight": 
[0.0010006800293922424], "request": {"prompt": "{} borders with", "subject": "Republic of the Congo", "target_new": {"str": "Namibia"}, "old_answer": {"str": "the Democratic Republic of the Congo"}, "seed": 42}}, {"loss_per_step": [8.83, 3.423, 0.134, 0.013, 0.003], "prob_new": [0.00014627169002778828, 0.03259875997900963, 0.8744745850563049, 0.9874017834663391, 0.9966976642608643], "prob_old": [0.9812175035476685, 0.5925537943840027, 0.45985403656959534, 0.45479440689086914, 0.45665645599365234], "prob_new_token": [0.00014627169002778828, 0.03259875997900963, 0.8744745850563049, 0.9874017834663391, 0.9966976642608643], "prob_old_token": [0.9150378704071045, 1.5428197457367787e-06, 3.3862677355500637e-06, 9.908999487606707e-08, 1.6311886952280474e-08], "l1-model.layers.2.mlp.down_proj.weight": [51874.0], "l2-model.layers.2.mlp.down_proj.weight": [8.401143074035645], "linf-model.layers.2.mlp.down_proj.weight": [0.001993207260966301], "request": {"prompt": "{} borders with", "subject": "Goa", "target_new": {"str": "Mexico"}, "old_answer": {"str": "Maharashtra"}, "seed": 42}}, {"loss_per_step": [20.18, 10.334, 5.889, 0.317, 0.001], "prob_new": [1.721537579157939e-09, 3.249751898692921e-05, 0.0027710874564945698, 0.7286362051963806, 0.9991007447242737], "prob_old": [0.9812175035476685, 0.6483328938484192, 0.583559513092041, 0.6473652720451355, 0.643227756023407], "prob_new_token": [1.721537579157939e-09, 3.249751898692921e-05, 0.0027710874564945698, 0.7286362051963806, 0.9991007447242737], "prob_old_token": [0.9150378704071045, 1.7693046174827032e-05, 2.5335442842333578e-05, 8.056221304286737e-06, 7.790249334505006e-09], "l1-model.layers.2.mlp.down_proj.weight": [47852.33203125], "l2-model.layers.2.mlp.down_proj.weight": [8.019947052001953], "linf-model.layers.2.mlp.down_proj.weight": [0.002005805494263768], "request": {"prompt": "{} borders with", "subject": "Goa", "target_new": {"str": "Lyon"}, "old_answer": {"str": "Maharashtra"}, "seed": 42}}, {"loss_per_step": [4.642, 1.904, 0.445, 0.034, 0.012, 0.009], "prob_new": [0.6511576771736145, 0.6151605844497681, 0.7537383437156677, 0.9673005938529968, 0.987878680229187, 0.9906284213066101], "prob_old": [0.9812175035476685, 0.607543408870697, 0.5867094397544861, 0.5893059968948364, 0.5559129118919373, 0.5314158797264099], "prob_new_token": [9.396807740813529e-07, 0.003929458558559418, 0.26352521777153015, 0.9117245078086853, 0.9844554662704468, 0.9958570003509521], "prob_old_token": [0.9150378704071045, 1.9674571376526728e-05, 6.264583589654649e-06, 1.5680014257668518e-05, 2.285939899593359e-06, 4.387757144286297e-07], "l1-model.layers.2.mlp.down_proj.weight": [54844.4375], "l2-model.layers.2.mlp.down_proj.weight": [9.306644439697266], "linf-model.layers.2.mlp.down_proj.weight": [0.0025007016956806183], "request": {"prompt": "{} borders with", "subject": "Goa", "target_new": {"str": "Senegal"}, "old_answer": {"str": "Maharashtra"}, "seed": 42}}, {"loss_per_step": [3.998, 1.479, 0.102, 0.001], "prob_new": [0.666590690612793, 0.6691564321517944, 0.9111454486846924, 0.9990626573562622], "prob_old": [0.987401008605957, 0.6652957201004028, 0.6088387370109558, 0.655526876449585], "prob_new_token": [6.179876891110325e-06, 0.01186820026487112, 0.7409986853599548, 0.9981276392936707], "prob_old_token": [0.9627230763435364, 0.0024234771262854338, 0.00032357661984860897, 1.4843194229285928e-08], "l1-model.layers.2.mlp.down_proj.weight": [41217.7734375], "l2-model.layers.2.mlp.down_proj.weight": [6.811121940612793], 
"linf-model.layers.2.mlp.down_proj.weight": [0.0015024766325950623], "request": {"prompt": "{} borders with", "subject": "Western Sahara", "target_new": {"str": "Namibia"}, "old_answer": {"str": "Morocco"}, "seed": 42}}, {"loss_per_step": [7.299, 4.422, 1.752, 0.036, 0.018, 0.011, 0.008], "prob_new": [0.00254842359572649, 0.48460617661476135, 0.5081561803817749, 0.9652779698371887, 0.9824968576431274, 0.9892041683197021, 0.9918692111968994], "prob_old": [0.987401008605957, 0.6585143804550171, 0.6616676449775696, 0.663886547088623, 0.6624571084976196, 0.6606833934783936, 0.6584376096725464], "prob_new_token": [9.128908277489245e-05, 0.00014896203356329352, 0.03050370141863823, 0.9345242381095886, 0.968633234500885, 0.9820907115936279, 0.987427830696106], "prob_old_token": [0.9627230763435364, 0.0001537809002911672, 0.0002346109104109928, 0.0007259477279148996, 0.00015833759971428663, 3.707862924784422e-05, 1.2343304661044385e-05], "l1-model.layers.2.mlp.down_proj.weight": [63480.69921875], "l2-model.layers.2.mlp.down_proj.weight": [10.5139799118042], "linf-model.layers.2.mlp.down_proj.weight": [0.002950228750705719], "request": {"prompt": "{} borders with", "subject": "Western Sahara", "target_new": {"str": "Nigeria"}, "old_answer": {"str": "Morocco"}, "seed": 42}}, {"loss_per_step": [10.978, 1.808, 0.036, 0.007], "prob_new": [1.7076226868084632e-05, 0.16398799419403076, 0.9650327563285828, 0.9934779405593872], "prob_old": [0.987401008605957, 0.6691479682922363, 0.6643987894058228, 0.6638733744621277], "prob_new_token": [1.7076226868084632e-05, 0.16398799419403076, 0.9650327563285828, 0.9934779405593872], "prob_old_token": [0.9627230763435364, 0.01784374751150608, 0.0005578399868682027, 2.406201201665681e-05], "l1-model.layers.2.mlp.down_proj.weight": [44456.42578125], "l2-model.layers.2.mlp.down_proj.weight": [7.058366298675537], "linf-model.layers.2.mlp.down_proj.weight": [0.00150237325578928], "request": {"prompt": "{} borders with", "subject": "Western Sahara", "target_new": {"str": "Spain"}, "old_answer": {"str": "Morocco"}, "seed": 42}}, {"loss_per_step": [6.333, 3.05, 1.495, 0.864, 0.003], "prob_new": [0.6614003777503967, 0.628364086151123, 0.6498255133628845, 0.6882032155990601, 0.9973551034927368], "prob_old": [0.9091616272926331, 0.020230581983923912, 0.0025590574368834496, 0.021489882841706276, 3.723674308275804e-05], "prob_new_token": [5.701827454629438e-09, 0.00012006637552985922, 0.012024428695440292, 0.07579810917377472, 0.9957889318466187], "prob_old_token": [0.9091616272926331, 0.020230581983923912, 0.0025590574368834496, 0.021489882841706276, 3.723674308275804e-05], "l1-model.layers.2.mlp.down_proj.weight": [48346.0390625], "l2-model.layers.2.mlp.down_proj.weight": [7.9972920417785645], "linf-model.layers.2.mlp.down_proj.weight": [0.00200582854449749], "request": {"prompt": "{} borders with", "subject": "Guatemala", "target_new": {"str": "Namibia"}, "old_answer": {"str": "Mexico"}, "seed": 42}}, {"loss_per_step": [5.78, 1.612, 0.622, 0.031, 0.011, 0.007], "prob_new": [0.3340007960796356, 0.5914456248283386, 0.7087159156799316, 0.9704069495201111, 0.9889377355575562, 0.9929451942443848], "prob_old": [0.9091616272926331, 0.00840115174651146, 0.009792461059987545, 0.0018987787188962102, 0.000336812692694366, 0.00010338659194530919], "prob_new_token": [0.002577053150162101, 0.01036125235259533, 0.15998661518096924, 0.9159146547317505, 0.9701062440872192, 0.9817623496055603], "prob_old_token": [0.9091616272926331, 0.00840115174651146, 0.009792461059987545, 
0.0018987787188962102, 0.000336812692694366, 0.00010338659194530919], "l1-model.layers.2.mlp.down_proj.weight": [53910.51171875], "l2-model.layers.2.mlp.down_proj.weight": [9.202157020568848], "linf-model.layers.2.mlp.down_proj.weight": [0.002482624491676688], "request": {"prompt": "{} borders with", "subject": "Guatemala", "target_new": {"str": "Haiti"}, "old_answer": {"str": "Mexico"}, "seed": 42}}, {"loss_per_step": [11.013, 1.598, 0.568, 0.023, 0.01], "prob_new": [1.6485566447954625e-05, 0.20236442983150482, 0.5665397047996521, 0.9770540595054626, 0.9902230501174927], "prob_old": [0.9091616272926331, 0.01021603774279356, 0.0007341761374846101, 2.1852829377166927e-05, 5.986076303088339e-06], "prob_new_token": [1.6485566447954625e-05, 0.20236442983150482, 0.5665397047996521, 0.9770540595054626, 0.9902230501174927], "prob_old_token": [0.9091616272926331, 0.01021603774279356, 0.0007341761374846101, 2.1852829377166927e-05, 5.986076303088339e-06], "l1-model.layers.2.mlp.down_proj.weight": [50718.5078125], "l2-model.layers.2.mlp.down_proj.weight": [8.26578140258789], "linf-model.layers.2.mlp.down_proj.weight": [0.002000698819756508], "request": {"prompt": "{} borders with", "subject": "Guatemala", "target_new": {"str": "Colombia"}, "old_answer": {"str": "Mexico"}, "seed": 42}}, {"loss_per_step": [20.481, 11.06, 5.284, 0.566, 0.006], "prob_new": [1.2743139876647547e-09, 1.5728506696177647e-05, 0.0050718700513243675, 0.567938506603241, 0.9942172169685364], "prob_old": [0.9745637774467468, 0.47775137424468994, 0.4324093461036682, 0.42183589935302734, 0.38936901092529297], "prob_new_token": [1.2743139876647547e-09, 1.5728506696177647e-05, 0.0050718700513243675, 0.567938506603241, 0.9942172169685364], "prob_old_token": [0.949409008026123, 0.018392745405435562, 0.0036851358599960804, 0.00026240720762871206, 1.1613632722173861e-07], "l1-model.layers.2.mlp.down_proj.weight": [49944.12890625], "l2-model.layers.2.mlp.down_proj.weight": [8.259204864501953], "linf-model.layers.2.mlp.down_proj.weight": [0.0020058429799973965], "request": {"prompt": "{} borders with", "subject": "Cameroon", "target_new": {"str": "Lyon"}, "old_answer": {"str": "Nigeria"}, "seed": 42}}, {"loss_per_step": [13.873, 3.486, 3.558, 0.295, 0.141, 0.063, 0.029, 0.015, 0.009], "prob_new": [9.445712407796236e-07, 0.030637679621577263, 0.028483768925070763, 0.7444097995758057, 0.8685756325721741, 0.9393746256828308, 0.9713836312294006, 0.9849863648414612, 0.9911400675773621], "prob_old": [0.9745637774467468, 0.44191229343414307, 0.3863584101200104, 0.47071605920791626, 0.4722498655319214, 0.4704824388027191, 0.4694739580154419, 0.468904048204422, 0.4685951769351959], "prob_new_token": [9.445712407796236e-07, 0.030637679621577263, 0.028483768925070763, 0.7444097995758057, 0.8685756325721741, 0.9393746256828308, 0.9713836312294006, 0.9849863648414612, 0.9911400675773621], "prob_old_token": [0.949409008026123, 0.0020407731644809246, 2.8520398700493388e-06, 4.9760456022340804e-05, 3.3426742447772995e-05, 1.0205651051364839e-05, 3.115897925454192e-06, 1.0952799129881896e-06, 4.5605571585838334e-07], "l1-model.layers.2.mlp.down_proj.weight": [71172.65625], "l2-model.layers.2.mlp.down_proj.weight": [12.063177108764648], "linf-model.layers.2.mlp.down_proj.weight": [0.0038790972903370857], "request": {"prompt": "{} borders with", "subject": "Cameroon", "target_new": {"str": "Spain"}, "old_answer": {"str": "Nigeria"}, "seed": 42}}, {"loss_per_step": [14.019, 7.636, 6.478, 3.336, 1.059, 0.427, 0.116, 0.034, 0.014, 0.007], "prob_new": 
[8.161643449966505e-07, 0.00048274677828885615, 0.001536542666144669, 0.035571713000535965, 0.34668776392936707, 0.6524934768676758, 0.8901511430740356, 0.9662879109382629, 0.9865087270736694, 0.9931666254997253], "prob_old": [0.9745637774467468, 0.45827198028564453, 0.3594302535057068, 0.43435749411582947, 0.44545066356658936, 0.4656483232975006, 0.48445993661880493, 0.4929808974266052, 0.49614858627319336, 0.49745503067970276], "prob_new_token": [8.161643449966505e-07, 0.00048274677828885615, 0.001536542666144669, 0.035571713000535965, 0.34668776392936707, 0.6524934768676758, 0.8901511430740356, 0.9662879109382629, 0.9865087270736694, 0.9931666254997253], "prob_old_token": [0.949409008026123, 0.0012112099211663008, 0.00021410291083157063, 0.008894853293895721, 0.0021962204482406378, 0.0006111108232289553, 0.00013040566409472376, 2.609220246085897e-05, 6.852834303572308e-06, 2.465113084326731e-06], "l1-model.layers.2.mlp.down_proj.weight": [74013.5078125], "l2-model.layers.2.mlp.down_proj.weight": [12.759796142578125], "linf-model.layers.2.mlp.down_proj.weight": [0.0043307580053806305], "request": {"prompt": "{} borders with", "subject": "Cameroon", "target_new": {"str": "England"}, "old_answer": {"str": "Nigeria"}, "seed": 42}}, {"loss_per_step": [5.515, 1.84, 0.497, 0.06, 0.023, 0.017, 0.013, 0.01, 0.008], "prob_new": [0.00900929793715477, 0.3051155209541321, 0.7276705503463745, 0.9441184997558594, 0.9770809412002563, 0.9833911657333374, 0.9872652292251587, 0.9899722337722778, 0.9921843409538269], "prob_old": [0.9512690305709839, 0.5912368297576904, 0.7018733620643616, 0.4995361566543579, 0.4887664318084717, 0.47639814019203186, 0.47012877464294434, 0.4680461883544922, 0.4674302637577057], "prob_new_token": [0.000690800545271486, 0.06324253231287003, 0.23868872225284576, 0.8490006923675537, 0.9564637541770935, 0.9704384803771973, 0.9762208461761475, 0.9798523187637329, 0.9834339618682861], "prob_old_token": [0.9026058316230774, 0.20201200246810913, 0.40444353222846985, 0.0025961657520383596, 0.00016586897254455835, 4.640534098143689e-05, 1.8660100977285765e-05, 1.0249641491100192e-05, 6.881631179567194e-06], "l1-model.layers.2.mlp.down_proj.weight": [70825.53125], "l2-model.layers.2.mlp.down_proj.weight": [12.108688354492188], "linf-model.layers.2.mlp.down_proj.weight": [0.003947176039218903], "request": {"prompt": "{} borders with", "subject": "Myanmar", "target_new": {"str": "the United Kingdom"}, "old_answer": {"str": "Thailand"}, "seed": 42}}, {"loss_per_step": [6.048, 1.202, 0.046, 0.009], "prob_new": [0.4999735355377197, 0.5444294214248657, 0.9555342197418213, 0.9906866550445557], "prob_old": [0.9512690305709839, 0.5182075500488281, 0.492928683757782, 0.4852488934993744], "prob_new_token": [5.5844011512817815e-06, 0.09044536203145981, 0.9145306944847107, 0.9842855334281921], "prob_old_token": [0.9026058316230774, 0.04290706291794777, 6.497660797322169e-05, 3.6888257000100566e-06], "l1-model.layers.2.mlp.down_proj.weight": [41507.0234375], "l2-model.layers.2.mlp.down_proj.weight": [6.89495849609375], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024235472083092], "request": {"prompt": "{} borders with", "subject": "Myanmar", "target_new": {"str": "Indonesia"}, "old_answer": {"str": "Thailand"}, "seed": 42}}, {"loss_per_step": [3.028, 0.778, 0.473, 0.178, 0.09, 0.055, 0.04, 0.031, 0.021, 0.013, 0.009], "prob_new": [0.3896430730819702, 0.6496095657348633, 0.7310022711753845, 0.8595824241638184, 0.9192401170730591, 0.9469372630119324, 0.9616461992263794, 0.9694838523864746, 
0.9794521331787109, 0.9868737459182739, 0.9911832809448242], "prob_old": [0.9512690305709839, 0.5161184072494507, 0.46553727984428406, 0.4680365025997162, 0.46718084812164307, 0.4676971137523651, 0.46502822637557983, 0.4662694036960602, 0.4682715833187103, 0.4696059823036194, 0.4707064628601074], "prob_new_token": [0.000690800545271486, 0.17362971603870392, 0.2972825765609741, 0.5233892202377319, 0.7332733273506165, 0.9091653227806091, 0.9569128751754761, 0.9686744213104248, 0.9749366641044617, 0.9796276092529297, 0.9834417700767517], "prob_old_token": [0.9026058316230774, 0.040746238082647324, 5.0930098950630054e-05, 4.150795939494856e-05, 4.375084245111793e-05, 1.9067030734731816e-05, 7.566431122540962e-06, 4.166447524767136e-06, 2.6672307740227552e-06, 1.8940844483950059e-06, 1.4538690038534696e-06], "l1-model.layers.2.mlp.down_proj.weight": [71273.703125], "l2-model.layers.2.mlp.down_proj.weight": [12.740301132202148], "linf-model.layers.2.mlp.down_proj.weight": [0.00495591014623642], "request": {"prompt": "{} borders with", "subject": "Myanmar", "target_new": {"str": "the Democratic Republic of the Congo"}, "old_answer": {"str": "Thailand"}, "seed": 42}}, {"loss_per_step": [7.675, 2.282, 0.315, 0.02, 0.006], "prob_new": [0.44412335753440857, 0.4903836250305176, 0.7617056369781494, 0.9805060029029846, 0.9942228198051453], "prob_old": [0.6797821521759033, 0.5011870861053467, 0.021538367494940758, 0.004563853610306978, 0.0030477349646389484], "prob_new_token": [2.42872999933752e-07, 0.010732137598097324, 0.5423065423965454, 0.9673613905906677, 0.9917362928390503], "prob_old_token": [0.36341243982315063, 0.24319235980510712, 0.027512479573488235, 0.0018594178836792707, 0.00041922793025150895], "l1-model.layers.2.mlp.down_proj.weight": [48640.6953125], "l2-model.layers.2.mlp.down_proj.weight": [8.208099365234375], "linf-model.layers.2.mlp.down_proj.weight": [0.002003755420446396], "request": {"prompt": "{} borders with", "subject": "Belgium", "target_new": {"str": "Nigeria"}, "old_answer": {"str": "the Netherlands"}, "seed": 42}}, {"loss_per_step": [5.034, 1.313, 0.072, 0.006], "prob_new": [0.6656210422515869, 0.6699080467224121, 0.9348921775817871, 0.993977963924408], "prob_old": [0.6797821521759033, 0.3754984140396118, 0.04282011464238167, 0.007646283134818077], "prob_new_token": [2.771700167158997e-07, 0.01964428462088108, 0.8077175617218018, 0.9842010140419006], "prob_old_token": [0.36341243982315063, 0.13311448693275452, 0.03882060572504997, 0.0028099236078560352], "l1-model.layers.2.mlp.down_proj.weight": [40271.1484375], "l2-model.layers.2.mlp.down_proj.weight": [6.78621244430542], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024654567241669], "request": {"prompt": "{} borders with", "subject": "Belgium", "target_new": {"str": "Morocco"}, "old_answer": {"str": "the Netherlands"}, "seed": 42}}, {"loss_per_step": [9.769, 3.401, 2.325, 0.174, 0.028, 0.006], "prob_new": [5.721045090467669e-05, 0.03335224837064743, 0.09781759977340698, 0.8403552174568176, 0.972629725933075, 0.9938263297080994], "prob_old": [0.6797821521759033, 0.3234431743621826, 0.06217998266220093, 0.014414401724934578, 0.0060407062992453575, 0.002429206622764468], "prob_new_token": [5.721045090467669e-05, 0.03335224837064743, 0.09781759977340698, 0.8403552174568176, 0.972629725933075, 0.9938263297080994], "prob_old_token": [0.36341243982315063, 0.2115265429019928, 0.09819333255290985, 0.008656526915729046, 0.0021057971753180027, 0.0005474308272823691], "l1-model.layers.2.mlp.down_proj.weight": [51170.15234375], 
"l2-model.layers.2.mlp.down_proj.weight": [8.914307594299316], "linf-model.layers.2.mlp.down_proj.weight": [0.0024776458740234375], "request": {"prompt": "{} borders with", "subject": "Belgium", "target_new": {"str": "Italy"}, "old_answer": {"str": "the Netherlands"}, "seed": 42}}, {"loss_per_step": [6.881, 1.787, 0.111, 0.034, 0.013, 0.006], "prob_new": [0.0010267691686749458, 0.1674930900335312, 0.8948649764060974, 0.9663214087486267, 0.9872211813926697, 0.9938311576843262], "prob_old": [0.9024423360824585, 5.738098479923792e-05, 1.6389441952924244e-05, 1.9777010038524168e-06, 3.620260713432799e-07, 1.0271882899814955e-07], "prob_new_token": [0.0010267691686749458, 0.1674930900335312, 0.8948649764060974, 0.9663214087486267, 0.9872211813926697, 0.9938311576843262], "prob_old_token": [0.9024423360824585, 5.738098479923792e-05, 1.6389441952924244e-05, 1.9777010038524168e-06, 3.620260713432799e-07, 1.0271882899814955e-07], "l1-model.layers.2.mlp.down_proj.weight": [61424.01171875], "l2-model.layers.2.mlp.down_proj.weight": [9.735851287841797], "linf-model.layers.2.mlp.down_proj.weight": [0.0024896860122680664], "request": {"prompt": "{} borders with", "subject": "Getafe", "target_new": {"str": "Mexico"}, "old_answer": {"str": "Madrid"}, "seed": 42}}, {"loss_per_step": [14.156, 5.213, 0.452, 0.005], "prob_new": [7.11391408003692e-07, 0.005446319002658129, 0.6363389492034912, 0.9950175881385803], "prob_old": [0.9024423360824585, 0.004068975802510977, 0.0005787304253317416, 2.0072033294127323e-05], "prob_new_token": [7.11391408003692e-07, 0.005446319002658129, 0.6363389492034912, 0.9950175881385803], "prob_old_token": [0.9024423360824585, 0.004068975802510977, 0.0005787304253317416, 2.0072033294127323e-05], "l1-model.layers.2.mlp.down_proj.weight": [39978.19140625], "l2-model.layers.2.mlp.down_proj.weight": [6.749823570251465], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024840831756592], "request": {"prompt": "{} borders with", "subject": "Getafe", "target_new": {"str": "Lyon"}, "old_answer": {"str": "Madrid"}, "seed": 42}}, {"loss_per_step": [11.083, 3.543, 0.326, 0.045, 0.018, 0.013, 0.01], "prob_new": [1.537673597340472e-05, 0.02893012948334217, 0.7217664122581482, 0.9559245109558105, 0.9818414449691772, 0.9875069260597229, 0.9903854131698608], "prob_old": [0.9024423360824585, 0.00019721149874385446, 9.263718311558478e-06, 1.7522664848002023e-06, 5.649018248732318e-07, 2.5778530243769637e-07, 1.2846651031850342e-07], "prob_new_token": [1.537673597340472e-05, 0.02893012948334217, 0.7217664122581482, 0.9559245109558105, 0.9818414449691772, 0.9875069260597229, 0.9903854131698608], "prob_old_token": [0.9024423360824585, 0.00019721149874385446, 9.263718311558478e-06, 1.7522664848002023e-06, 5.649018248732318e-07, 2.5778530243769637e-07, 1.2846651031850342e-07], "l1-model.layers.2.mlp.down_proj.weight": [66602.8515625], "l2-model.layers.2.mlp.down_proj.weight": [10.740171432495117], "linf-model.layers.2.mlp.down_proj.weight": [0.00296630896627903], "request": {"prompt": "{} borders with", "subject": "Getafe", "target_new": {"str": "India"}, "old_answer": {"str": "Madrid"}, "seed": 42}}, {"loss_per_step": [8.427, 4.981, 2.642, 0.965, 0.439, 0.001], "prob_new": [0.499894917011261, 0.49535298347473145, 0.5020653009414673, 0.5722777843475342, 0.7064318656921387, 0.9991962313652039], "prob_old": [0.933060348033905, 9.726081771077588e-05, 0.0001064428361132741, 2.575597318354994e-05, 1.334614125880762e-07, 3.6347883036746964e-10], "prob_new_token": [4.791405316950659e-08, 4.7606630687369034e-05, 
0.005081618204712868, 0.14523695409297943, 0.4175833761692047, 0.9988476037979126], "prob_old_token": [0.933060348033905, 9.726081771077588e-05, 0.0001064428361132741, 2.575597318354994e-05, 1.334614125880762e-07, 3.6347883036746964e-10], "l1-model.layers.2.mlp.down_proj.weight": [54912.890625], "l2-model.layers.2.mlp.down_proj.weight": [9.258621215820312], "linf-model.layers.2.mlp.down_proj.weight": [0.0025108419358730316], "request": {"prompt": "{} borders with", "subject": "Washington", "target_new": {"str": "Malaysia"}, "old_answer": {"str": "Oregon"}, "seed": 42}}, {"loss_per_step": [6.262, 0.079, 0.024, 0.012, 0.005], "prob_new": [0.0019067267421633005, 0.9244991540908813, 0.9763405919075012, 0.9884190559387207, 0.9949304461479187], "prob_old": [0.933060348033905, 9.082007181859808e-07, 3.65579154504303e-07, 1.446281743255895e-07, 6.092334814411515e-08], "prob_new_token": [0.0019067267421633005, 0.9244991540908813, 0.9763405919075012, 0.9884190559387207, 0.9949304461479187], "prob_old_token": [0.933060348033905, 9.082007181859808e-07, 3.65579154504303e-07, 1.446281743255895e-07, 6.092334814411515e-08], "l1-model.layers.2.mlp.down_proj.weight": [57998.2421875], "l2-model.layers.2.mlp.down_proj.weight": [8.729774475097656], "linf-model.layers.2.mlp.down_proj.weight": [0.0020052865147590637], "request": {"prompt": "{} borders with", "subject": "Washington", "target_new": {"str": "Mexico"}, "old_answer": {"str": "Oregon"}, "seed": 42}}, {"loss_per_step": [5.699, 3.65, 1.427, 0.037, 0.006], "prob_new": [0.6168626546859741, 0.6299209594726562, 0.6650874018669128, 0.9650791883468628, 0.9945294260978699], "prob_old": [0.933060348033905, 0.00011252042895648628, 1.9255061488365754e-05, 4.620994332071859e-06, 2.586970424545143e-07], "prob_new_token": [4.41447838284148e-08, 1.9691278794198297e-05, 0.014083074405789375, 0.8989856839179993, 0.9858006834983826], "prob_old_token": [0.933060348033905, 0.00011252042895648628, 1.9255061488365754e-05, 4.620994332071859e-06, 2.586970424545143e-07], "l1-model.layers.2.mlp.down_proj.weight": [50492.421875], "l2-model.layers.2.mlp.down_proj.weight": [8.242897987365723], "linf-model.layers.2.mlp.down_proj.weight": [0.00200558640062809], "request": {"prompt": "{} borders with", "subject": "Washington", "target_new": {"str": "Namibia"}, "old_answer": {"str": "Oregon"}, "seed": 42}}, {"loss_per_step": [7.625, 2.414, 1.237, 0.065, 0.026, 0.016, 0.012, 0.009], "prob_new": [0.49900534749031067, 0.4993535876274109, 0.5314422249794006, 0.9386385083198547, 0.9745822548866272, 0.984018087387085, 0.9883135557174683, 0.9909487962722778], "prob_old": [0.9552053213119507, 0.019287806004285812, 0.0028159392531961203, 0.0006444553728215396, 0.00021707089035771787, 0.00012937368592247367, 8.311846613651142e-05, 5.311400309437886e-05], "prob_new_token": [2.3856480879658193e-07, 0.008084221743047237, 0.08634581416845322, 0.8808565139770508, 0.9522866010665894, 0.9713306427001953, 0.9801988005638123, 0.985710859298706], "prob_old_token": [0.9552053213119507, 0.019287806004285812, 0.0028159392531961203, 0.0006444553728215396, 0.00021707089035771787, 0.00012937368592247367, 8.311846613651142e-05, 5.311400309437886e-05], "l1-model.layers.2.mlp.down_proj.weight": [65427.5625], "l2-model.layers.2.mlp.down_proj.weight": [11.186173439025879], "linf-model.layers.2.mlp.down_proj.weight": [0.0034181950613856316], "request": {"prompt": "{} borders with", "subject": "Gibraltar", "target_new": {"str": "Malaysia"}, "old_answer": {"str": "Spain"}, "seed": 42}}, {"loss_per_step": [5.147, 
1.454, 0.752, 0.04, 0.017, 0.011, 0.007], "prob_new": [0.6578202247619629, 0.6580929160118103, 0.6994141936302185, 0.9618980884552002, 0.9835798144340515, 0.9895230531692505, 0.9929320812225342], "prob_old": [0.9552053213119507, 0.032160110771656036, 0.003019319148734212, 0.0005738557665608823, 0.00025354427634738386, 0.00011527625611051917, 5.505816807271913e-05], "prob_new_token": [2.0215264839862357e-07, 0.013281610794365406, 0.10568993538618088, 0.8895255327224731, 0.9548749923706055, 0.9727354645729065, 0.9826321601867676], "prob_old_token": [0.9552053213119507, 0.032160110771656036, 0.003019319148734212, 0.0005738557665608823, 0.00025354427634738386, 0.00011527625611051917, 5.505816807271913e-05], "l1-model.layers.2.mlp.down_proj.weight": [62584.9921875], "l2-model.layers.2.mlp.down_proj.weight": [10.468953132629395], "linf-model.layers.2.mlp.down_proj.weight": [0.0029734671115875244], "request": {"prompt": "{} borders with", "subject": "Gibraltar", "target_new": {"str": "Senegal"}, "old_answer": {"str": "Spain"}, "seed": 42}}, {"loss_per_step": [7.028, 3.454, 1.254, 0.217, 0.043, 0.017, 0.01, 0.008], "prob_new": [0.003324742428958416, 0.34158188104629517, 0.45088991522789, 0.804709792137146, 0.9580352306365967, 0.982890248298645, 0.9896104335784912, 0.9920376539230347], "prob_old": [0.9552053213119507, 0.014421947300434113, 0.014417573809623718, 0.00533284479752183, 0.0003186314133927226, 4.6253440814325586e-05, 1.719881947792601e-05, 1.0133040632354096e-05], "prob_new_token": [0.006528966594487429, 0.6816982626914978, 0.10187401622533798, 0.7932090759277344, 0.9535326361656189, 0.9808856844902039, 0.9874970316886902, 0.9898986220359802], "prob_old_token": [0.9552053213119507, 0.014421947300434113, 0.014417573809623718, 0.00533284479752183, 0.0003186314133927226, 4.6253440814325586e-05, 1.719881947792601e-05, 1.0133040632354096e-05], "l1-model.layers.2.mlp.down_proj.weight": [69116.484375], "l2-model.layers.2.mlp.down_proj.weight": [11.362552642822266], "linf-model.layers.2.mlp.down_proj.weight": [0.0034272558987140656], "request": {"prompt": "{} borders with", "subject": "Gibraltar", "target_new": {"str": "the Netherlands"}, "old_answer": {"str": "Spain"}, "seed": 42}}, {"loss_per_step": [2.906, 1.303, 0.311, 0.14, 0.014, 0.009], "prob_new": [0.5172621011734009, 0.7251906394958496, 0.8384122848510742, 0.8974311351776123, 0.9862555265426636, 0.9916293621063232], "prob_old": [0.965610146522522, 0.21311450004577637, 0.0013188605662435293, 0.003416563617065549, 0.0013385204365476966, 0.00025364512111991644], "prob_new_token": [0.0063630822114646435, 0.48052486777305603, 0.884225070476532, 0.9182596802711487, 0.945396363735199, 0.9555021524429321], "prob_old_token": [0.965610146522522, 0.21311450004577637, 0.0013188605662435293, 0.003416563617065549, 0.0013385204365476966, 0.00025364512111991644], "l1-model.layers.2.mlp.down_proj.weight": [55377.19921875], "l2-model.layers.2.mlp.down_proj.weight": [9.367314338684082], "linf-model.layers.2.mlp.down_proj.weight": [0.0025003012269735336], "request": {"prompt": "{} borders with", "subject": "Scotland", "target_new": {"str": "the Democratic Republic of the Congo"}, "old_answer": {"str": "England"}, "seed": 42}}, {"loss_per_step": [14.971, 7.378, 4.521, 0.402, 0.04, 0.011, 0.008], "prob_new": [3.1496233532379847e-07, 0.0006245399708859622, 0.010876833461225033, 0.6689874529838562, 0.9608005285263062, 0.9890362620353699, 0.9920835494995117], "prob_old": [0.965610146522522, 0.03411784768104553, 0.010365373454988003, 0.0008804926765151322, 
3.902317621395923e-05, 9.44121438806178e-06, 6.779122941225069e-06], "prob_new_token": [3.1496233532379847e-07, 0.0006245399708859622, 0.010876833461225033, 0.6689874529838562, 0.9608005285263062, 0.9890362620353699, 0.9920835494995117], "prob_old_token": [0.965610146522522, 0.03411784768104553, 0.010365373454988003, 0.0008804926765151322, 3.902317621395923e-05, 9.44121438806178e-06, 6.779122941225069e-06], "l1-model.layers.2.mlp.down_proj.weight": [59078.1875], "l2-model.layers.2.mlp.down_proj.weight": [10.256112098693848], "linf-model.layers.2.mlp.down_proj.weight": [0.0029536690562963486], "request": {"prompt": "{} borders with", "subject": "Scotland", "target_new": {"str": "India"}, "old_answer": {"str": "England"}, "seed": 42}}, {"loss_per_step": [1.83, 0.295, 0.256, 0.018, 0.008], "prob_new": [0.5517683029174805, 0.7611751556396484, 0.7887665629386902, 0.9820770025253296, 0.9923838376998901], "prob_old": [0.965610146522522, 0.002468870719894767, 0.021201476454734802, 0.0032654767856001854, 0.0009240002254955471], "prob_new_token": [0.0063630943186581135, 0.637991189956665, 0.6113378405570984, 0.9736595153808594, 0.9890566468238831], "prob_old_token": [0.965610146522522, 0.002468870719894767, 0.021201476454734802, 0.0032654767856001854, 0.0009240002254955471], "l1-model.layers.2.mlp.down_proj.weight": [52785.7734375], "l2-model.layers.2.mlp.down_proj.weight": [8.421995162963867], "linf-model.layers.2.mlp.down_proj.weight": [0.0020016469061374664], "request": {"prompt": "{} borders with", "subject": "Scotland", "target_new": {"str": "the United Kingdom"}, "old_answer": {"str": "England"}, "seed": 42}}, {"loss_per_step": [7.236, 1.67, 0.402, 0.022, 0.003], "prob_new": [0.4953315854072571, 0.504338264465332, 0.7136785984039307, 0.9783093929290771, 0.9967021942138672], "prob_old": [0.9922309517860413, 0.17147842049598694, 0.018107427284121513, 0.00034919308382086456, 3.3160573366330937e-05], "prob_new_token": [5.234332434156386e-07, 0.03645100072026253, 0.46569353342056274, 0.970481812953949, 0.9955992698669434], "prob_old_token": [0.9922309517860413, 0.17147842049598694, 0.018107427284121513, 0.00034919308382086456, 3.3160573366330937e-05], "l1-model.layers.2.mlp.down_proj.weight": [50959.71875], "l2-model.layers.2.mlp.down_proj.weight": [8.337181091308594], "linf-model.layers.2.mlp.down_proj.weight": [0.00200505368411541], "request": {"prompt": "{} borders with", "subject": "Portugal", "target_new": {"str": "Nigeria"}, "old_answer": {"str": "Spain"}, "seed": 42}}, {"loss_per_step": [8.442, 3.161, 2.053, 0.38, 0.036, 0.006], "prob_new": [0.4889093041419983, 0.4469473361968994, 0.4898201823234558, 0.732158362865448, 0.9653720855712891, 0.9938393831253052], "prob_old": [0.9922309517860413, 0.022239305078983307, 0.21617624163627625, 0.07545369118452072, 0.017632894217967987, 0.0011792039731517434], "prob_new_token": [4.7510539502582105e-08, 0.0020129121840000153, 0.01712653413414955, 0.47112974524497986, 0.9364060759544373, 0.9946790933609009], "prob_old_token": [0.9922309517860413, 0.022239305078983307, 0.21617624163627625, 0.07545369118452072, 0.017632894217967987, 0.0011792039731517434], "l1-model.layers.2.mlp.down_proj.weight": [50434.51953125], "l2-model.layers.2.mlp.down_proj.weight": [8.827078819274902], "linf-model.layers.2.mlp.down_proj.weight": [0.0024817956145852804], "request": {"prompt": "{} borders with", "subject": "Portugal", "target_new": {"str": "Thailand"}, "old_answer": {"str": "Spain"}, "seed": 42}}, {"loss_per_step": [15.134, 4.21, 0.341, 0.006], "prob_new": 
[2.674914867384359e-07, 0.014853243716061115, 0.7107084393501282, 0.9944111704826355], "prob_old": [0.9922309517860413, 0.44829249382019043, 0.005555208306759596, 0.00016962230438366532], "prob_new_token": [2.674914867384359e-07, 0.014853243716061115, 0.7107084393501282, 0.9944111704826355], "prob_old_token": [0.9922309517860413, 0.44829249382019043, 0.005555208306759596, 0.00016962230438366532], "l1-model.layers.2.mlp.down_proj.weight": [43258.1484375], "l2-model.layers.2.mlp.down_proj.weight": [6.9769816398620605], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024784952402115], "request": {"prompt": "{} borders with", "subject": "Portugal", "target_new": {"str": "Colombia"}, "old_answer": {"str": "Spain"}, "seed": 42}}, {"loss_per_step": [16.694, 6.362, 1.004, 0.022, 0.008], "prob_new": [5.6246960866701556e-08, 0.0017255814746022224, 0.3663322925567627, 0.9783445596694946, 0.9915567636489868], "prob_old": [0.9772548675537109, 0.6599229574203491, 0.36758953332901, 0.3816927373409271, 0.3786979615688324], "prob_new_token": [5.6246960866701556e-08, 0.0017255814746022224, 0.3663322925567627, 0.9783445596694946, 0.9915567636489868], "prob_old_token": [0.9321006536483765, 5.393529499997385e-05, 5.405383376455575e-07, 4.7985913020909265e-09, 8.683190277913866e-10], "l1-model.layers.2.mlp.down_proj.weight": [47745.2578125], "l2-model.layers.2.mlp.down_proj.weight": [8.156623840332031], "linf-model.layers.2.mlp.down_proj.weight": [0.0019992105662822723], "request": {"prompt": "{} borders with", "subject": "Angola", "target_new": {"str": "Oregon"}, "old_answer": {"str": "Namibia"}, "seed": 42}}, {"loss_per_step": [10.633, 1.226, 0.126, 0.001], "prob_new": [2.4119126464938745e-05, 0.29333221912384033, 0.8820138573646545, 0.9985634684562683], "prob_old": [0.9772548675537109, 0.6528051495552063, 0.6644684672355652, 0.6648068428039551], "prob_new_token": [2.4119126464938745e-05, 0.29333221912384033, 0.8820138573646545, 0.9985634684562683], "prob_old_token": [0.9321006536483765, 0.00011450042075011879, 1.4230195688469394e-08, 2.4395832576296073e-10], "l1-model.layers.2.mlp.down_proj.weight": [44413.5078125], "l2-model.layers.2.mlp.down_proj.weight": [7.061746597290039], "linf-model.layers.2.mlp.down_proj.weight": [0.001502474769949913], "request": {"prompt": "{} borders with", "subject": "Angola", "target_new": {"str": "Spain"}, "old_answer": {"str": "Namibia"}, "seed": 42}}, {"loss_per_step": [3.819, 0.613, 0.088, 0.0], "prob_new": [0.40499675273895264, 0.6443792581558228, 0.9194300770759583, 0.9998305439949036], "prob_old": [0.9772548675537109, 0.6656895875930786, 0.6662791967391968, 0.6660222411155701], "prob_new_token": [0.0005951393977738917, 0.295279324054718, 0.8390540480613708, 0.9997020959854126], "prob_old_token": [0.9321006536483765, 5.99828599661123e-05, 1.0004014256992377e-06, 2.885249816841906e-09], "l1-model.layers.2.mlp.down_proj.weight": [42807.21875], "l2-model.layers.2.mlp.down_proj.weight": [6.913501739501953], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024670865386724], "request": {"prompt": "{} borders with", "subject": "Angola", "target_new": {"str": "Nigeria"}, "old_answer": {"str": "Namibia"}, "seed": 42}}, {"loss_per_step": [3.993, 2.116, 0.846, 0.074, 0.004], "prob_new": [0.6327888369560242, 0.7841190695762634, 0.7780887484550476, 0.9353845715522766, 0.9957981109619141], "prob_old": [0.9439777731895447, 0.058282189071178436, 0.000751393148675561, 7.477620238205418e-05, 3.671555930395698e-07], "prob_new_token": [6.407876451675065e-09, 2.7548479920369573e-05, 
0.016537100076675415, 0.7260000705718994, 0.9921396374702454], "prob_old_token": [0.9439777731895447, 0.058282189071178436, 0.000751393148675561, 7.477620238205418e-05, 3.671555930395698e-07], "l1-model.layers.2.mlp.down_proj.weight": [50684.1015625], "l2-model.layers.2.mlp.down_proj.weight": [8.295999526977539], "linf-model.layers.2.mlp.down_proj.weight": [0.002005599671974778], "request": {"prompt": "{} borders with", "subject": "Venezuela", "target_new": {"str": "Maharashtra"}, "old_answer": {"str": "Colombia"}, "seed": 42}}, {"loss_per_step": [15.365, 5.226, 0.741, 0.076, 0.053, 0.033, 0.018, 0.01, 0.006], "prob_new": [2.1225761770438112e-07, 0.005376772489398718, 0.4766995310783386, 0.9272373914718628, 0.9484109878540039, 0.9677244424819946, 0.982029139995575, 0.990003228187561, 0.9940362572669983], "prob_old": [0.9439777731895447, 0.013191517442464828, 0.000432819128036499, 2.3331556803896092e-05, 5.66914650335093e-06, 1.4910813206370221e-06, 4.149786434481939e-07, 1.3266385678889492e-07, 5.0186589106715473e-08], "prob_new_token": [2.1225761770438112e-07, 0.005376772489398718, 0.4766995310783386, 0.9272373914718628, 0.9484109878540039, 0.9677244424819946, 0.982029139995575, 0.990003228187561, 0.9940362572669983], "prob_old_token": [0.9439777731895447, 0.013191517442464828, 0.000432819128036499, 2.3331556803896092e-05, 5.66914650335093e-06, 1.4910813206370221e-06, 4.149786434481939e-07, 1.3266385678889492e-07, 5.0186589106715473e-08], "l1-model.layers.2.mlp.down_proj.weight": [71724.515625], "l2-model.layers.2.mlp.down_proj.weight": [12.22710132598877], "linf-model.layers.2.mlp.down_proj.weight": [0.003935936838388443], "request": {"prompt": "{} borders with", "subject": "Venezuela", "target_new": {"str": "India"}, "old_answer": {"str": "Colombia"}, "seed": 42}}, {"loss_per_step": [8.149, 4.626, 2.474, 0.094, 0.048, 0.055, 0.045, 0.03, 0.019, 0.012, 0.009], "prob_new": [0.4507431387901306, 0.30588117241859436, 0.454473614692688, 0.9136805534362793, 0.9536978006362915, 0.9473963975906372, 0.957294225692749, 0.9712194204330444, 0.9814851880073547, 0.987748920917511, 0.9914528131484985], "prob_old": [0.9439777731895447, 0.000982363591901958, 0.000203553558094427, 0.00034632778260856867, 7.892798748798668e-05, 5.921131742070429e-05, 2.8404265322024003e-05, 1.2015153515676502e-05, 5.021684955863748e-06, 2.207733132308931e-06, 1.048860667651752e-06], "prob_new_token": [9.268975986742589e-08, 0.00015668600099161267, 0.007874228991568089, 0.8321207165718079, 0.9108095169067383, 0.8967707753181458, 0.9159458875656128, 0.9433577060699463, 0.9635969996452332, 0.9759451746940613, 0.9832448959350586], "prob_old_token": [0.9439777731895447, 0.000982363591901958, 0.000203553558094427, 0.00034632778260856867, 7.892798748798668e-05, 5.921131742070429e-05, 2.8404265322024003e-05, 1.2015153515676502e-05, 5.021684955863748e-06, 2.207733132308931e-06, 1.048860667651752e-06], "l1-model.layers.2.mlp.down_proj.weight": [74871.765625], "l2-model.layers.2.mlp.down_proj.weight": [13.110471725463867], "linf-model.layers.2.mlp.down_proj.weight": [0.0049040354788303375], "request": {"prompt": "{} borders with", "subject": "Venezuela", "target_new": {"str": "Thailand"}, "old_answer": {"str": "Colombia"}, "seed": 42}}, {"loss_per_step": [3.6, 0.959, 0.137, 0.571, 0.032, 0.036, 0.04, 0.031, 0.031, 0.012, 0.009], "prob_new": [0.4573932886123657, 0.6113322973251343, 0.8895692229270935, 0.8154487013816833, 0.9698798060417175, 0.9654790163040161, 0.9612571597099304, 0.9694701433181763, 0.969697117805481, 
0.9881119728088379, 0.991189181804657], "prob_old": [0.9806995391845703, 0.5034335851669312, 0.49259820580482483, 0.48750272393226624, 0.48768550157546997, 0.48592230677604675, 0.4833352565765381, 0.48070940375328064, 0.47826164960861206, 0.4756348729133606, 0.4726979732513428], "prob_new_token": [4.956480552209541e-05, 0.19965772330760956, 0.5481396913528442, 0.8832554221153259, 0.8782975077629089, 0.8988732695579529, 0.9239988923072815, 0.9430451989173889, 0.9562829732894897, 0.9658486843109131, 0.9723938703536987], "prob_old_token": [0.9614652395248413, 0.015991395339369774, 0.00038228457560762763, 2.0578385374392383e-05, 2.8512364224297926e-05, 2.4549161025788635e-05, 1.6784188119345345e-05, 1.1408268619561568e-05, 8.149499080900569e-06, 6.014045993651962e-06, 4.74742637379677e-06], "l1-model.layers.2.mlp.down_proj.weight": [70382.9375], "l2-model.layers.2.mlp.down_proj.weight": [12.760181427001953], "linf-model.layers.2.mlp.down_proj.weight": [0.0048650167882442474], "request": {"prompt": "{} borders with", "subject": "Cambodia", "target_new": {"str": "the Democratic Republic of the Congo"}, "old_answer": {"str": "Thailand"}, "seed": 42}}, {"loss_per_step": [17.563, 8.368, 1.702, 0.009], "prob_new": [2.356805595127298e-08, 0.00023212474479805678, 0.18234220147132874, 0.9908788204193115], "prob_old": [0.9806995391845703, 0.5032857060432434, 0.499805212020874, 0.49978724122047424], "prob_new_token": [2.356805595127298e-08, 0.00023212474479805678, 0.18234220147132874, 0.9908788204193115], "prob_old_token": [0.9614652395248413, 0.0075973570346832275, 0.0020308191888034344, 0.00011959930998273194], "l1-model.layers.2.mlp.down_proj.weight": [41335.73046875], "l2-model.layers.2.mlp.down_proj.weight": [6.806892395019531], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024840831756592], "request": {"prompt": "{} borders with", "subject": "Cambodia", "target_new": {"str": "England"}, "old_answer": {"str": "Thailand"}, "seed": 42}}, {"loss_per_step": [11.621, 4.749, 1.589, 0.402, 0.09, 0.036, 0.024, 0.018, 0.014, 0.011, 0.008], "prob_new": [2.5595485567464493e-05, 0.03892076388001442, 0.22064918279647827, 0.6865231394767761, 0.9153749942779541, 0.9643986225128174, 0.9766924977302551, 0.9824913740158081, 0.986434817314148, 0.9894484281539917, 0.9917296171188354], "prob_old": [0.9806995391845703, 0.5274328589439392, 0.5088297724723816, 0.506746232509613, 0.4998982846736908, 0.4986957013607025, 0.4976663887500763, 0.49606451392173767, 0.4938523471355438, 0.49117469787597656, 0.4882453680038452], "prob_new_token": [4.956466727890074e-05, 0.0768665075302124, 0.30433419346809387, 0.5323920249938965, 0.8633176684379578, 0.9442951679229736, 0.9629697799682617, 0.9718949198722839, 0.9782231450080872, 0.9831892251968384, 0.986971914768219], "prob_old_token": [0.9614652395248413, 0.0606175996363163, 0.019763074815273285, 0.01458115316927433, 0.001331048901192844, 0.00013223118730820715, 3.247540007578209e-05, 1.3366484381549526e-05, 7.2085454121406656e-06, 4.507225639827084e-06, 3.102297114310204e-06], "l1-model.layers.2.mlp.down_proj.weight": [84707.4765625], "l2-model.layers.2.mlp.down_proj.weight": [14.067813873291016], "linf-model.layers.2.mlp.down_proj.weight": [0.004786877892911434], "request": {"prompt": "{} borders with", "subject": "Cambodia", "target_new": {"str": "the Netherlands"}, "old_answer": {"str": "Thailand"}, "seed": 42}}, {"loss_per_step": [8.301, 3.448, 2.594, 0.711, 0.027, 0.013, 0.008], "prob_new": [0.4998193681240082, 0.499679297208786, 0.49964702129364014, 0.6203995943069458, 
0.9737935066223145, 0.9870541095733643, 0.9917150735855103], "prob_old": [0.9832686185836792, 0.5155749320983887, 0.4560704231262207, 0.49793437123298645, 0.4961881935596466, 0.4927752614021301, 0.4887852370738983], "prob_new_token": [6.171795519094303e-08, 0.0010127698769792914, 0.0056176395155489445, 0.24166718125343323, 0.9480480551719666, 0.9744166731834412, 0.983782947063446], "prob_old_token": [0.9667011499404907, 0.03326242417097092, 0.01226215623319149, 0.0016563053941354156, 2.0483266780502163e-05, 1.7585391560714925e-06, 3.6767556821359904e-07], "l1-model.layers.2.mlp.down_proj.weight": [54766.72265625], "l2-model.layers.2.mlp.down_proj.weight": [9.68449592590332], "linf-model.layers.2.mlp.down_proj.weight": [0.002916131168603897], "request": {"prompt": "{} borders with", "subject": "Lebanon", "target_new": {"str": "Indonesia"}, "old_answer": {"str": "Syria"}, "seed": 42}}, {"loss_per_step": [21.167, 9.629, 1.983, 0.149, 0.053, 0.034, 0.022, 0.014, 0.009], "prob_new": [6.417523179536033e-10, 6.579433829756454e-05, 0.1376374065876007, 0.8612685799598694, 0.9483359456062317, 0.9667389392852783, 0.9785799980163574, 0.9861263036727905, 0.990773618221283], "prob_old": [0.9832686185836792, 0.5340402126312256, 0.11579818278551102, 0.41150540113449097, 0.46883535385131836, 0.47879278659820557, 0.4781203269958496, 0.4727404713630676, 0.4643188416957855], "prob_new_token": [6.417523179536033e-10, 6.579433829756454e-05, 0.1376374065876007, 0.8612685799598694, 0.9483359456062317, 0.9667389392852783, 0.9785799980163574, 0.9861263036727905, 0.990773618221283], "prob_old_token": [0.9667011499404907, 0.07257366180419922, 9.287668945034966e-05, 1.2178251381556038e-05, 3.777726533371606e-06, 1.9324108961882303e-06, 8.642012971904478e-07, 3.859205151002243e-07, 1.852389317491543e-07], "l1-model.layers.2.mlp.down_proj.weight": [71508.609375], "l2-model.layers.2.mlp.down_proj.weight": [12.198533058166504], "linf-model.layers.2.mlp.down_proj.weight": [0.003936620429158211], "request": {"prompt": "{} borders with", "subject": "Lebanon", "target_new": {"str": "Oregon"}, "old_answer": {"str": "Syria"}, "seed": 42}}, {"loss_per_step": [13.31, 4.675, 4.64, 1.846, 0.191, 0.022, 0.004], "prob_new": [1.6584667719143908e-06, 0.009325016289949417, 0.00965329259634018, 0.15782128274440765, 0.8259618878364563, 0.9778807163238525, 0.9956926703453064], "prob_old": [0.9832686185836792, 0.5103232860565186, 0.49511298537254333, 0.4375033974647522, 0.48153167963027954, 0.49004536867141724, 0.49287110567092896], "prob_new_token": [1.6584667719143908e-06, 0.009325016289949417, 0.00965329259634018, 0.15782128274440765, 0.8259618878364563, 0.9778807163238525, 0.9956926703453064], "prob_old_token": [0.9667011499404907, 0.024320367723703384, 0.00817592442035675, 0.0013142613461241126, 0.00014903023838996887, 1.2645372407860123e-05, 1.960570898518199e-06], "l1-model.layers.2.mlp.down_proj.weight": [55584.5546875], "l2-model.layers.2.mlp.down_proj.weight": [9.813570976257324], "linf-model.layers.2.mlp.down_proj.weight": [0.0029692668467760086], "request": {"prompt": "{} borders with", "subject": "Lebanon", "target_new": {"str": "India"}, "old_answer": {"str": "Syria"}, "seed": 42}}, {"loss_per_step": [6.033, 2.928, 0.61, 0.131, 0.057, 0.03, 0.017, 0.012, 0.009], "prob_new": [0.027241647243499756, 0.0928187370300293, 0.5645136833190918, 0.8805052638053894, 0.9457019567489624, 0.9706954956054688, 0.9827795028686523, 0.9884798526763916, 0.9913265109062195], "prob_old": [0.8121445178985596, 0.37832191586494446, 
0.48699063062667847, 0.48563769459724426, 0.4863690435886383, 0.4870828688144684, 0.48776736855506897, 0.4882363975048065, 0.48837730288505554], "prob_new_token": [0.0004474255838431418, 0.08665609359741211, 0.3756634294986725, 0.7764040231704712, 0.8857288360595703, 0.9355852603912354, 0.9629063010215759, 0.9763352274894714, 0.9830652475357056], "prob_old_token": [0.923000693321228, 0.00023983625578694046, 0.0003011231601703912, 6.75293558742851e-05, 2.3288584998226725e-05, 9.434607818548102e-06, 3.6057838315173285e-06, 1.595324420122779e-06, 8.583326120970014e-07], "l1-model.layers.2.mlp.down_proj.weight": [72098.21875], "l2-model.layers.2.mlp.down_proj.weight": [12.270002365112305], "linf-model.layers.2.mlp.down_proj.weight": [0.003926634788513184], "request": {"prompt": "{} borders with", "subject": "Benin", "target_new": {"str": "the United Kingdom"}, "old_answer": {"str": "Nigeria"}, "seed": 42}}, {"loss_per_step": [4.378, 2.175, 0.567, 0.236, 0.109, 0.054, 0.053, 0.023, 0.019, 0.018, 0.017, 0.015, 0.013, 0.011, 0.009], "prob_new": [0.4985784888267517, 0.43458640575408936, 0.6847494840621948, 0.8342164754867554, 0.9077101945877075, 0.9507369995117188, 0.9513297080993652, 0.9777401685714722, 0.981852650642395, 0.9828577041625977, 0.9839160442352295, 0.9855315685272217, 0.987517774105072, 0.9895310997962952, 0.9913540482521057], "prob_old": [0.8121445178985596, 0.2257271260023117, 0.3078874349594116, 0.38866740465164185, 0.40372294187545776, 0.3774113655090332, 0.3819226026535034, 0.3988560140132904, 0.41470226645469666, 0.4279273450374603, 0.43790313601493835, 0.4452551603317261, 0.45078396797180176, 0.45505428314208984, 0.45842915773391724], "prob_new_token": [0.00044742002501152456, 0.01936291716992855, 0.1757637858390808, 0.4282386898994446, 0.681901752948761, 0.8197018504142761, 0.816908061504364, 0.9178727269172668, 0.9326404333114624, 0.9362364411354065, 0.9403263926506042, 0.9464823603630066, 0.953930139541626, 0.9613901972770691, 0.9680893421173096], "prob_old_token": [0.923000693321228, 0.001325542340055108, 0.001116898376494646, 0.0001049064812832512, 9.374426554131787e-06, 2.2374381103418273e-07, 1.1359142604305816e-07, 1.9638370929442317e-07, 4.084627676093078e-07, 6.44703959551407e-07, 7.226375942082086e-07, 6.861915267108998e-07, 6.119706768004107e-07, 5.303771217768372e-07, 4.5177796437201323e-07], "l1-model.layers.2.mlp.down_proj.weight": [91719.2421875], "l2-model.layers.2.mlp.down_proj.weight": [15.646753311157227], "linf-model.layers.2.mlp.down_proj.weight": [0.006187803577631712], "request": {"prompt": "{} borders with", "subject": "Benin", "target_new": {"str": "the Dominican Republic"}, "old_answer": {"str": "Nigeria"}, "seed": 42}}, {"loss_per_step": [12.204, 5.819, 0.154, 0.065, 0.034, 0.023, 0.018, 0.014, 0.011, 0.009], "prob_new": [5.012374003854347e-06, 0.0029700875747948885, 0.8575441837310791, 0.9372678995132446, 0.966585099697113, 0.977211594581604, 0.9825533032417297, 0.9864045977592468, 0.9892830848693848, 0.9913769960403442], "prob_old": [0.8121445178985596, 0.3533931076526642, 0.4891175627708435, 0.48892930150032043, 0.48766204714775085, 0.4877699613571167, 0.4886762499809265, 0.4895845651626587, 0.4903593361377716, 0.49100005626678467], "prob_new_token": [5.012374003854347e-06, 0.0029700875747948885, 0.8575441837310791, 0.9372678995132446, 0.966585099697113, 0.977211594581604, 0.9825533032417297, 0.9864045977592468, 0.9892830848693848, 0.9913769960403442], "prob_old_token": [0.923000693321228, 0.00012149175745435059, 7.654683577129617e-05, 
2.0177494661766104e-05, 1.126178540289402e-05, 1.0001786904467735e-05, 9.297146789322142e-06, 7.983981049619615e-06, 6.478333943960024e-06, 5.169673841010081e-06], "l1-model.layers.2.mlp.down_proj.weight": [74092.609375], "l2-model.layers.2.mlp.down_proj.weight": [12.709808349609375], "linf-model.layers.2.mlp.down_proj.weight": [0.0044973138719797134], "request": {"prompt": "{} borders with", "subject": "Benin", "target_new": {"str": "Colombia"}, "old_answer": {"str": "Nigeria"}, "seed": 42}}, {"loss_per_step": [11.165, 0.977, 0.154, 0.021, 0.019, 0.012, 0.007], "prob_new": [1.4163536434352864e-05, 0.3764370381832123, 0.8574837446212769, 0.9792510271072388, 0.9808875322341919, 0.9877442121505737, 0.9927027821540833], "prob_old": [0.9526063203811646, 0.5004441738128662, 0.4982302486896515, 0.49965009093284607, 0.49977341294288635, 0.4998141825199127, 0.4998384714126587], "prob_new_token": [1.4163536434352864e-05, 0.3764370381832123, 0.8574837446212769, 0.9792510271072388, 0.9808875322341919, 0.9877442121505737, 0.9927027821540833], "prob_old_token": [0.9069492220878601, 0.0030753149185329676, 2.6734816856333055e-05, 1.1091765284021449e-09, 1.2416212502586177e-10, 5.077179995871539e-11, 2.8797160836480096e-11], "l1-model.layers.2.mlp.down_proj.weight": [61665.046875], "l2-model.layers.2.mlp.down_proj.weight": [10.351739883422852], "linf-model.layers.2.mlp.down_proj.weight": [0.002989126369357109], "request": {"prompt": "{} borders with", "subject": "Papua New Guinea", "target_new": {"str": "Colombia"}, "old_answer": {"str": "Indonesia"}, "seed": 42}}, {"loss_per_step": [19.842, 11.166, 5.074, 0.788, 0.004], "prob_new": [2.413021338298904e-09, 1.4148564332572278e-05, 0.0062596723437309265, 0.45496323704719543, 0.9963409304618835], "prob_old": [0.9526063203811646, 0.5117180347442627, 0.4966028034687042, 0.4988100230693817, 0.49400031566619873], "prob_new_token": [2.413021338298904e-09, 1.4148564332572278e-05, 0.0062596723437309265, 0.45496323704719543, 0.9963409304618835], "prob_old_token": [0.9069492220878601, 0.02580302022397518, 0.0003150093834847212, 2.3726339350105263e-05, 8.557177011425665e-08], "l1-model.layers.2.mlp.down_proj.weight": [47066.484375], "l2-model.layers.2.mlp.down_proj.weight": [7.973536968231201], "linf-model.layers.2.mlp.down_proj.weight": [0.002005837857723236], "request": {"prompt": "{} borders with", "subject": "Papua New Guinea", "target_new": {"str": "Madrid"}, "old_answer": {"str": "Indonesia"}, "seed": 42}}, {"loss_per_step": [4.928, 1.703, 1.244, 0.095, 0.012, 0.007], "prob_new": [0.33794158697128296, 0.6670279502868652, 0.6603294610977173, 0.9156924486160278, 0.9880334734916687, 0.9934963583946228], "prob_old": [0.9526063203811646, 0.5030288100242615, 0.47854238748550415, 0.493785560131073, 0.4967072904109955, 0.4980766773223877], "prob_new_token": [2.529126504668966e-05, 0.006071576848626137, 0.02502596005797386, 0.7686676383018494, 0.9825016856193542, 0.9894539713859558], "prob_old_token": [0.9069492220878601, 0.0067832558415830135, 0.0007203790592029691, 0.0001471551222493872, 4.166211056144675e-07, 1.5516359752609787e-08], "l1-model.layers.2.mlp.down_proj.weight": [55496.1875], "l2-model.layers.2.mlp.down_proj.weight": [9.390589714050293], "linf-model.layers.2.mlp.down_proj.weight": [0.0025023852940648794], "request": {"prompt": "{} borders with", "subject": "Papua New Guinea", "target_new": {"str": "Morocco"}, "old_answer": {"str": "Indonesia"}, "seed": 42}}, {"loss_per_step": [7.472, 1.705, 2.158, 0.082, 0.038, 0.017, 0.011, 0.009], "prob_new": 
[0.48138025403022766, 0.5093784332275391, 0.3697775602340698, 0.9209624528884888, 0.9632286429405212, 0.9834465980529785, 0.9893311262130737, 0.9913119077682495], "prob_old": [0.9879899621009827, 0.0647047758102417, 0.0006201867363415658, 0.00013953748566564173, 3.880059375660494e-05, 1.1943181561946403e-05, 4.532100319920573e-06, 2.236014324807911e-06], "prob_new_token": [3.357226319167239e-07, 0.0335102453827858, 0.018511349335312843, 0.9295393824577332, 0.9721471071243286, 0.9850253462791443, 0.9895115494728088, 0.9916960597038269], "prob_old_token": [0.9879899621009827, 0.0647047758102417, 0.0006201867363415658, 0.00013953748566564173, 3.880059375660494e-05, 1.1943181561946403e-05, 4.532100319920573e-06, 2.236014324807911e-06], "l1-model.layers.2.mlp.down_proj.weight": [63089.25], "l2-model.layers.2.mlp.down_proj.weight": [11.01220703125], "linf-model.layers.2.mlp.down_proj.weight": [0.003438986837863922], "request": {"prompt": "{} borders with", "subject": "Bangladesh", "target_new": {"str": "Nigeria"}, "old_answer": {"str": "India"}, "seed": 42}}, {"loss_per_step": [12.014, 1.189, 0.013, 0.04, 0.002], "prob_new": [6.059619408915751e-06, 0.30457112193107605, 0.9872893691062927, 0.9610577821731567, 0.9982206225395203], "prob_old": [0.9879899621009827, 0.0008973319781944156, 0.0005160864093340933, 0.00036905184970237315, 5.567995685851201e-06], "prob_new_token": [6.059619408915751e-06, 0.30457112193107605, 0.9872893691062927, 0.9610577821731567, 0.9982206225395203], "prob_old_token": [0.9879899621009827, 0.0008973319781944156, 0.0005160864093340933, 0.00036905184970237315, 5.567995685851201e-06], "l1-model.layers.2.mlp.down_proj.weight": [48137.93359375], "l2-model.layers.2.mlp.down_proj.weight": [8.177915573120117], "linf-model.layers.2.mlp.down_proj.weight": [0.0020050229504704475], "request": {"prompt": "{} borders with", "subject": "Bangladesh", "target_new": {"str": "England"}, "old_answer": {"str": "India"}, "seed": 42}}, {"loss_per_step": [17.877, 5.58, 9.943, 0.25, 0.005], "prob_new": [1.7223694470658302e-08, 0.0037731342017650604, 4.8058416723506525e-05, 0.778437614440918, 0.9951804876327515], "prob_old": [0.9879899621009827, 0.13133211433887482, 4.956773773301393e-05, 0.0007310478831641376, 2.925935973507876e-07], "prob_new_token": [1.7223694470658302e-08, 0.0037731342017650604, 4.8058416723506525e-05, 0.778437614440918, 0.9951804876327515], "prob_old_token": [0.9879899621009827, 0.13133211433887482, 4.956773773301393e-05, 0.0007310478831641376, 2.925935973507876e-07], "l1-model.layers.2.mlp.down_proj.weight": [44019.859375], "l2-model.layers.2.mlp.down_proj.weight": [7.6006975173950195], "linf-model.layers.2.mlp.down_proj.weight": [0.0020058024674654007], "request": {"prompt": "{} borders with", "subject": "Bangladesh", "target_new": {"str": "Colombia"}, "old_answer": {"str": "India"}, "seed": 42}}, {"loss_per_step": [8.677, 3.8, 1.209, 0.024, 0.013, 0.012, 0.012, 0.009], "prob_new": [0.49284449219703674, 0.4944758415222168, 0.541378378868103, 0.9769331812858582, 0.9871442317962646, 0.9877561926841736, 0.987952709197998, 0.9906554818153381], "prob_old": [0.8676134943962097, 0.4600846469402313, 0.19901347160339355, 0.05595988780260086, 0.05690521001815796, 0.051048316061496735, 0.04801730439066887, 0.048800475895404816], "prob_new_token": [2.9464876760698644e-08, 0.0005063907010480762, 0.08963760733604431, 0.9580778479576111, 0.9813066720962524, 0.9879686832427979, 0.991777241230011, 0.9945381283760071], "prob_old_token": [0.641300618648529, 0.1400367021560669, 
0.050545040518045425, 0.00032694623223505914, 0.00014799900236539543, 9.29982925299555e-05, 5.360092472983524e-05, 2.7909814889426343e-05], "l1-model.layers.2.mlp.down_proj.weight": [65015.26171875], "l2-model.layers.2.mlp.down_proj.weight": [11.228864669799805], "linf-model.layers.2.mlp.down_proj.weight": [0.0034912433475255966], "request": {"prompt": "{} borders with", "subject": "Republic of Ireland", "target_new": {"str": "Syria"}, "old_answer": {"str": "the United Kingdom"}, "seed": 42}}, {"loss_per_step": [14.523, 5.065, 0.161, 0.003], "prob_new": [4.927875352223055e-07, 0.006312438286840916, 0.85126793384552, 0.996859610080719], "prob_old": [0.8676134943962097, 0.13189741969108582, 0.09686844050884247, 0.1242661103606224], "prob_new_token": [4.927875352223055e-07, 0.006312438286840916, 0.85126793384552, 0.996859610080719], "prob_old_token": [0.641300618648529, 0.18384699523448944, 0.007337534334510565, 8.072414493653923e-05], "l1-model.layers.2.mlp.down_proj.weight": [41319.49609375], "l2-model.layers.2.mlp.down_proj.weight": [6.8571343421936035], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024766325950623], "request": {"prompt": "{} borders with", "subject": "Republic of Ireland", "target_new": {"str": "Mexico"}, "old_answer": {"str": "the United Kingdom"}, "seed": 42}}, {"loss_per_step": [8.66, 4.465, 1.531, 0.119, 0.025, 0.014, 0.009], "prob_new": [0.49964380264282227, 0.4873829782009125, 0.5230656266212463, 0.8943264484405518, 0.9758155345916748, 0.9858846664428711, 0.9908891916275024], "prob_old": [0.8676134943962097, 0.2811726927757263, 0.029312288388609886, 0.07387849688529968, 0.05576784163713455, 0.03692729398608208, 0.028894852846860886], "prob_new_token": [3.0084958524412286e-08, 0.00013592385221272707, 0.04681943356990814, 0.7894325256347656, 0.9524316191673279, 0.9725942015647888, 0.9826155304908752], "prob_old_token": [0.641300618648529, 0.1265963613986969, 0.017317580059170723, 0.00998147763311863, 0.003093540435656905, 0.0018105552298948169, 0.0011526591842994094], "l1-model.layers.2.mlp.down_proj.weight": [59335.87109375], "l2-model.layers.2.mlp.down_proj.weight": [10.223592758178711], "linf-model.layers.2.mlp.down_proj.weight": [0.0029685485642403364], "request": {"prompt": "{} borders with", "subject": "Republic of Ireland", "target_new": {"str": "Indonesia"}, "old_answer": {"str": "the United Kingdom"}, "seed": 42}}, {"loss_per_step": [17.898, 11.623, 4.862, 0.027, 0.006], "prob_new": [1.685969053255576e-08, 8.960483683040366e-06, 0.007734707091003656, 0.9737876653671265, 0.99442458152771], "prob_old": [0.9967235922813416, 0.657139778137207, 0.3977951109409332, 0.45833325386047363, 0.4478687345981598], "prob_new_token": [1.685969053255576e-08, 8.960483683040366e-06, 0.007734707091003656, 0.9737876653671265, 0.99442458152771], "prob_old_token": [0.9951292276382446, 0.3204877972602844, 2.1608977931464324e-06, 7.98672317614546e-06, 1.1561390920178383e-06], "l1-model.layers.2.mlp.down_proj.weight": [45959.3046875], "l2-model.layers.2.mlp.down_proj.weight": [7.835949897766113], "linf-model.layers.2.mlp.down_proj.weight": [0.0020042043179273605], "request": {"prompt": "{} borders with", "subject": "East Timor", "target_new": {"str": "Lyon"}, "old_answer": {"str": "Indonesia"}, "seed": 42}}, {"loss_per_step": [16.509, 7.937, 3.658, 0.737, 0.113, 0.06, 0.044, 0.033, 0.024, 0.018, 0.014, 0.011, 0.009], "prob_new": [6.764933146996555e-08, 0.00035723316250368953, 0.025784356519579887, 0.4787723422050476, 0.8932795524597168, 0.9421777129173279, 0.9573009610176086, 
0.9679428339004517, 0.9761088490486145, 0.9819251894950867, 0.986099362373352, 0.9891120195388794, 0.9913033843040466], "prob_old": [0.9967235922813416, 0.6209602355957031, 0.4970325529575348, 0.49978020787239075, 0.4994252920150757, 0.49930453300476074, 0.499223530292511, 0.49918246269226074, 0.4991721212863922, 0.4991883337497711, 0.4992196261882782, 0.49925047159194946, 0.49927422404289246], "prob_new_token": [6.764933146996555e-08, 0.00035723316250368953, 0.025784356519579887, 0.4787723422050476, 0.8932795524597168, 0.9421777129173279, 0.9573009610176086, 0.9679428339004517, 0.9761088490486145, 0.9819251894950867, 0.986099362373352, 0.9891120195388794, 0.9913033843040466], "prob_old_token": [0.9951292276382446, 0.2464180439710617, 5.241641338216141e-05, 0.0006020998698659241, 5.980421065032715e-06, 5.045126272307243e-07, 1.6328067431459203e-07, 8.768161308125855e-08, 5.5561660161629334e-08, 3.883278054672701e-08, 2.8373356997235533e-08, 2.1429510610460056e-08, 1.6706040995018157e-08], "l1-model.layers.2.mlp.down_proj.weight": [82354.546875], "l2-model.layers.2.mlp.down_proj.weight": [14.419588088989258], "linf-model.layers.2.mlp.down_proj.weight": [0.005661904811859131], "request": {"prompt": "{} borders with", "subject": "East Timor", "target_new": {"str": "England"}, "old_answer": {"str": "Indonesia"}, "seed": 42}}, {"loss_per_step": [5.911, 2.398, 2.138, 0.037, 0.026, 0.015, 0.009], "prob_new": [0.663870632648468, 0.6664804816246033, 0.6572471857070923, 0.9644829630851746, 0.9752098321914673, 0.9856240749359131, 0.9914004802703857], "prob_old": [0.9967235922813416, 0.6300725936889648, 0.46831104159355164, 0.4960964620113373, 0.495266318321228, 0.49427530169487, 0.49336153268814087], "prob_new_token": [2.0044687687459373e-08, 0.0007526786648668349, 0.0016912119463086128, 0.9001075029373169, 0.933297872543335, 0.9630401134490967, 0.9788413643836975], "prob_old_token": [0.9951292276382446, 0.2642529308795929, 6.673188636341365e-06, 2.7952219170401804e-05, 1.9564646208891645e-05, 8.737884854781441e-06, 3.882804321619915e-06], "l1-model.layers.2.mlp.down_proj.weight": [61235.69921875], "l2-model.layers.2.mlp.down_proj.weight": [10.395586967468262], "linf-model.layers.2.mlp.down_proj.weight": [0.0029990291222929955], "request": {"prompt": "{} borders with", "subject": "East Timor", "target_new": {"str": "Namibia"}, "old_answer": {"str": "Indonesia"}, "seed": 42}}, {"loss_per_step": [6.066, 1.447, 0.461, 0.058, 0.014, 0.005], "prob_new": [0.033116064965724945, 0.33204707503318787, 0.6403286457061768, 0.9450048208236694, 0.9859147071838379, 0.9951881170272827], "prob_old": [0.9603095650672913, 0.5051087737083435, 0.4898158609867096, 0.4959205985069275, 0.49747124314308167, 0.498454213142395], "prob_new_token": [0.0013411487452685833, 0.17878323793411255, 0.48943233489990234, 0.8801656365394592, 0.9725273847579956, 0.9913240671157837], "prob_old_token": [0.9207631349563599, 0.011497828178107738, 0.00010208770981989801, 0.00014379619096871465, 1.9617053112597205e-05, 3.6168321457807906e-06], "l1-model.layers.2.mlp.down_proj.weight": [55763.90625], "l2-model.layers.2.mlp.down_proj.weight": [9.342089653015137], "linf-model.layers.2.mlp.down_proj.weight": [0.0024854587391018867], "request": {"prompt": "{} borders with", "subject": "Malaysia", "target_new": {"str": "the United Kingdom"}, "old_answer": {"str": "Thailand"}, "seed": 42}}, {"loss_per_step": [22.65, 10.745, 4.548, 0.333, 0.002], "prob_new": [1.45578618604425e-10, 2.155518814106472e-05, 0.010587033815681934, 0.7165569067001343, 
0.998378336429596], "prob_old": [0.9603095650672913, 0.4910820722579956, 0.47888973355293274, 0.4743495583534241, 0.44174742698669434], "prob_new_token": [1.45578618604425e-10, 2.155518814106472e-05, 0.010587033815681934, 0.7165569067001343, 0.998378336429596], "prob_old_token": [0.9207631349563599, 0.006952913478016853, 0.00037154590245336294, 0.00016909287660382688, 7.992555310920579e-07], "l1-model.layers.2.mlp.down_proj.weight": [47526.25390625], "l2-model.layers.2.mlp.down_proj.weight": [8.022896766662598], "linf-model.layers.2.mlp.down_proj.weight": [0.0020058248192071915], "request": {"prompt": "{} borders with", "subject": "Malaysia", "target_new": {"str": "Madrid"}, "old_answer": {"str": "Thailand"}, "seed": 42}}, {"loss_per_step": [1.946, 0.055, 0.003], "prob_new": [0.5101141929626465, 0.9479819536209106, 0.9968135356903076], "prob_old": [0.9603095650672913, 0.4982224106788635, 0.49714553356170654], "prob_new_token": [0.02039150707423687, 0.8972544074058533, 0.9945422410964966], "prob_old_token": [0.9207631349563599, 0.0005821704980917275, 5.2512750698952004e-06], "l1-model.layers.2.mlp.down_proj.weight": [36716.59765625], "l2-model.layers.2.mlp.down_proj.weight": [5.529489040374756], "linf-model.layers.2.mlp.down_proj.weight": [0.001000677701085806], "request": {"prompt": "{} borders with", "subject": "Malaysia", "target_new": {"str": "Indonesia"}, "old_answer": {"str": "Thailand"}, "seed": 42}}, {"loss_per_step": [4.496, 2.973, 0.91, 0.001], "prob_new": [0.6190763711929321, 0.45885366201400757, 0.6878239512443542, 0.9990217089653015], "prob_old": [0.7711874842643738, 0.2963330149650574, 0.3914792239665985, 0.28377658128738403], "prob_new_token": [1.6200932577703497e-06, 0.0003418468404561281, 0.0654313713312149, 0.997090756893158], "prob_old_token": [0.899784505367279, 0.1702362447977066, 0.2596951723098755, 0.0007173564517870545], "l1-model.layers.2.mlp.down_proj.weight": [41458.453125], "l2-model.layers.2.mlp.down_proj.weight": [6.7679266929626465], "linf-model.layers.2.mlp.down_proj.weight": [0.001502479426562786], "request": {"prompt": "{} has acquired their education at", "subject": "Justin Gatlin", "target_new": {"str": "Princeton"}, "old_answer": {"str": "the University of Tennessee"}, "seed": 42}}, {"loss_per_step": [2.126, 0.517, 0.135, 0.033, 0.013, 0.005], "prob_new": [0.7283803224563599, 0.7547837495803833, 0.8975054621696472, 0.9688658714294434, 0.9876128435134888, 0.9946224689483643], "prob_old": [0.7711874842643738, 0.468650758266449, 0.3771788775920868, 0.465118944644928, 0.48780176043510437, 0.49637460708618164], "prob_new_token": [0.899784505367279, 0.5467022657394409, 0.5018427968025208, 0.8520079255104065, 0.9459050297737122, 0.9811444878578186], "prob_old_token": [0.899784505367279, 0.5467022657394409, 0.5018427968025208, 0.8520079255104065, 0.9459050297737122, 0.9811444878578186], "l1-model.layers.2.mlp.down_proj.weight": [57839.796875], "l2-model.layers.2.mlp.down_proj.weight": [9.51760196685791], "linf-model.layers.2.mlp.down_proj.weight": [0.002500961534678936], "request": {"prompt": "{} has acquired their education at", "subject": "Justin Gatlin", "target_new": {"str": "the London School of Economics"}, "old_answer": {"str": "the University of Tennessee"}, "seed": 42}}, {"loss_per_step": [6.083, 1.463, 0.078, 0.037, 0.023, 0.016, 0.012, 0.009], "prob_new": [0.33441537618637085, 0.36401885747909546, 0.9271783828735352, 0.9642472863197327, 0.977237343788147, 0.9840999841690063, 0.9883283972740173, 0.9912573099136353], "prob_old": [0.7711874842643738, 
0.30075234174728394, 0.22557063400745392, 0.21255427598953247, 0.21585744619369507, 0.220379039645195, 0.22352340817451477, 0.22533974051475525], "prob_new_token": [7.774927325954195e-06, 0.08316230028867722, 0.9973199963569641, 0.9974153637886047, 0.9978646636009216, 0.9986120462417603, 0.9990322589874268, 0.9992610216140747], "prob_old_token": [0.899784505367279, 0.22008967399597168, 0.0011384999379515648, 0.001116798259317875, 0.001061340793967247, 0.0007823232444934547, 0.0005918263923376799, 0.0004731083463411778], "l1-model.layers.2.mlp.down_proj.weight": [68828.0546875], "l2-model.layers.2.mlp.down_proj.weight": [11.483763694763184], "linf-model.layers.2.mlp.down_proj.weight": [0.003492012619972229], "request": {"prompt": "{} has acquired their education at", "subject": "Justin Gatlin", "target_new": {"str": "Oxford University"}, "old_answer": {"str": "the University of Tennessee"}, "seed": 42}}, {"loss_per_step": [1.526, 0.721, 0.166, 0.015, 0.007], "prob_new": [0.6975437998771667, 0.5829757452011108, 0.8508873581886292, 0.9849838018417358, 0.993396520614624], "prob_old": [0.9237146377563477, 0.5657702684402466, 0.709060549736023, 0.7876824140548706, 0.7937394380569458], "prob_new_token": [0.8842815160751343, 0.16886784136295319, 0.7685824632644653, 0.9683635830879211, 0.9864624738693237], "prob_old_token": [0.8842815160751343, 0.16886784136295319, 0.7685824632644653, 0.9683635830879211, 0.9864624738693237], "l1-model.layers.2.mlp.down_proj.weight": [48550.5390625], "l2-model.layers.2.mlp.down_proj.weight": [8.208944320678711], "linf-model.layers.2.mlp.down_proj.weight": [0.0020040832459926605], "request": {"prompt": "{} has acquired their education at", "subject": "Carrie Lam", "target_new": {"str": "the University of Sydney"}, "old_answer": {"str": "the University of Hong Kong"}, "seed": 42}}, {"loss_per_step": [2.098, 1.616, 0.512, 0.039, 0.012, 0.004], "prob_new": [0.7974426746368408, 0.5798363089561462, 0.7282546758651733, 0.9629818797111511, 0.9880203008651733, 0.996272087097168], "prob_old": [0.9237146377563477, 0.5038377046585083, 0.6447522044181824, 0.7530418038368225, 0.778978705406189, 0.7814508080482483], "prob_new_token": [0.8842815160751343, 0.29767417907714844, 0.5060040354728699, 0.8667739629745483, 0.9536862969398499, 0.9848748445510864], "prob_old_token": [0.8842815160751343, 0.29767417907714844, 0.5060040354728699, 0.8667739629745483, 0.9536862969398499, 0.9848748445510864], "l1-model.layers.2.mlp.down_proj.weight": [58211.3671875], "l2-model.layers.2.mlp.down_proj.weight": [9.527450561523438], "linf-model.layers.2.mlp.down_proj.weight": [0.0025017696898430586], "request": {"prompt": "{} has acquired their education at", "subject": "Carrie Lam", "target_new": {"str": "the University of Adelaide"}, "old_answer": {"str": "the University of Hong Kong"}, "seed": 42}}, {"loss_per_step": [1.74, 1.759, 0.928, 0.288, 0.075, 0.074, 0.005], "prob_new": [0.823897123336792, 0.625807523727417, 0.7463622689247131, 0.8223713636398315, 0.9355985522270203, 0.9365290403366089, 0.9954361915588379], "prob_old": [0.9237146377563477, 0.5085987448692322, 0.6344336271286011, 0.6932768225669861, 0.6991305351257324, 0.6968783736228943, 0.7757905125617981], "prob_new_token": [0.8842815160751343, 0.4192312955856323, 0.47831597924232483, 0.7526139616966248, 0.6691566705703735, 0.8890540599822998, 0.9772441983222961], "prob_old_token": [0.8842815160751343, 0.4192312955856323, 0.47831597924232483, 0.7526139616966248, 0.6691566705703735, 0.8890540599822998, 0.9772441983222961], 
"l1-model.layers.2.mlp.down_proj.weight": [61251.953125], "l2-model.layers.2.mlp.down_proj.weight": [10.342100143432617], "linf-model.layers.2.mlp.down_proj.weight": [0.0030136285349726677], "request": {"prompt": "{} has acquired their education at", "subject": "Carrie Lam", "target_new": {"str": "the University of Witwatersrand"}, "old_answer": {"str": "the University of Hong Kong"}, "seed": 42}}, {"loss_per_step": [2.719, 0.49, 0.058, 0.011, 0.005], "prob_new": [0.2765352427959442, 0.6574718952178955, 0.9452893733978271, 0.9889090061187744, 0.9950498938560486], "prob_old": [0.952218770980835, 0.0036449339240789413, 5.18961860507261e-05, 1.3527538612834178e-05, 3.161120048389421e-06], "prob_new_token": [0.022677123546600342, 0.4503992199897766, 0.9188984632492065, 0.9696374535560608, 0.9849779009819031], "prob_old_token": [0.952218770980835, 0.0036449339240789413, 5.18961860507261e-05, 1.3527538612834178e-05, 3.161120048389421e-06], "l1-model.layers.2.mlp.down_proj.weight": [47202.359375], "l2-model.layers.2.mlp.down_proj.weight": [8.080405235290527], "linf-model.layers.2.mlp.down_proj.weight": [0.00200124504044652], "request": {"prompt": "{} has acquired their education at", "subject": "John F. Kennedy", "target_new": {"str": "the University of Virginia"}, "old_answer": {"str": "Harvard"}, "seed": 42}}, {"loss_per_step": [5.354, 2.278, 0.832, 0.071, 0.011, 0.007], "prob_new": [0.257196307182312, 0.36080825328826904, 0.6704007983207703, 0.9333838224411011, 0.98909592628479, 0.9931994080543518], "prob_old": [0.952218770980835, 0.00775209441781044, 0.0016147956484928727, 0.0005574661190621555, 3.5824366932502016e-05, 8.68978622747818e-06], "prob_new_token": [0.022677123546600342, 0.2170354276895523, 0.817559003829956, 0.8418919444084167, 0.9661745429039001, 0.9790148735046387], "prob_old_token": [0.952218770980835, 0.00775209441781044, 0.0016147956484928727, 0.0005574661190621555, 3.5824366932502016e-05, 8.68978622747818e-06], "l1-model.layers.2.mlp.down_proj.weight": [56790.1484375], "l2-model.layers.2.mlp.down_proj.weight": [9.463338851928711], "linf-model.layers.2.mlp.down_proj.weight": [0.0024728924036026], "request": {"prompt": "{} has acquired their education at", "subject": "John F. Kennedy", "target_new": {"str": "the University of Tennessee"}, "old_answer": {"str": "Harvard"}, "seed": 42}}, {"loss_per_step": [3.868, 0.799, 0.172, 0.029, 0.014, 0.006], "prob_new": [0.31928181648254395, 0.5353156924247742, 0.8623144626617432, 0.9717074036598206, 0.9861547946929932, 0.9941750168800354], "prob_old": [0.952218770980835, 0.001216655713506043, 0.0001762294996296987, 2.1202193238423206e-05, 2.988638470924343e-06, 6.643448386967066e-07], "prob_new_token": [0.022677123546600342, 0.2717509865760803, 0.8389751315116882, 0.9356827139854431, 0.9691709280014038, 0.9876954555511475], "prob_old_token": [0.952218770980835, 0.001216655713506043, 0.0001762294996296987, 2.1202193238423206e-05, 2.988638470924343e-06, 6.643448386967066e-07], "l1-model.layers.2.mlp.down_proj.weight": [57921.5078125], "l2-model.layers.2.mlp.down_proj.weight": [9.495131492614746], "linf-model.layers.2.mlp.down_proj.weight": [0.0025054854340851307], "request": {"prompt": "{} has acquired their education at", "subject": "John F. 
Kennedy", "target_new": {"str": "the University of Wisconsin-Superior"}, "old_answer": {"str": "Harvard"}, "seed": 42}}, {"loss_per_step": [2.725, 0.465, 0.043, 0.006], "prob_new": [0.6047391891479492, 0.6859060525894165, 0.9600281119346619, 0.994408130645752], "prob_old": [0.9496051073074341, 0.2772800326347351, 0.24805767834186554, 0.247616246342659], "prob_new_token": [0.00034600670915097, 0.3488718569278717, 0.9999859929084778, 0.9999973773956299], "prob_old_token": [0.9275648593902588, 0.12795159220695496, 1.586821099408553e-06, 4.84321958538203e-07], "l1-model.layers.2.mlp.down_proj.weight": [42146.6171875], "l2-model.layers.2.mlp.down_proj.weight": [6.903675556182861], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024635940790176], "request": {"prompt": "{} has acquired their education at", "subject": "Dwayne Johnson", "target_new": {"str": "Stanford University"}, "old_answer": {"str": "the University of Miami"}, "seed": 42}}, {"loss_per_step": [3.067, 1.385, 0.531, 0.114, 0.061, 0.017, 0.004], "prob_new": [0.6617727279663086, 0.675754964351654, 0.7548477649688721, 0.9086825251579285, 0.9439689517021179, 0.983437180519104, 0.9962862730026245], "prob_old": [0.9496051073074341, 0.5134495496749878, 0.6127398014068604, 0.6167498230934143, 0.7162164449691772, 0.7385287284851074, 0.7459406852722168], "prob_new_token": [0.9275648593902588, 0.3031494915485382, 0.665062427520752, 0.5698278546333313, 0.9246304035186768, 0.971582293510437, 0.9881798028945923], "prob_old_token": [0.9275648593902588, 0.3031494915485382, 0.665062427520752, 0.5698278546333313, 0.9246304035186768, 0.971582293510437, 0.9881798028945923], "l1-model.layers.2.mlp.down_proj.weight": [60456.15234375], "l2-model.layers.2.mlp.down_proj.weight": [10.212427139282227], "linf-model.layers.2.mlp.down_proj.weight": [0.002999318763613701], "request": {"prompt": "{} has acquired their education at", "subject": "Dwayne Johnson", "target_new": {"str": "the University of Adelaide"}, "old_answer": {"str": "the University of Miami"}, "seed": 42}}, {"loss_per_step": [3.53, 1.54, 0.436, 0.026, 0.026, 0.017, 0.01], "prob_new": [0.7225516438484192, 0.655507504940033, 0.7940590381622314, 0.9744135141372681, 0.9746683239936829, 0.9836392402648926, 0.9905304312705994], "prob_old": [0.9496051073074341, 0.5695679783821106, 0.5148854851722717, 0.7265633940696716, 0.7206479907035828, 0.7306271195411682, 0.7388246655464172], "prob_new_token": [0.9275648593902588, 0.48146316409111023, 0.9277037978172302, 0.9425845742225647, 0.9196259379386902, 0.9465821385383606, 0.9696200489997864], "prob_old_token": [0.9275648593902588, 0.48146316409111023, 0.9277037978172302, 0.9425845742225647, 0.9196259379386902, 0.9465821385383606, 0.9696200489997864], "l1-model.layers.2.mlp.down_proj.weight": [57425.60546875], "l2-model.layers.2.mlp.down_proj.weight": [9.998688697814941], "linf-model.layers.2.mlp.down_proj.weight": [0.0029947995208203793], "request": {"prompt": "{} has acquired their education at", "subject": "Dwayne Johnson", "target_new": {"str": "the University of Ghana"}, "old_answer": {"str": "the University of Miami"}, "seed": 42}}, {"loss_per_step": [4.311, 0.424, 1.947, 0.196, 0.04, 0.02, 0.012, 0.008], "prob_new": [0.4900384545326233, 0.6972099542617798, 0.3262189030647278, 0.8352376818656921, 0.9626022577285767, 0.9802467226982117, 0.9879685640335083, 0.9921700358390808], "prob_old": [0.969667911529541, 0.7007825374603271, 0.5479472279548645, 0.6376185417175293, 0.7030571103096008, 0.7179166078567505, 0.7218297719955444, 0.7209219932556152], 
"prob_new_token": [0.9433932304382324, 0.8017754554748535, 0.21889793872833252, 0.6373779773712158, 0.8734710216522217, 0.9302648305892944, 0.9582120180130005, 0.9737468361854553], "prob_old_token": [0.9433932304382324, 0.8017754554748535, 0.21889793872833252, 0.6373779773712158, 0.8734710216522217, 0.9302648305892944, 0.9582120180130005, 0.9737468361854553], "l1-model.layers.2.mlp.down_proj.weight": [65707.09375], "l2-model.layers.2.mlp.down_proj.weight": [11.170563697814941], "linf-model.layers.2.mlp.down_proj.weight": [0.0034721866250038147], "request": {"prompt": "{} has acquired their education at", "subject": "Ir\u00e8ne Joliot-Curie", "target_new": {"str": "the University of Glasgow"}, "old_answer": {"str": "the Sorbonne"}, "seed": 42}}, {"loss_per_step": [4.187, 0.48, 0.165, 0.009], "prob_new": [0.4119446873664856, 0.67120760679245, 0.8638316988945007, 0.9913192987442017], "prob_old": [0.969667911529541, 0.619430422782898, 0.6777558326721191, 0.7423854470252991], "prob_new_token": [0.9433932304382324, 0.46689414978027344, 0.7120078206062317, 0.9702780842781067], "prob_old_token": [0.9433932304382324, 0.46689414978027344, 0.7120078206062317, 0.9702780842781067], "l1-model.layers.2.mlp.down_proj.weight": [44067.66015625], "l2-model.layers.2.mlp.down_proj.weight": [7.013527870178223], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024691820144653], "request": {"prompt": "{} has acquired their education at", "subject": "Ir\u00e8ne Joliot-Curie", "target_new": {"str": "the University of Havana"}, "old_answer": {"str": "the Sorbonne"}, "seed": 42}}, {"loss_per_step": [1.977, 0.18, 0.113, 0.013, 0.004], "prob_new": [0.7649633884429932, 0.8493306636810303, 0.9123085737228394, 0.9872209429740906, 0.995556116104126], "prob_old": [0.969667911529541, 0.6747071146965027, 0.6378968954086304, 0.7417905330657959, 0.7482268810272217], "prob_new_token": [0.9433932304382324, 0.6980723738670349, 0.551539957523346, 0.9671684503555298, 0.9929176568984985], "prob_old_token": [0.9433932304382324, 0.6980723738670349, 0.551539957523346, 0.9671684503555298, 0.9929176568984985], "l1-model.layers.2.mlp.down_proj.weight": [51515.38671875], "l2-model.layers.2.mlp.down_proj.weight": [8.348883628845215], "linf-model.layers.2.mlp.down_proj.weight": [0.002005252754315734], "request": {"prompt": "{} has acquired their education at", "subject": "Ir\u00e8ne Joliot-Curie", "target_new": {"str": "the London School of Economics"}, "old_answer": {"str": "the Sorbonne"}, "seed": 42}}, {"loss_per_step": [4.318, 0.588, 0.161, 0.024, 0.02, 0.018, 0.008], "prob_new": [0.32342442870140076, 0.6417633891105652, 0.860008716583252, 0.9769024848937988, 0.9806419610977173, 0.982962429523468, 0.9916149377822876], "prob_old": [0.9525465369224548, 0.5489128828048706, 0.6029993295669556, 0.6501886248588562, 0.6416627168655396, 0.6407210230827332, 0.6478304266929626], "prob_new_token": [0.810407817363739, 0.2658335268497467, 0.68036288022995, 0.9661293029785156, 0.915503978729248, 0.9259653687477112, 0.9704115390777588], "prob_old_token": [0.810407817363739, 0.2658335268497467, 0.68036288022995, 0.9661293029785156, 0.915503978729248, 0.9259653687477112, 0.9704115390777588], "l1-model.layers.2.mlp.down_proj.weight": [65770.6953125], "l2-model.layers.2.mlp.down_proj.weight": [10.669997215270996], "linf-model.layers.2.mlp.down_proj.weight": [0.003008075524121523], "request": {"prompt": "{} has acquired their education at", "subject": "Wangari Muta Maathai", "target_new": {"str": "the Royal Academy of Music"}, "old_answer": {"str": "the 
University of Nairobi"}, "seed": 42}}, {"loss_per_step": [3.404, 1.433, 0.455, 0.038, 0.038, 0.032, 0.021, 0.014, 0.01], "prob_new": [0.7438986897468567, 0.6310645937919617, 0.752157986164093, 0.9638798832893372, 0.9636562466621399, 0.968723714351654, 0.9793595671653748, 0.986454963684082, 0.9904837608337402], "prob_old": [0.9525465369224548, 0.6479697227478027, 0.6016412377357483, 0.6382842063903809, 0.641624391078949, 0.6501648426055908, 0.6630259156227112, 0.6727306842803955, 0.6794686317443848], "prob_new_token": [0.810407817363739, 0.6615487337112427, 0.8223134875297546, 0.9193882942199707, 0.9037773013114929, 0.9089942574501038, 0.9429667592048645, 0.9662440419197083, 0.9788976907730103], "prob_old_token": [0.810407817363739, 0.6615487337112427, 0.8223134875297546, 0.9193882942199707, 0.9037773013114929, 0.9089942574501038, 0.9429667592048645, 0.9662440419197083, 0.9788976907730103], "l1-model.layers.2.mlp.down_proj.weight": [75114.234375], "l2-model.layers.2.mlp.down_proj.weight": [12.36024284362793], "linf-model.layers.2.mlp.down_proj.weight": [0.003980511799454689], "request": {"prompt": "{} has acquired their education at", "subject": "Wangari Muta Maathai", "target_new": {"str": "the University of Buenos Aires"}, "old_answer": {"str": "the University of Nairobi"}, "seed": 42}}, {"loss_per_step": [2.344, 0.808, 0.177, 0.034, 0.017, 0.012, 0.009], "prob_new": [0.6827627420425415, 0.5087999701499939, 0.8422497510910034, 0.9666658043861389, 0.9828484058380127, 0.9884131550788879, 0.9906847476959229], "prob_old": [0.9525465369224548, 0.6189039945602417, 0.7582787275314331, 0.8051968812942505, 0.8107177019119263, 0.8104264140129089, 0.8085561990737915], "prob_new_token": [0.810407817363739, 0.41633832454681396, 0.7940945029258728, 0.911078929901123, 0.952349841594696, 0.9673280715942383, 0.973721981048584], "prob_old_token": [0.810407817363739, 0.41633832454681396, 0.7940945029258728, 0.911078929901123, 0.952349841594696, 0.9673280715942383, 0.973721981048584], "l1-model.layers.2.mlp.down_proj.weight": [65834.8125], "l2-model.layers.2.mlp.down_proj.weight": [10.61493968963623], "linf-model.layers.2.mlp.down_proj.weight": [0.0029710642993450165], "request": {"prompt": "{} has acquired their education at", "subject": "Wangari Muta Maathai", "target_new": {"str": "the University of Edinburgh"}, "old_answer": {"str": "the University of Nairobi"}, "seed": 42}}, {"loss_per_step": [3.486, 0.883, 0.296, 0.023, 0.014, 0.009], "prob_new": [0.5859021544456482, 0.5909121632575989, 0.8301008939743042, 0.9776919484138489, 0.98662269115448, 0.9911210536956787], "prob_old": [0.968731701374054, 0.49364256858825684, 0.7116708159446716, 0.6446552276611328, 0.6409452557563782, 0.6462793350219727], "prob_new_token": [0.9169436693191528, 0.6136598587036133, 0.86729896068573, 0.9616239070892334, 0.9870423078536987, 0.9924439787864685], "prob_old_token": [0.9169436693191528, 0.6136598587036133, 0.86729896068573, 0.9616239070892334, 0.9870423078536987, 0.9924439787864685], "l1-model.layers.2.mlp.down_proj.weight": [60389.15625], "l2-model.layers.2.mlp.down_proj.weight": [9.688076972961426], "linf-model.layers.2.mlp.down_proj.weight": [0.0024996045976877213], "request": {"prompt": "{} has acquired their education at", "subject": "James K. 
Polk", "target_new": {"str": "the University of Wisconsin-Superior"}, "old_answer": {"str": "the University of North Carolina"}, "seed": 42}}, {"loss_per_step": [3.827, 0.433, 0.032, 0.007], "prob_new": [0.1943502277135849, 0.6490027904510498, 0.9684352278709412, 0.9932951927185059], "prob_old": [0.968731701374054, 0.4427950382232666, 0.424030065536499, 0.40568843483924866], "prob_new_token": [0.0012229742715135217, 0.6701387166976929, 0.9926613569259644, 0.9968385100364685], "prob_old_token": [0.9169436693191528, 0.1638912856578827, 0.004103018902242184, 0.0014892170438542962], "l1-model.layers.2.mlp.down_proj.weight": [45017.890625], "l2-model.layers.2.mlp.down_proj.weight": [7.085444450378418], "linf-model.layers.2.mlp.down_proj.weight": [0.0015023685991764069], "request": {"prompt": "{} has acquired their education at", "subject": "James K. Polk", "target_new": {"str": "Harvard University"}, "old_answer": {"str": "the University of North Carolina"}, "seed": 42}}, {"loss_per_step": [3.933, 0.762, 0.131, 0.018, 0.006], "prob_new": [0.5695682764053345, 0.5345665812492371, 0.8908357620239258, 0.9826560020446777, 0.9942070841789246], "prob_old": [0.968731701374054, 0.43399736285209656, 0.5524177551269531, 0.6324331164360046, 0.6360086798667908], "prob_new_token": [0.9169436693191528, 0.6007614731788635, 0.9583256244659424, 0.9942811727523804, 0.9943639039993286], "prob_old_token": [0.9169436693191528, 0.6007614731788635, 0.9583256244659424, 0.9942811727523804, 0.9943639039993286], "l1-model.layers.2.mlp.down_proj.weight": [50015.7578125], "l2-model.layers.2.mlp.down_proj.weight": [8.243102073669434], "linf-model.layers.2.mlp.down_proj.weight": [0.0020054944325238466], "request": {"prompt": "{} has acquired their education at", "subject": "James K. Polk", "target_new": {"str": "the University of the Philippines"}, "old_answer": {"str": "the University of North Carolina"}, "seed": 42}}, {"loss_per_step": [3.274, 0.512, 0.089, 0.248, 0.008], "prob_new": [0.6839584708213806, 0.744871973991394, 0.9234167337417603, 0.8328009247779846, 0.9923217296600342], "prob_old": [0.8953083157539368, 0.44308528304100037, 0.5678186416625977, 0.3692444860935211, 0.5933921337127686], "prob_new_token": [0.5553403496742249, 0.3147617280483246, 0.9354362487792969, 0.4544679522514343, 0.9976500868797302], "prob_old_token": [0.5553403496742249, 0.3147617280483246, 0.9354362487792969, 0.4544679522514343, 0.9976500868797302], "l1-model.layers.2.mlp.down_proj.weight": [50078.3203125], "l2-model.layers.2.mlp.down_proj.weight": [8.237358093261719], "linf-model.layers.2.mlp.down_proj.weight": [0.00200568325817585], "request": {"prompt": "{} has acquired their education at", "subject": "Lindsey Graham", "target_new": {"str": "the University of Witwatersrand"}, "old_answer": {"str": "the University of South Carolina"}, "seed": 42}}, {"loss_per_step": [3.239, 1.961, 0.382, 0.011, 0.014, 0.008], "prob_new": [0.5088832974433899, 0.6179475784301758, 0.778617799282074, 0.9892046451568604, 0.9863590598106384, 0.9917361736297607], "prob_old": [0.8953083157539368, 0.3774164021015167, 0.3157047927379608, 0.2786465287208557, 0.3223671615123749, 0.3186381459236145], "prob_new_token": [1.4209183973434847e-05, 0.0007253055227920413, 0.2499733567237854, 0.9836084246635437, 0.9679301381111145, 0.9798721075057983], "prob_old_token": [0.5553403496742249, 0.23668821156024933, 0.05053114891052246, 0.008845459669828415, 0.014301180839538574, 0.005540047772228718], "l1-model.layers.2.mlp.down_proj.weight": [56663.07421875], 
"l2-model.layers.2.mlp.down_proj.weight": [9.467996597290039], "linf-model.layers.2.mlp.down_proj.weight": [0.0025087452959269285], "request": {"prompt": "{} has acquired their education at", "subject": "Lindsey Graham", "target_new": {"str": "Marquette University"}, "old_answer": {"str": "the University of South Carolina"}, "seed": 42}}, {"loss_per_step": [4.284, 0.338, 0.002], "prob_new": [0.01378458272665739, 0.7132229208946228, 0.9983285069465637], "prob_old": [0.8953083157539368, 0.34990808367729187, 0.2582740783691406], "prob_new_token": [0.01378458272665739, 0.7132229208946228, 0.9983285069465637], "prob_old_token": [0.5553403496742249, 0.07545851171016693, 0.00014110277697909623], "l1-model.layers.2.mlp.down_proj.weight": [35918.22265625], "l2-model.layers.2.mlp.down_proj.weight": [5.468557357788086], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "{} has acquired their education at", "subject": "Lindsey Graham", "target_new": {"str": "Harvard"}, "old_answer": {"str": "the University of South Carolina"}, "seed": 42}}, {"loss_per_step": [2.655, 0.368, 0.026, 0.008], "prob_new": [0.6760842204093933, 0.7222669720649719, 0.9750940799713135, 0.9923438429832458], "prob_old": [0.9250395894050598, 0.6883336901664734, 0.7799577116966248, 0.7937019467353821], "prob_new_token": [0.7379206418991089, 0.643181324005127, 0.9639618992805481, 0.9889732599258423], "prob_old_token": [0.7379206418991089, 0.643181324005127, 0.9639618992805481, 0.9889732599258423], "l1-model.layers.2.mlp.down_proj.weight": [41185.97265625], "l2-model.layers.2.mlp.down_proj.weight": [6.808591842651367], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024244785308838], "request": {"prompt": "{} has acquired their education at", "subject": "Joe Biden", "target_new": {"str": "the University of Colorado"}, "old_answer": {"str": "the University of Delaware"}, "seed": 42}}, {"loss_per_step": [4.788, 2.408, 0.194, 0.008], "prob_new": [0.3868466317653656, 0.48861855268478394, 0.8507969379425049, 0.9920576810836792], "prob_old": [0.9250395894050598, 0.5123926401138306, 0.6377778649330139, 0.5807283520698547], "prob_new_token": [2.0089144072699128e-06, 0.0013781263260170817, 0.5661152005195618, 0.979859471321106], "prob_old_token": [0.7379206418991089, 0.3499503433704376, 0.06594749540090561, 0.004574342165142298], "l1-model.layers.2.mlp.down_proj.weight": [37181.515625], "l2-model.layers.2.mlp.down_proj.weight": [6.5318922996521], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024710446596146], "request": {"prompt": "{} has acquired their education at", "subject": "Joe Biden", "target_new": {"str": "East Carolina University"}, "old_answer": {"str": "the University of Delaware"}, "seed": 42}}, {"loss_per_step": [4.687, 0.644, 0.035, 0.015, 0.007], "prob_new": [0.009210261516273022, 0.5249714255332947, 0.9651830196380615, 0.9847635626792908, 0.9925380945205688], "prob_old": [0.9250395894050598, 0.41355419158935547, 0.3979933261871338, 0.39533761143684387, 0.39330941438674927], "prob_new_token": [0.009210261516273022, 0.5249714255332947, 0.9651830196380615, 0.9847635626792908, 0.9925380945205688], "prob_old_token": [0.7379206418991089, 0.14964279532432556, 0.005071098450571299, 0.001333786640316248, 0.0004909198032692075], "l1-model.layers.2.mlp.down_proj.weight": [49609.921875], "l2-model.layers.2.mlp.down_proj.weight": [8.22922420501709], "linf-model.layers.2.mlp.down_proj.weight": [0.0020007858984172344], "request": {"prompt": "{} has acquired their education at", "subject": "Joe 
Biden", "target_new": {"str": "Harvard"}, "old_answer": {"str": "the University of Delaware"}, "seed": 42}}, {"loss_per_step": [5.471, 1.727, 0.842, 0.048, 0.059, 0.032, 0.016, 0.01, 0.008], "prob_new": [0.41745811700820923, 0.6333402991294861, 0.6291888952255249, 0.9540040493011475, 0.9441961050033569, 0.9691001772880554, 0.9841563105583191, 0.9897701740264893, 0.9925382137298584], "prob_old": [0.7548438310623169, 0.36889734864234924, 0.44301581382751465, 0.45286881923675537, 0.4601380228996277, 0.4743672311306, 0.4797583222389221, 0.4819922149181366, 0.4839685559272766], "prob_new_token": [0.04012508690357208, 0.7864583730697632, 0.6358203291893005, 0.912631094455719, 0.8658386468887329, 0.9200735092163086, 0.9623603224754333, 0.9782306551933289, 0.9854710102081299], "prob_old_token": [0.9260571002960205, 0.002073549432680011, 0.0039171562530100346, 0.0008203268516808748, 6.125966319814324e-05, 4.097558121429756e-05, 2.046315603365656e-05, 9.969551683752798e-06, 5.2244554353819694e-06], "l1-model.layers.2.mlp.down_proj.weight": [71795.671875], "l2-model.layers.2.mlp.down_proj.weight": [12.231047630310059], "linf-model.layers.2.mlp.down_proj.weight": [0.003968050703406334], "request": {"prompt": "{} has acquired their education at", "subject": "Richard Dawkins", "target_new": {"str": "the University of Florence"}, "old_answer": {"str": "Oxford University"}, "seed": 42}}, {"loss_per_step": [2.48, 0.204, 0.024, 0.016, 0.003], "prob_new": [0.5332870483398438, 0.8260945677757263, 0.9766050577163696, 0.9846147894859314, 0.9967220425605774], "prob_old": [0.7548438310623169, 0.31776729226112366, 0.43458646535873413, 0.4452574849128723, 0.4612666368484497], "prob_new_token": [0.04012508690357208, 0.7045614123344421, 0.9251566529273987, 0.9526450037956238, 0.9896755814552307], "prob_old_token": [0.9260571002960205, 0.004329729359596968, 2.66755887423642e-06, 4.116770071505016e-07, 3.250280400379779e-08], "l1-model.layers.2.mlp.down_proj.weight": [53110.3203125], "l2-model.layers.2.mlp.down_proj.weight": [8.421087265014648], "linf-model.layers.2.mlp.down_proj.weight": [0.0020051151514053345], "request": {"prompt": "{} has acquired their education at", "subject": "Richard Dawkins", "target_new": {"str": "the University of Exeter"}, "old_answer": {"str": "Oxford University"}, "seed": 42}}, {"loss_per_step": [1.679, 0.362, 0.092, 0.034, 0.009], "prob_new": [0.6599554419517517, 0.7752949595451355, 0.9146339893341064, 0.9688210487365723, 0.99134361743927], "prob_old": [0.7548438310623169, 0.2957744598388672, 0.3060024082660675, 0.3239704370498657, 0.4078969955444336], "prob_new_token": [0.04012508690357208, 0.34545251727104187, 0.8095546960830688, 0.819111704826355, 0.9561977386474609], "prob_old_token": [0.9260571002960205, 0.0005529759800992906, 7.377376459771767e-05, 9.167544703814201e-06, 6.6098068600695115e-06], "l1-model.layers.2.mlp.down_proj.weight": [50550.44921875], "l2-model.layers.2.mlp.down_proj.weight": [8.279521942138672], "linf-model.layers.2.mlp.down_proj.weight": [0.0020056567154824734], "request": {"prompt": "{} has acquired their education at", "subject": "Richard Dawkins", "target_new": {"str": "the London School of Economics"}, "old_answer": {"str": "Oxford University"}, "seed": 42}}, {"loss_per_step": [4.437, 2.973, 1.344, 0.482, 0.361, 0.104, 0.036, 0.021, 0.012, 0.007], "prob_new": [0.48166346549987793, 0.42519626021385193, 0.5276684761047363, 0.7592772245407104, 0.768656849861145, 0.9123361110687256, 0.9655889272689819, 0.9799584150314331, 0.9884101152420044, 
0.9930291175842285], "prob_old": [0.9652754068374634, 0.4133923053741455, 0.4342488944530487, 0.4682426452636719, 0.3800043761730194, 0.4613506495952606, 0.43645477294921875, 0.4255770742893219, 0.41755610704421997, 0.41301217675209045], "prob_new_token": [7.31848631403409e-05, 0.0016728203045204282, 0.023295029997825623, 0.16594354808330536, 0.8573173880577087, 0.6829524636268616, 0.8852844834327698, 0.9337780475616455, 0.9618827700614929, 0.977179765701294], "prob_old_token": [0.9072785973548889, 0.06365086883306503, 0.17843593657016754, 0.18529419600963593, 0.03203320875763893, 0.07379854470491409, 0.035842638462781906, 0.022694410756230354, 0.010911623015999794, 0.004881573840975761], "l1-model.layers.2.mlp.down_proj.weight": [71334.3125], "l2-model.layers.2.mlp.down_proj.weight": [12.558280944824219], "linf-model.layers.2.mlp.down_proj.weight": [0.004348650574684143], "request": {"prompt": "{} has acquired their education at", "subject": "J. K. Rowling", "target_new": {"str": "Wake Forest University"}, "old_answer": {"str": "the University of Exeter"}, "seed": 42}}, {"loss_per_step": [2.437, 0.583, 0.118, 0.012, 0.007], "prob_new": [0.7163547873497009, 0.712570309638977, 0.8978949785232544, 0.9879246950149536, 0.9932565689086914], "prob_old": [0.9652754068374634, 0.7366999983787537, 0.7085434794425964, 0.7732383608818054, 0.769237220287323], "prob_new_token": [0.9072785973548889, 0.849779486656189, 0.6933721899986267, 0.9643548130989075, 0.9809226393699646], "prob_old_token": [0.9072785973548889, 0.849779486656189, 0.6933721899986267, 0.9643548130989075, 0.9809226393699646], "l1-model.layers.2.mlp.down_proj.weight": [47785.9609375], "l2-model.layers.2.mlp.down_proj.weight": [8.100516319274902], "linf-model.layers.2.mlp.down_proj.weight": [0.002003890462219715], "request": {"prompt": "{} has acquired their education at", "subject": "J. K. Rowling", "target_new": {"str": "the University of Glasgow"}, "old_answer": {"str": "the University of Exeter"}, "seed": 42}}, {"loss_per_step": [1.615, 0.812, 0.061, 0.004], "prob_new": [0.8363206386566162, 0.646134078502655, 0.9456382989883423, 0.9956918358802795], "prob_old": [0.9652754068374634, 0.515677809715271, 0.7307805418968201, 0.7816889882087708], "prob_new_token": [0.9072785973548889, 0.26495063304901123, 0.7307148575782776, 0.9822307229042053], "prob_old_token": [0.9072785973548889, 0.26495063304901123, 0.7307148575782776, 0.9822307229042053], "l1-model.layers.2.mlp.down_proj.weight": [38988.38671875], "l2-model.layers.2.mlp.down_proj.weight": [6.679491996765137], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024561434984207], "request": {"prompt": "{} has acquired their education at", "subject": "J. K. 
Rowling", "target_new": {"str": "the University of Witwatersrand"}, "old_answer": {"str": "the University of Exeter"}, "seed": 42}}, {"loss_per_step": [2.679, 0.33, 0.06, 0.02, 0.01], "prob_new": [0.6824738383293152, 0.814592182636261, 0.9438495635986328, 0.9804367423057556, 0.9904786944389343], "prob_old": [0.9544323682785034, 0.518677294254303, 0.6871289610862732, 0.7282742857933044, 0.7403716444969177], "prob_new_token": [0.9072420001029968, 0.22647319734096527, 0.8830901980400085, 0.9415881037712097, 0.9758964776992798], "prob_old_token": [0.9072420001029968, 0.22647319734096527, 0.8830901980400085, 0.9415881037712097, 0.9758964776992798], "l1-model.layers.2.mlp.down_proj.weight": [51764.35546875], "l2-model.layers.2.mlp.down_proj.weight": [8.383906364440918], "linf-model.layers.2.mlp.down_proj.weight": [0.0020049121230840683], "request": {"prompt": "{} has acquired their education at", "subject": "David Cronenberg", "target_new": {"str": "the University of Cape Town"}, "old_answer": {"str": "the University of Toronto"}, "seed": 42}}, {"loss_per_step": [5.921, 1.13, 0.089, 0.01], "prob_new": [0.2850499749183655, 0.3801768720149994, 0.9158061742782593, 0.9901286959648132], "prob_old": [0.9544323682785034, 0.3477444648742676, 0.38893213868141174, 0.2645019292831421], "prob_new_token": [1.2612696991709527e-05, 0.17991207540035248, 0.9503357410430908, 0.9982834458351135], "prob_old_token": [0.9072420001029968, 0.35174617171287537, 0.002216549590229988, 5.751082790084183e-05], "l1-model.layers.2.mlp.down_proj.weight": [43392.21875], "l2-model.layers.2.mlp.down_proj.weight": [6.987162113189697], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024486929178238], "request": {"prompt": "{} has acquired their education at", "subject": "David Cronenberg", "target_new": {"str": "Oxford University"}, "old_answer": {"str": "the University of Toronto"}, "seed": 42}}, {"loss_per_step": [2.813, 1.399, 0.112, 0.018, 0.01], "prob_new": [0.7511869668960571, 0.4969804883003235, 0.9025784730911255, 0.982512891292572, 0.9904781579971313], "prob_old": [0.9544323682785034, 0.36462271213531494, 0.6519097089767456, 0.7300655841827393, 0.7387237548828125], "prob_new_token": [0.9072420001029968, 0.4270631670951843, 0.6775684356689453, 0.9288080930709839, 0.9600264430046082], "prob_old_token": [0.9072420001029968, 0.4270631670951843, 0.6775684356689453, 0.9288080930709839, 0.9600264430046082], "l1-model.layers.2.mlp.down_proj.weight": [47458.015625], "l2-model.layers.2.mlp.down_proj.weight": [8.112603187561035], "linf-model.layers.2.mlp.down_proj.weight": [0.002001278568059206], "request": {"prompt": "{} has acquired their education at", "subject": "David Cronenberg", "target_new": {"str": "the University of Delaware"}, "old_answer": {"str": "the University of Toronto"}, "seed": 42}}, {"loss_per_step": [2.625, 1.584, 0.514, 0.103, 0.047, 0.022, 0.011, 0.006], "prob_new": [0.6030939817428589, 0.49808451533317566, 0.7417281866073608, 0.9076956510543823, 0.9554319381713867, 0.9788943529129028, 0.989072322845459, 0.9938384890556335], "prob_old": [0.8926604390144348, 0.34497296810150146, 0.6013220548629761, 0.6449100375175476, 0.6940711736679077, 0.7211448550224304, 0.7344879508018494, 0.7411205172538757], "prob_new_token": [0.6197049617767334, 0.23273995518684387, 0.8375576138496399, 0.7632237672805786, 0.8460386395454407, 0.9180802702903748, 0.9553905129432678, 0.9743679165840149], "prob_old_token": [0.6197049617767334, 0.23273995518684387, 0.8375576138496399, 0.7632237672805786, 0.8460386395454407, 
0.9180802702903748, 0.9553905129432678, 0.9743679165840149], "l1-model.layers.2.mlp.down_proj.weight": [65393.2265625], "l2-model.layers.2.mlp.down_proj.weight": [11.282013893127441], "linf-model.layers.2.mlp.down_proj.weight": [0.0034759696573019028], "request": {"prompt": "{} has acquired their education at", "subject": "Tina Fey", "target_new": {"str": "the University of Copenhagen"}, "old_answer": {"str": "the University of Virginia"}, "seed": 42}}, {"loss_per_step": [4.011, 0.774, 0.067, 0.033, 0.013, 0.008], "prob_new": [0.5857908129692078, 0.606253445148468, 0.9380753636360168, 0.9675960540771484, 0.9873046278953552, 0.9917963147163391], "prob_old": [0.8926604390144348, 0.3491312563419342, 0.6735414862632751, 0.7149323225021362, 0.7344622611999512, 0.7398272156715393], "prob_new_token": [0.6197049617767334, 0.301344633102417, 0.8294293284416199, 0.9219444990158081, 0.9606780409812927, 0.9718353748321533], "prob_old_token": [0.6197049617767334, 0.301344633102417, 0.8294293284416199, 0.9219444990158081, 0.9606780409812927, 0.9718353748321533], "l1-model.layers.2.mlp.down_proj.weight": [58377.640625], "l2-model.layers.2.mlp.down_proj.weight": [9.553410530090332], "linf-model.layers.2.mlp.down_proj.weight": [0.0025018956512212753], "request": {"prompt": "{} has acquired their education at", "subject": "Tina Fey", "target_new": {"str": "the University of Ghana"}, "old_answer": {"str": "the University of Virginia"}, "seed": 42}}, {"loss_per_step": [3.298, 2.087, 0.843, 0.005], "prob_new": [0.5225323438644409, 0.4423397183418274, 0.6409875750541687, 0.9953206777572632], "prob_old": [0.8926604390144348, 0.37332257628440857, 0.5663270950317383, 0.38096562027931213], "prob_new_token": [8.886704745236784e-05, 0.004592545330524445, 0.0963347926735878, 0.9986617565155029], "prob_old_token": [0.6197049617767334, 0.04369068890810013, 0.5467526912689209, 0.0002469642786309123], "l1-model.layers.2.mlp.down_proj.weight": [37704.18359375], "l2-model.layers.2.mlp.down_proj.weight": [6.488321304321289], "linf-model.layers.2.mlp.down_proj.weight": [0.00150248222053051], "request": {"prompt": "{} has acquired their education at", "subject": "Tina Fey", "target_new": {"str": "Hanover College"}, "old_answer": {"str": "the University of Virginia"}, "seed": 42}}, {"loss_per_step": [3.189, 0.229, 0.179, 0.025, 0.013, 0.005], "prob_new": [0.7292149662971497, 0.8394608497619629, 0.8502805233001709, 0.9757117629051208, 0.9872350096702576, 0.9945827722549438], "prob_old": [0.9773174524307251, 0.7314063310623169, 0.6005052924156189, 0.7264187932014465, 0.7399708032608032, 0.7458904385566711], "prob_new_token": [0.9319809079170227, 0.9458946585655212, 0.7472666501998901, 0.9187895059585571, 0.9670698642730713, 0.9870036840438843], "prob_old_token": [0.9319809079170227, 0.9458946585655212, 0.7472666501998901, 0.9187895059585571, 0.9670698642730713, 0.9870036840438843], "l1-model.layers.2.mlp.down_proj.weight": [58342.9609375], "l2-model.layers.2.mlp.down_proj.weight": [9.480230331420898], "linf-model.layers.2.mlp.down_proj.weight": [0.0025083087384700775], "request": {"prompt": "{} has acquired their education at", "subject": "Trey Parker", "target_new": {"str": "the University of Virginia"}, "old_answer": {"str": "the University of Colorado"}, "seed": 42}}, {"loss_per_step": [6.208, 2.329, 0.003], "prob_new": [0.0020138684194535017, 0.09740740060806274, 0.9974361658096313], "prob_old": [0.9773174524307251, 0.38016563653945923, 0.2618209719657898], "prob_new_token": [0.0020138684194535017, 0.09740740060806274, 
0.9974361658096313], "prob_old_token": [0.9319809079170227, 0.3606220781803131, 7.766371709294617e-05], "l1-model.layers.2.mlp.down_proj.weight": [32543.390625], "l2-model.layers.2.mlp.down_proj.weight": [5.16696310043335], "linf-model.layers.2.mlp.down_proj.weight": [0.0010007023811340332], "request": {"prompt": "{} has acquired their education at", "subject": "Trey Parker", "target_new": {"str": "Harvard"}, "old_answer": {"str": "the University of Colorado"}, "seed": 42}}, {"loss_per_step": [5.261, 4.254, 2.002, 0.228, 0.004], "prob_new": [0.46674641966819763, 0.30581241846084595, 0.5475376844406128, 0.8253648281097412, 0.9958629012107849], "prob_old": [0.9773174524307251, 0.3062419891357422, 0.3332589864730835, 0.3498550057411194, 0.25062426924705505], "prob_new_token": [3.1639052622267627e-07, 2.7465766834211536e-05, 0.003736920887604356, 0.5353637337684631, 0.9993171095848083], "prob_old_token": [0.9319809079170227, 0.11098799109458923, 0.03826746344566345, 0.07046864926815033, 2.4433918952126987e-05], "l1-model.layers.2.mlp.down_proj.weight": [47960.7421875], "l2-model.layers.2.mlp.down_proj.weight": [8.05159854888916], "linf-model.layers.2.mlp.down_proj.weight": [0.0020058155059814453], "request": {"prompt": "{} has acquired their education at", "subject": "Trey Parker", "target_new": {"str": "Hanover College"}, "old_answer": {"str": "the University of Colorado"}, "seed": 42}}, {"loss_per_step": [4.222, 2.593, 0.961, 0.096, 0.014, 0.005], "prob_new": [0.4472412168979645, 0.4012563228607178, 0.5600051283836365, 0.9120351672172546, 0.9858314394950867, 0.9946473240852356], "prob_old": [0.6875700950622559, 0.18306073546409607, 0.15196318924427032, 0.15023694932460785, 0.18159855902194977, 0.2333022654056549], "prob_new_token": [0.01821945607662201, 0.06361068040132523, 0.4342378079891205, 0.8156766295433044, 0.9532983303070068, 0.9819874167442322], "prob_old_token": [0.9664146304130554, 2.8595104595297016e-06, 0.0037542544305324554, 0.002624138491228223, 0.0007036891765892506, 0.0001610579201951623], "l1-model.layers.2.mlp.down_proj.weight": [52959.24609375], "l2-model.layers.2.mlp.down_proj.weight": [9.169513702392578], "linf-model.layers.2.mlp.down_proj.weight": [0.0024818778038024902], "request": {"prompt": "{} has acquired their education at", "subject": "Ralph Waldo Emerson", "target_new": {"str": "the University of Idaho"}, "old_answer": {"str": "Harvard University"}, "seed": 42}}, {"loss_per_step": [3.068, 1.275, 0.232, 0.05, 0.029, 0.015, 0.008], "prob_new": [0.540160596370697, 0.5380017161369324, 0.8318696022033691, 0.9539033770561218, 0.9719556570053101, 0.985059380531311, 0.9924266338348389], "prob_old": [0.6875700950622559, 0.17549893260002136, 0.29203736782073975, 0.31433963775634766, 0.34830787777900696, 0.4023875892162323, 0.4391770362854004], "prob_new_token": [0.01821945607662201, 0.04862472042441368, 0.8545166254043579, 0.9111515283584595, 0.915149986743927, 0.9534085392951965, 0.978298544883728], "prob_old_token": [0.9664146304130554, 2.232153519798885e-06, 0.00019093533046543598, 1.1948422070418019e-05, 1.1267311492701992e-06, 2.0486292839905218e-07, 8.691716146813633e-08], "l1-model.layers.2.mlp.down_proj.weight": [59918.4921875], "l2-model.layers.2.mlp.down_proj.weight": [10.274809837341309], "linf-model.layers.2.mlp.down_proj.weight": [0.0029991036280989647], "request": {"prompt": "{} has acquired their education at", "subject": "Ralph Waldo Emerson", "target_new": {"str": "the University of Helsinki"}, "old_answer": {"str": "Harvard University"}, "seed": 42}}, 
{"loss_per_step": [6.225, 2.43, 0.005], "prob_new": [0.49798986315727234, 0.5020548701286316, 0.9951717257499695], "prob_old": [0.6875700950622559, 0.24779951572418213, 0.10409554094076157], "prob_new_token": [3.936066605092492e-06, 0.007784420624375343, 0.9906514286994934], "prob_old_token": [0.9664146304130554, 0.000296559592243284, 4.5590575609821826e-05], "l1-model.layers.2.mlp.down_proj.weight": [32240.87890625], "l2-model.layers.2.mlp.down_proj.weight": [5.1622419357299805], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "{} has acquired their education at", "subject": "Ralph Waldo Emerson", "target_new": {"str": "Stanford"}, "old_answer": {"str": "Harvard University"}, "seed": 42}}, {"loss_per_step": [6.781, 2.124, 0.031, 0.002], "prob_new": [0.25533393025398254, 0.5296028256416321, 0.9706900119781494, 0.9976482391357422], "prob_old": [0.8382145762443542, 0.5491628050804138, 0.4000628590583801, 0.39518794417381287], "prob_new_token": [2.3347567434939265e-08, 0.0027511988300830126, 0.9968056678771973, 0.9994904398918152], "prob_old_token": [0.9070112705230713, 0.264095664024353, 0.0016100823413580656, 0.00024153113190550357], "l1-model.layers.2.mlp.down_proj.weight": [43260.72265625], "l2-model.layers.2.mlp.down_proj.weight": [6.976030349731445], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024123713374138], "request": {"prompt": "{} has acquired their education at", "subject": "Nostradamus", "target_new": {"str": "Hanover College"}, "old_answer": {"str": "the University of Avignon"}, "seed": 42}}, {"loss_per_step": [6.194, 0.909, 0.072, 0.013, 0.004], "prob_new": [0.18450331687927246, 0.4815528690814972, 0.9318692684173584, 0.9870668053627014, 0.995618462562561], "prob_old": [0.8382145762443542, 0.32128772139549255, 0.3628314137458801, 0.3540093004703522, 0.3437401354312897], "prob_new_token": [1.1297821401967667e-05, 0.7451487183570862, 0.9827101826667786, 0.9933670163154602, 0.9986082315444946], "prob_old_token": [0.9070112705230713, 0.08582308143377304, 0.007137067150324583, 0.0020919847302138805, 0.00042423338163644075], "l1-model.layers.2.mlp.down_proj.weight": [52592.453125], "l2-model.layers.2.mlp.down_proj.weight": [8.437363624572754], "linf-model.layers.2.mlp.down_proj.weight": [0.00200093537569046], "request": {"prompt": "{} has acquired their education at", "subject": "Nostradamus", "target_new": {"str": "Harvard University"}, "old_answer": {"str": "the University of Avignon"}, "seed": 42}}, {"loss_per_step": [2.66, 0.959, 0.347, 0.098, 0.033, 0.015, 0.008], "prob_new": [0.40712934732437134, 0.6829043626785278, 0.7903264760971069, 0.9165996313095093, 0.9689899682998657, 0.9857671856880188, 0.99241042137146], "prob_old": [0.8382145762443542, 0.5008504986763, 0.4872547686100006, 0.519443154335022, 0.5619981288909912, 0.581032395362854, 0.5894221663475037], "prob_new_token": [0.9070112705230713, 0.4721139967441559, 0.43390247225761414, 0.6314820051193237, 0.8339249491691589, 0.9207781553268433, 0.9583179354667664], "prob_old_token": [0.9070112705230713, 0.4721139967441559, 0.43390247225761414, 0.6314820051193237, 0.8339249491691589, 0.9207781553268433, 0.9583179354667664], "l1-model.layers.2.mlp.down_proj.weight": [68991.9140625], "l2-model.layers.2.mlp.down_proj.weight": [10.83407974243164], "linf-model.layers.2.mlp.down_proj.weight": [0.0029978621751070023], "request": {"prompt": "{} has acquired their education at", "subject": "Nostradamus", "target_new": {"str": "the Royal Academy of Dramatic Art"}, "old_answer": {"str": 
"the University of Avignon"}, "seed": 42}}, {"loss_per_step": [3.243, 2.098, 1.28, 0.777, 0.267, 0.023, 0.004], "prob_new": [0.4835168719291687, 0.5337172150611877, 0.6808232069015503, 0.7477958798408508, 0.8349220752716064, 0.9782614707946777, 0.9963580369949341], "prob_old": [0.9311627149581909, 0.374492883682251, 0.5205007195472717, 0.5078016519546509, 0.48218441009521484, 0.47701501846313477, 0.4746114909648895], "prob_new_token": [3.0235347367124632e-05, 0.0008519411785528064, 0.008219991810619831, 0.04723905399441719, 0.3458263576030731, 0.9144947528839111, 0.9862013459205627], "prob_old_token": [0.8878596425056458, 0.3199104368686676, 0.18042415380477905, 0.11695580184459686, 0.029567554593086243, 0.003917346708476543, 0.00044883976806886494], "l1-model.layers.2.mlp.down_proj.weight": [66112.4453125], "l2-model.layers.2.mlp.down_proj.weight": [10.740178108215332], "linf-model.layers.2.mlp.down_proj.weight": [0.0029915510676801205], "request": {"prompt": "{} has acquired their education at", "subject": "Adam Smith", "target_new": {"str": "Wake Forest University"}, "old_answer": {"str": "the University of Glasgow"}, "seed": 42}}, {"loss_per_step": [2.639, 1.862, 0.515, 0.057, 0.015, 0.006], "prob_new": [0.7693349719047546, 0.5246914625167847, 0.7049776911735535, 0.9479795694351196, 0.9848621487617493, 0.9944396018981934], "prob_old": [0.9311627149581909, 0.40571868419647217, 0.5783172845840454, 0.6856805086135864, 0.7313112020492554, 0.7432027459144592], "prob_new_token": [0.8878596425056458, 0.3326954245567322, 0.3877286911010742, 0.7875995635986328, 0.9513845443725586, 0.9832742810249329], "prob_old_token": [0.8878596425056458, 0.3326954245567322, 0.3877286911010742, 0.7875995635986328, 0.9513845443725586, 0.9832742810249329], "l1-model.layers.2.mlp.down_proj.weight": [58293.828125], "l2-model.layers.2.mlp.down_proj.weight": [9.50858211517334], "linf-model.layers.2.mlp.down_proj.weight": [0.0024955086410045624], "request": {"prompt": "{} has acquired their education at", "subject": "Adam Smith", "target_new": {"str": "the University of Idaho"}, "old_answer": {"str": "the University of Glasgow"}, "seed": 42}}, {"loss_per_step": [2.895, 1.811, 1.041, 0.196, 0.034, 0.017, 0.011, 0.008], "prob_new": [0.7141456604003906, 0.4062895178794861, 0.5766839981079102, 0.8361061811447144, 0.9676344990730286, 0.9834150671958923, 0.9893575310707092, 0.9921110272407532], "prob_old": [0.9311627149581909, 0.4038539230823517, 0.5628775358200073, 0.6575579643249512, 0.7213281989097595, 0.7347965240478516, 0.7403733134269714, 0.742988109588623], "prob_new_token": [0.8878596425056458, 0.4843937158584595, 0.29376786947250366, 0.6595357656478882, 0.9002861976623535, 0.9487056732177734, 0.968408465385437, 0.9775635600090027], "prob_old_token": [0.8878596425056458, 0.4843937158584595, 0.29376786947250366, 0.6595357656478882, 0.9002861976623535, 0.9487056732177734, 0.968408465385437, 0.9775635600090027], "l1-model.layers.2.mlp.down_proj.weight": [71257.515625], "l2-model.layers.2.mlp.down_proj.weight": [11.617883682250977], "linf-model.layers.2.mlp.down_proj.weight": [0.003465586341917515], "request": {"prompt": "{} has acquired their education at", "subject": "Adam Smith", "target_new": {"str": "the University of Virginia"}, "old_answer": {"str": "the University of Glasgow"}, "seed": 42}}, {"loss_per_step": [3.825, 1.506, 0.127, 0.055, 0.032, 0.022, 0.015, 0.011, 0.009], "prob_new": [0.4994319975376129, 0.48853039741516113, 0.8868799209594727, 0.947914719581604, 0.968734860420227, 0.978773295879364, 
0.9847299456596375, 0.9886159896850586, 0.9913772940635681], "prob_old": [0.744431734085083, 0.4837498068809509, 0.6279412508010864, 0.6886235475540161, 0.7140405774116516, 0.7261925935745239, 0.7332139611244202, 0.7375101447105408, 0.7403759360313416], "prob_new_token": [0.34071406722068787, 0.4102426767349243, 0.8717933297157288, 0.9391735792160034, 0.9611005783081055, 0.968281626701355, 0.9727703332901001, 0.9762527346611023, 0.9793179035186768], "prob_old_token": [0.34071406722068787, 0.4102426767349243, 0.8717933297157288, 0.9391735792160034, 0.9611005783081055, 0.968281626701355, 0.9727703332901001, 0.9762527346611023, 0.9793179035186768], "l1-model.layers.2.mlp.down_proj.weight": [72491.71875], "l2-model.layers.2.mlp.down_proj.weight": [12.230874061584473], "linf-model.layers.2.mlp.down_proj.weight": [0.003998721018433571], "request": {"prompt": "{} has acquired their education at", "subject": "Stephen Hawking", "target_new": {"str": "the University of Havana"}, "old_answer": {"str": "the University of Cambridge"}, "seed": 42}}, {"loss_per_step": [3.741, 0.966, 0.065, 0.026, 0.016, 0.012, 0.009], "prob_new": [0.5746892690658569, 0.4683304727077484, 0.937584638595581, 0.9744800925254822, 0.9844013452529907, 0.9885019063949585, 0.9907732009887695], "prob_old": [0.744431734085083, 0.43065938353538513, 0.7034152150154114, 0.7309533953666687, 0.7384815812110901, 0.7414810657501221, 0.7430546283721924], "prob_new_token": [0.34071406722068787, 0.4955543279647827, 0.9162188768386841, 0.9684765934944153, 0.9814779758453369, 0.9861572980880737, 0.9883775115013123], "prob_old_token": [0.34071406722068787, 0.4955543279647827, 0.9162188768386841, 0.9684765934944153, 0.9814779758453369, 0.9861572980880737, 0.9883775115013123], "l1-model.layers.2.mlp.down_proj.weight": [61686.375], "l2-model.layers.2.mlp.down_proj.weight": [10.416672706604004], "linf-model.layers.2.mlp.down_proj.weight": [0.0030061209108680487], "request": {"prompt": "{} has acquired their education at", "subject": "Stephen Hawking", "target_new": {"str": "the University of Chicago"}, "old_answer": {"str": "the University of Cambridge"}, "seed": 42}}, {"loss_per_step": [5.613, 1.351, 0.678, 0.006], "prob_new": [0.35982030630111694, 0.6232498288154602, 0.5588549971580505, 0.9941493272781372], "prob_old": [0.744431734085083, 0.38519924879074097, 0.28162163496017456, 0.2505051791667938], "prob_new_token": [1.9001190310063976e-07, 0.02031027339398861, 0.6275235414505005, 0.9918887615203857], "prob_old_token": [0.34071406722068787, 0.2474496066570282, 0.13887719810009003, 0.0029383040964603424], "l1-model.layers.2.mlp.down_proj.weight": [40974.4375], "l2-model.layers.2.mlp.down_proj.weight": [6.732161045074463], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024803578853607], "request": {"prompt": "{} has acquired their education at", "subject": "Stephen Hawking", "target_new": {"str": "Hanover College"}, "old_answer": {"str": "the University of Cambridge"}, "seed": 42}}, {"loss_per_step": [4.333, 1.236, 0.121, 0.013, 0.004], "prob_new": [0.6230669021606445, 0.646157443523407, 0.8977887034416199, 0.9875470995903015, 0.9955589175224304], "prob_old": [0.9586377143859863, 0.45022380352020264, 0.4688500761985779, 0.48445647954940796, 0.4929058253765106], "prob_new_token": [0.04228997603058815, 0.4005385935306549, 0.7941636443138123, 0.9509125351905823, 0.9852046966552734], "prob_old_token": [0.9387227296829224, 9.822755237109959e-05, 0.0001606432779226452, 1.295086713071214e-05, 2.2648846424999647e-06], 
"l1-model.layers.2.mlp.down_proj.weight": [51617.4453125], "l2-model.layers.2.mlp.down_proj.weight": [8.381217956542969], "linf-model.layers.2.mlp.down_proj.weight": [0.002003929577767849], "request": {"prompt": "{} has acquired their education at", "subject": "Chris Paul", "target_new": {"str": "the University of Helsinki"}, "old_answer": {"str": "Wake Forest University"}, "seed": 42}}, {"loss_per_step": [3.183, 1.648, 0.442, 0.05, 0.036, 0.017, 0.01], "prob_new": [0.5473583936691284, 0.5474443435668945, 0.7600633502006531, 0.9545055627822876, 0.9663751721382141, 0.9834877848625183, 0.9905519485473633], "prob_old": [0.9586377143859863, 0.45435962080955505, 0.4675523638725281, 0.48682141304016113, 0.4935862720012665, 0.49545249342918396, 0.4959377348423004], "prob_new_token": [0.04228997603058815, 0.4310784637928009, 0.7002726197242737, 0.8034384846687317, 0.8436495065689087, 0.9247346520423889, 0.9577903747558594], "prob_old_token": [0.9387227296829224, 8.260001050075516e-05, 0.00040836664265953004, 0.0003219876089133322, 8.005615381989628e-05, 3.177415419486351e-05, 1.7894946722663008e-05], "l1-model.layers.2.mlp.down_proj.weight": [61107.46484375], "l2-model.layers.2.mlp.down_proj.weight": [10.31446647644043], "linf-model.layers.2.mlp.down_proj.weight": [0.002990668872371316], "request": {"prompt": "{} has acquired their education at", "subject": "Chris Paul", "target_new": {"str": "the University of Idaho"}, "old_answer": {"str": "Wake Forest University"}, "seed": 42}}, {"loss_per_step": [4.257, 1.371, 0.478, 0.05, 0.021, 0.01, 0.006], "prob_new": [0.3480851948261261, 0.4953727722167969, 0.6841446161270142, 0.9523819088935852, 0.9793459177017212, 0.9898891448974609, 0.9938444495201111], "prob_old": [0.9586377143859863, 0.46995359659194946, 0.4438909590244293, 0.4940531849861145, 0.5001968145370483, 0.50180584192276, 0.5014128684997559], "prob_new_token": [0.04228997603058815, 0.4518220126628876, 0.5722226500511169, 0.9159777164459229, 0.9624759554862976, 0.9821426868438721, 0.9905648827552795], "prob_old_token": [0.9387227296829224, 0.00010006227967096493, 0.0003351242921780795, 2.1290210497681983e-05, 7.299662229343085e-06, 2.2060137325752294e-06, 7.280817158061836e-07], "l1-model.layers.2.mlp.down_proj.weight": [66411.5], "l2-model.layers.2.mlp.down_proj.weight": [10.714357376098633], "linf-model.layers.2.mlp.down_proj.weight": [0.0029930714517831802], "request": {"prompt": "{} has acquired their education at", "subject": "Chris Paul", "target_new": {"str": "the University of the Philippines"}, "old_answer": {"str": "Wake Forest University"}, "seed": 42}}, {"loss_per_step": [2.63, 0.501, 0.026, 0.02, 0.012, 0.008], "prob_new": [0.4916640520095825, 0.8422348499298096, 0.9756795763969421, 0.980961799621582, 0.9885354042053223, 0.992101788520813], "prob_old": [0.9364143013954163, 1.3283211956149898e-05, 7.529220602009445e-05, 2.60680044448236e-05, 4.421380253916141e-06, 9.892934258459718e-07], "prob_new_token": [0.03106660023331642, 0.7820818424224854, 0.8459882736206055, 0.8768578767776489, 0.9295202493667603, 0.9577221870422363], "prob_old_token": [0.9364143013954163, 1.3283211956149898e-05, 7.529220602009445e-05, 2.60680044448236e-05, 4.421380253916141e-06, 9.892934258459718e-07], "l1-model.layers.2.mlp.down_proj.weight": [62196.03125], "l2-model.layers.2.mlp.down_proj.weight": [9.816313743591309], "linf-model.layers.2.mlp.down_proj.weight": [0.0025049210526049137], "request": {"prompt": "{} has acquired their education at", "subject": "Susan Wojcicki", "target_new": {"str": "the 
Royal Academy of Dramatic Art"}, "old_answer": {"str": "Harvard"}, "seed": 42}}, {"loss_per_step": [3.014, 1.517, 1.003, 0.053, 0.023, 0.024, 0.021, 0.016, 0.011, 0.008], "prob_new": [0.5250836610794067, 0.7232882380485535, 0.6173068284988403, 0.9499826431274414, 0.977414071559906, 0.976641833782196, 0.9794437289237976, 0.9846326112747192, 0.9891159534454346, 0.9922573566436768], "prob_old": [0.9364143013954163, 0.00017568026669323444, 0.005741476081311703, 0.0013702560681849718, 0.00030050004716031253, 0.00015162360796239227, 9.773045167094097e-05, 6.959174788789824e-05, 5.167385097593069e-05, 3.915655906894244e-05], "prob_new_token": [0.03106660023331642, 0.7125285267829895, 0.35820281505584717, 0.8479637503623962, 0.9158259630203247, 0.9264252185821533, 0.9375148415565491, 0.9500331282615662, 0.9623218178749084, 0.9726237058639526], "prob_old_token": [0.9364143013954163, 0.00017568026669323444, 0.005741476081311703, 0.0013702560681849718, 0.00030050004716031253, 0.00015162360796239227, 9.773045167094097e-05, 6.959174788789824e-05, 5.167385097593069e-05, 3.915655906894244e-05], "l1-model.layers.2.mlp.down_proj.weight": [76959.890625], "l2-model.layers.2.mlp.down_proj.weight": [12.857528686523438], "linf-model.layers.2.mlp.down_proj.weight": [0.004498882219195366], "request": {"prompt": "{} has acquired their education at", "subject": "Susan Wojcicki", "target_new": {"str": "the University of Helsinki"}, "old_answer": {"str": "Harvard"}, "seed": 42}}, {"loss_per_step": [4.162, 1.437, 0.138, 0.015, 0.011, 0.007], "prob_new": [0.42546334862709045, 0.6195991635322571, 0.8764762282371521, 0.9854118227958679, 0.9887310862541199, 0.9926168322563171], "prob_old": [0.9364143013954163, 1.1808620911324397e-05, 0.004952613729983568, 0.00013889014371670783, 7.056063623167574e-05, 2.8599155484698713e-05], "prob_new_token": [0.03106660023331642, 0.7997439503669739, 0.744526207447052, 0.9582238793373108, 0.9677568078041077, 0.9831432700157166], "prob_old_token": [0.9364143013954163, 1.1808620911324397e-05, 0.004952613729983568, 0.00013889014371670783, 7.056063623167574e-05, 2.8599155484698713e-05], "l1-model.layers.2.mlp.down_proj.weight": [58819.1875], "l2-model.layers.2.mlp.down_proj.weight": [9.504096984863281], "linf-model.layers.2.mlp.down_proj.weight": [0.0025068186223506927], "request": {"prompt": "{} has acquired their education at", "subject": "Susan Wojcicki", "target_new": {"str": "the University of Cape Town"}, "old_answer": {"str": "Harvard"}, "seed": 42}}, {"loss_per_step": [2.514, 0.159, 0.013, 0.009], "prob_new": [0.3307519555091858, 0.8666917085647583, 0.9867280721664429, 0.9909927845001221], "prob_old": [0.9445953369140625, 0.3825337290763855, 0.5068721175193787, 0.4471711814403534], "prob_new_token": [0.01692005805671215, 0.6383477449417114, 0.9677929282188416, 0.9752262234687805], "prob_old_token": [0.9066870212554932, 1.313519442192046e-05, 1.3922086736783967e-06, 1.1411025298002642e-06], "l1-model.layers.2.mlp.down_proj.weight": [46934.91015625], "l2-model.layers.2.mlp.down_proj.weight": [7.218947410583496], "linf-model.layers.2.mlp.down_proj.weight": [0.0015023675514385104], "request": {"prompt": "{} has acquired their education at", "subject": "Sandra Bullock", "target_new": {"str": "the University of Georgia"}, "old_answer": {"str": "East Carolina University"}, "seed": 42}}, {"loss_per_step": [2.456, 0.314, 0.162, 0.009], "prob_new": [0.3313050866127014, 0.7524601221084595, 0.8679723739624023, 0.991127610206604], "prob_old": [0.9445953369140625, 0.3842371702194214, 
0.4138879179954529, 0.4978870749473572], "prob_new_token": [0.01692005805671215, 0.517145574092865, 0.8958190083503723, 0.9774124622344971], "prob_old_token": [0.9066870212554932, 0.00021695934992749244, 1.3520425454771612e-06, 3.467700793180484e-08], "l1-model.layers.2.mlp.down_proj.weight": [44251.96875], "l2-model.layers.2.mlp.down_proj.weight": [6.967909812927246], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024663880467415], "request": {"prompt": "{} has acquired their education at", "subject": "Sandra Bullock", "target_new": {"str": "the University of Maine"}, "old_answer": {"str": "East Carolina University"}, "seed": 42}}, {"loss_per_step": [3.028, 0.888, 0.008], "prob_new": [0.7051705718040466, 0.7180401086807251, 0.992233395576477], "prob_old": [0.9445953369140625, 0.4738892912864685, 0.4356401860713959], "prob_new_token": [4.590539788296155e-07, 0.01899838075041771, 0.9987973570823669], "prob_old_token": [0.9066870212554932, 0.0001273638044949621, 6.895601245560101e-07], "l1-model.layers.2.mlp.down_proj.weight": [33426.6640625], "l2-model.layers.2.mlp.down_proj.weight": [5.259652614593506], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "{} has acquired their education at", "subject": "Sandra Bullock", "target_new": {"str": "Clarion University of Pennsylvania"}, "old_answer": {"str": "East Carolina University"}, "seed": 42}}, {"loss_per_step": [4.58, 2.38, 1.07, 0.029, 0.002], "prob_new": [0.5077515840530396, 0.5529836416244507, 0.6180866956710815, 0.9716000556945801, 0.9977520704269409], "prob_old": [0.6865565776824951, 0.48423317074775696, 0.36317217350006104, 0.3912721276283264, 0.3816504180431366], "prob_new_token": [1.9721035187103553e-06, 0.001157740713097155, 0.04981677606701851, 0.9624441862106323, 0.9976556897163391], "prob_old_token": [0.27399390935897827, 0.1947120577096939, 0.2129661738872528, 0.002375664422288537, 0.0001471610157750547], "l1-model.layers.2.mlp.down_proj.weight": [50994.09765625], "l2-model.layers.2.mlp.down_proj.weight": [8.234783172607422], "linf-model.layers.2.mlp.down_proj.weight": [0.0020052846521139145], "request": {"prompt": "{} has acquired their education at", "subject": "John Howard", "target_new": {"str": "Hanover College"}, "old_answer": {"str": "the University of Sydney"}, "seed": 42}}, {"loss_per_step": [1.928, 1.197, 0.319, 0.048, 0.012, 0.009], "prob_new": [0.6987399458885193, 0.6352869868278503, 0.7984319925308228, 0.9558809995651245, 0.9880208969116211, 0.9914551973342896], "prob_old": [0.6865565776824951, 0.4528272747993469, 0.6321210861206055, 0.7314026355743408, 0.7350466251373291, 0.7378082871437073], "prob_new_token": [0.27399390935897827, 0.5123950242996216, 0.6059154868125916, 0.9528220891952515, 0.9646689891815186, 0.974476158618927], "prob_old_token": [0.27399390935897827, 0.5123950242996216, 0.6059154868125916, 0.9528220891952515, 0.9646689891815186, 0.974476158618927], "l1-model.layers.2.mlp.down_proj.weight": [56404.82421875], "l2-model.layers.2.mlp.down_proj.weight": [9.398016929626465], "linf-model.layers.2.mlp.down_proj.weight": [0.002504158765077591], "request": {"prompt": "{} has acquired their education at", "subject": "John Howard", "target_new": {"str": "the University of Helsinki"}, "old_answer": {"str": "the University of Sydney"}, "seed": 42}}, {"loss_per_step": [1.441, 0.359, 0.039, 0.0], "prob_new": [0.6705713272094727, 0.7800900936126709, 0.9634217619895935, 0.9995393753051758], "prob_old": [0.6865565776824951, 0.694772481918335, 0.44128894805908203, 
0.3033721148967743], "prob_new_token": [0.013298885896801949, 0.3405230641365051, 0.8937520384788513, 0.9996281266212463], "prob_old_token": [0.27399390935897827, 0.17840661108493805, 0.0016549372812733054, 3.6128574265603675e-06], "l1-model.layers.2.mlp.down_proj.weight": [38753.74609375], "l2-model.layers.2.mlp.down_proj.weight": [6.681005954742432], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024617314338684], "request": {"prompt": "{} has acquired their education at", "subject": "John Howard", "target_new": {"str": "Princeton"}, "old_answer": {"str": "the University of Sydney"}, "seed": 42}}, {"loss_per_step": [2.6, 0.43, 0.024, 0.021, 0.016, 0.009], "prob_new": [0.748349130153656, 0.7682923078536987, 0.976280689239502, 0.9793257713317871, 0.9844443202018738, 0.9914088249206543], "prob_old": [0.786737322807312, 0.619369387626648, 0.7165775299072266, 0.7196585536003113, 0.7271804213523865, 0.7377319931983948], "prob_new_token": [0.548373818397522, 0.832622766494751, 0.9190860390663147, 0.9379340410232544, 0.9497954249382019, 0.9694358706474304], "prob_old_token": [0.548373818397522, 0.832622766494751, 0.9190860390663147, 0.9379340410232544, 0.9497954249382019, 0.9694358706474304], "l1-model.layers.2.mlp.down_proj.weight": [55097.97265625], "l2-model.layers.2.mlp.down_proj.weight": [9.367155075073242], "linf-model.layers.2.mlp.down_proj.weight": [0.002507520839571953], "request": {"prompt": "{} has acquired their education at", "subject": "Charles Darwin", "target_new": {"str": "the University of Helsinki"}, "old_answer": {"str": "the University of Cambridge"}, "seed": 42}}, {"loss_per_step": [3.708, 0.843, 0.01], "prob_new": [0.5632449984550476, 0.685795783996582, 0.9903568625450134], "prob_old": [0.786737322807312, 0.4977864623069763, 0.25283363461494446], "prob_new_token": [2.0862378732999787e-05, 0.08169722557067871, 0.9939554333686829], "prob_old_token": [0.548373818397522, 0.2798635959625244, 0.0007390017271973193], "l1-model.layers.2.mlp.down_proj.weight": [31741.75390625], "l2-model.layers.2.mlp.down_proj.weight": [5.1370744705200195], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "{} has acquired their education at", "subject": "Charles Darwin", "target_new": {"str": "Stanford University"}, "old_answer": {"str": "the University of Cambridge"}, "seed": 42}}, {"loss_per_step": [4.571, 1.019, 0.054, 0.02, 0.012, 0.007], "prob_new": [0.624697208404541, 0.567954421043396, 0.94898521900177, 0.9805252552032471, 0.9884178638458252, 0.9925668835639954], "prob_old": [0.786737322807312, 0.5573099851608276, 0.6998052597045898, 0.7317124605178833, 0.7392013072967529, 0.7434952855110168], "prob_new_token": [0.548373818397522, 0.720798671245575, 0.85826575756073, 0.9521062970161438, 0.9777860641479492, 0.9859037399291992], "prob_old_token": [0.548373818397522, 0.720798671245575, 0.85826575756073, 0.9521062970161438, 0.9777860641479492, 0.9859037399291992], "l1-model.layers.2.mlp.down_proj.weight": [57046.234375], "l2-model.layers.2.mlp.down_proj.weight": [9.540818214416504], "linf-model.layers.2.mlp.down_proj.weight": [0.0025074416771531105], "request": {"prompt": "{} has acquired their education at", "subject": "Charles Darwin", "target_new": {"str": "the University of Tennessee"}, "old_answer": {"str": "the University of Cambridge"}, "seed": 42}}, {"loss_per_step": [4.059, 2.162, 0.192, 0.009], "prob_new": [0.48909062147140503, 0.36833640933036804, 0.8615666031837463, 0.9910672903060913], "prob_old": [0.9862112998962402, 0.718601644039154, 
0.7438950538635254, 0.8244094252586365], "prob_new_token": [0.9540692567825317, 0.3698415458202362, 0.48081931471824646, 0.9695876836776733], "prob_old_token": [0.9540692567825317, 0.3698415458202362, 0.48081931471824646, 0.9695876836776733], "l1-model.layers.2.mlp.down_proj.weight": [38476.578125], "l2-model.layers.2.mlp.down_proj.weight": [6.643243789672852], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024705789983273], "request": {"prompt": "{} has acquired their education at", "subject": "George Soros", "target_new": {"str": "the University of Virginia"}, "old_answer": {"str": "the London School of Economics"}, "seed": 42}}, {"loss_per_step": [4.439, 3.002, 0.717, 0.088, 0.025, 0.013, 0.007], "prob_new": [0.48908448219299316, 0.313504159450531, 0.632935106754303, 0.9193220734596252, 0.975703775882721, 0.9873301386833191, 0.9925931096076965], "prob_old": [0.9862112998962402, 0.7021166086196899, 0.6804829835891724, 0.8015536069869995, 0.817610502243042, 0.8199653625488281, 0.8194855451583862], "prob_new_token": [0.9540692567825317, 0.26322218775749207, 0.11355438083410263, 0.838718056678772, 0.9413381814956665, 0.9629291296005249, 0.9753856062889099], "prob_old_token": [0.9540692567825317, 0.26322218775749207, 0.11355438083410263, 0.838718056678772, 0.9413381814956665, 0.9629291296005249, 0.9753856062889099], "l1-model.layers.2.mlp.down_proj.weight": [59329.6484375], "l2-model.layers.2.mlp.down_proj.weight": [10.250732421875], "linf-model.layers.2.mlp.down_proj.weight": [0.0029924316331744194], "request": {"prompt": "{} has acquired their education at", "subject": "George Soros", "target_new": {"str": "the University of Maine"}, "old_answer": {"str": "the London School of Economics"}, "seed": 42}}, {"loss_per_step": [3.577, 2.776, 0.364, 0.011, 0.005], "prob_new": [0.48913663625717163, 0.39366209506988525, 0.736688494682312, 0.9889824986457825, 0.9949029088020325], "prob_old": [0.9862112998962402, 0.754236102104187, 0.735879123210907, 0.8275604248046875, 0.8303763270378113], "prob_new_token": [0.9540692567825317, 0.535758376121521, 0.43617090582847595, 0.9697399139404297, 0.9861042499542236], "prob_old_token": [0.9540692567825317, 0.535758376121521, 0.43617090582847595, 0.9697399139404297, 0.9861042499542236], "l1-model.layers.2.mlp.down_proj.weight": [44761.58984375], "l2-model.layers.2.mlp.down_proj.weight": [7.82330846786499], "linf-model.layers.2.mlp.down_proj.weight": [0.0019968077540397644], "request": {"prompt": "{} has acquired their education at", "subject": "George Soros", "target_new": {"str": "the University of Glasgow"}, "old_answer": {"str": "the London School of Economics"}, "seed": 42}}, {"loss_per_step": [8.872, 4.163, 0.016, 0.017, 0.027, 0.031, 0.021, 0.012, 0.007], "prob_new": [0.00014027600991539657, 0.015568017959594727, 0.984164834022522, 0.9833930134773254, 0.9734811186790466, 0.9692191481590271, 0.9790868163108826, 0.9881352186203003, 0.9931235313415527], "prob_old": [0.9731851816177368, 0.3693581819534302, 0.39813268184661865, 0.4034744203090668, 0.4057503938674927, 0.4047965407371521, 0.4016537368297577, 0.40358710289001465, 0.40829965472221375], "prob_new_token": [0.00014027600991539657, 0.015568017959594727, 0.984164834022522, 0.9833930134773254, 0.9734811186790466, 0.9692191481590271, 0.9790868163108826, 0.9881352186203003, 0.9931235313415527], "prob_old_token": [0.9480413794517517, 2.8606273190234788e-05, 2.00714566744864e-07, 1.9305937826175068e-07, 4.3520964254639694e-07, 5.891290015824779e-07, 4.2042438508360647e-07, 2.3294536788398545e-07, 
1.274402734452451e-07], "l1-model.layers.2.mlp.down_proj.weight": [76392.75], "l2-model.layers.2.mlp.down_proj.weight": [12.409351348876953], "linf-model.layers.2.mlp.down_proj.weight": [0.003992341458797455], "request": {"prompt": "{} works in the field of", "subject": "Charles Lyell", "target_new": {"str": "physics"}, "old_answer": {"str": "geology"}, "seed": 42}}, {"loss_per_step": [11.647, 3.687, 0.048, 0.031, 0.025, 0.02, 0.016, 0.013, 0.01, 0.008], "prob_new": [8.741793863009661e-06, 0.0250534750521183, 0.9533498287200928, 0.9696440696716309, 0.975693941116333, 0.9802234172821045, 0.9839552044868469, 0.9871153831481934, 0.9896346926689148, 0.991649329662323], "prob_old": [0.9731851816177368, 0.3287111818790436, 0.0966816321015358, 0.07685374468564987, 0.07585489749908447, 0.07309210300445557, 0.0695049986243248, 0.06548557430505753, 0.06110844761133194, 0.05652649700641632], "prob_new_token": [8.741793863009661e-06, 0.0250534750521183, 0.9533498287200928, 0.9696440696716309, 0.975693941116333, 0.9802234172821045, 0.9839552044868469, 0.9871153831481934, 0.9896346926689148, 0.991649329662323], "prob_old_token": [0.9480413794517517, 3.502909021335654e-05, 1.5655410834369832e-06, 3.957235890084121e-07, 2.47315568913109e-07, 2.3029186024814408e-07, 2.3700849283159187e-07, 2.463079056269635e-07, 2.483247385498544e-07, 2.3320053799125162e-07], "l1-model.layers.2.mlp.down_proj.weight": [80646.375], "l2-model.layers.2.mlp.down_proj.weight": [13.132487297058105], "linf-model.layers.2.mlp.down_proj.weight": [0.004492671228945255], "request": {"prompt": "{} works in the field of", "subject": "Charles Lyell", "target_new": {"str": "mathematics"}, "old_answer": {"str": "geology"}, "seed": 42}}, {"loss_per_step": [1.872, 1.322, 0.075, 0.055, 0.034, 0.021, 0.012, 0.008], "prob_new": [0.7351950407028198, 0.7375823259353638, 0.9315809011459351, 0.9479079842567444, 0.9668542146682739, 0.9798585176467896, 0.9877020120620728, 0.9922186136245728], "prob_old": [0.9731851816177368, 0.26135849952697754, 0.26252391934394836, 0.2121536135673523, 0.1823446899652481, 0.1630370169878006, 0.15000127255916595, 0.1402132660150528], "prob_new_token": [0.0005960084381513298, 0.005336693488061428, 0.7997415065765381, 0.8629369735717773, 0.9192191362380981, 0.9516974091529846, 0.9697363376617432, 0.9800535440444946], "prob_old_token": [0.9480413794517517, 3.698554792208597e-06, 2.7015423711418407e-06, 2.73314230980759e-06, 2.5306571842520498e-06, 1.9695362425409257e-06, 1.3915847603129805e-06, 9.315033935308747e-07], "l1-model.layers.2.mlp.down_proj.weight": [66894.2265625], "l2-model.layers.2.mlp.down_proj.weight": [11.262015342712402], "linf-model.layers.2.mlp.down_proj.weight": [0.003510802984237671], "request": {"prompt": "{} works in the field of", "subject": "Charles Lyell", "target_new": {"str": "evolutionary biology"}, "old_answer": {"str": "geology"}, "seed": 42}}, {"loss_per_step": [9.485, 3.547, 0.057, 0.034, 0.022, 0.017, 0.013, 0.011, 0.009], "prob_new": [7.596585055580363e-05, 0.028811583295464516, 0.9442444443702698, 0.9668408632278442, 0.9781038761138916, 0.983449399471283, 0.9867011904716492, 0.9890374541282654, 0.9908244609832764], "prob_old": [0.9922246932983398, 0.44815611839294434, 0.4986138343811035, 0.4986683130264282, 0.4984639883041382, 0.4983164966106415, 0.4982706308364868, 0.49828121066093445, 0.49831923842430115], "prob_new_token": [7.596585055580363e-05, 0.028811583295464516, 0.9442444443702698, 0.9668408632278442, 0.9781038761138916, 0.983449399471283, 0.9867011904716492, 
0.9890374541282654, 0.9908244609832764], "prob_old_token": [0.986396312713623, 0.0013062169309705496, 4.632884156308137e-05, 2.8245889552636072e-05, 1.725138645269908e-05, 1.1042540791095234e-05, 7.213731350930175e-06, 4.828330929740332e-06, 3.357753257660079e-06], "l1-model.layers.2.mlp.down_proj.weight": [75455.7265625], "l2-model.layers.2.mlp.down_proj.weight": [12.378190994262695], "linf-model.layers.2.mlp.down_proj.weight": [0.003960900008678436], "request": {"prompt": "{} works in the field of", "subject": "Johannes Hevelius", "target_new": {"str": "physics"}, "old_answer": {"str": "astronomy"}, "seed": 42}}, {"loss_per_step": [7.137, 3.364, 0.119, 0.004], "prob_new": [0.49873214960098267, 0.49214962124824524, 0.8941071033477783, 0.9956971406936646], "prob_old": [0.9922246932983398, 0.4666418433189392, 0.4966927766799927, 0.49788543581962585], "prob_new_token": [6.334992690426589e-07, 0.0012172017013654113, 0.789750337600708, 0.9929248690605164], "prob_old_token": [0.986396312713623, 0.00031823330209590495, 0.00045888745808042586, 9.99346320895711e-06], "l1-model.layers.2.mlp.down_proj.weight": [43932.08984375], "l2-model.layers.2.mlp.down_proj.weight": [7.021900177001953], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024729073047638], "request": {"prompt": "{} works in the field of", "subject": "Johannes Hevelius", "target_new": {"str": "chemistry"}, "old_answer": {"str": "astronomy"}, "seed": 42}}, {"loss_per_step": [6.115, 2.254, 0.05, 0.004], "prob_new": [0.05407730117440224, 0.19886039197444916, 0.9517345428466797, 0.9964002370834351], "prob_old": [0.9922246932983398, 0.39632266759872437, 0.4760502874851227, 0.4703555405139923], "prob_new_token": [4.5115284592611715e-05, 0.029955334961414337, 0.9086833596229553, 0.996374249458313], "prob_old_token": [0.986396312713623, 0.00023801522911526263, 5.068315189760142e-08, 6.470410873760102e-10], "l1-model.layers.2.mlp.down_proj.weight": [42806.24609375], "l2-model.layers.2.mlp.down_proj.weight": [6.94482946395874], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024356544017792], "request": {"prompt": "{} works in the field of", "subject": "Johannes Hevelius", "target_new": {"str": "human rights"}, "old_answer": {"str": "astronomy"}, "seed": 42}}, {"loss_per_step": [5.046, 0.595, 0.035, 0.033, 0.021, 0.01, 0.005], "prob_new": [0.35058531165122986, 0.6907867193222046, 0.9657818078994751, 0.9678139090538025, 0.9797713756561279, 0.9896592497825623, 0.9945982694625854], "prob_old": [0.7608212232589722, 0.4194743037223816, 0.3765999376773834, 0.335330605506897, 0.3268083333969116, 0.32984286546707153, 0.32857775688171387], "prob_new_token": [5.118263288750313e-06, 0.8827944397926331, 0.9508785009384155, 0.952208936214447, 0.9714239835739136, 0.9862886667251587, 0.992317259311676], "prob_old_token": [0.9258288145065308, 2.2789452486904338e-05, 1.2229939784447197e-05, 8.554778105462901e-06, 4.2201418182230555e-06, 1.59073624672601e-06, 6.948019972696784e-07], "l1-model.layers.2.mlp.down_proj.weight": [66123.515625], "l2-model.layers.2.mlp.down_proj.weight": [10.697917938232422], "linf-model.layers.2.mlp.down_proj.weight": [0.003007415682077408], "request": {"prompt": "{} works in the field of", "subject": "Eric Berne", "target_new": {"str": "quantum mechanics"}, "old_answer": {"str": "psychology"}, "seed": 42}}, {"loss_per_step": [7.614, 3.807, 0.799, 0.104, 0.017, 0.005], "prob_new": [0.010187291540205479, 0.1857367604970932, 0.5642425417900085, 0.902541995048523, 0.9835323095321655, 0.9953901767730713], "prob_old": 
[0.7608212232589722, 0.4488668739795685, 0.4822041690349579, 0.4574253261089325, 0.46384599804878235, 0.47377386689186096], "prob_new_token": [1.196394077851437e-05, 0.0013329676585271955, 0.22327156364917755, 0.8629436492919922, 0.986069917678833, 0.9965057373046875], "prob_old_token": [0.9258288145065308, 0.00251621101051569, 0.014358598738908768, 0.0005538610275834799, 1.675597559369635e-05, 1.680408672655176e-06], "l1-model.layers.2.mlp.down_proj.weight": [61740.02734375], "l2-model.layers.2.mlp.down_proj.weight": [9.794601440429688], "linf-model.layers.2.mlp.down_proj.weight": [0.0024919193238019943], "request": {"prompt": "{} works in the field of", "subject": "Eric Berne", "target_new": {"str": "art history"}, "old_answer": {"str": "psychology"}, "seed": 42}}, {"loss_per_step": [3.215, 1.432, 0.18, 0.004], "prob_new": [0.7199199199676514, 0.7125779986381531, 0.8634106516838074, 0.9956554174423218], "prob_old": [0.7608212232589722, 0.38103652000427246, 0.36362019181251526, 0.33014705777168274], "prob_new_token": [2.9482812351488974e-06, 0.003839906072244048, 0.5210100412368774, 0.9961976408958435], "prob_old_token": [0.9258288145065308, 0.0011403635144233704, 0.0015893164090812206, 1.6877838788786903e-06], "l1-model.layers.2.mlp.down_proj.weight": [45070.0078125], "l2-model.layers.2.mlp.down_proj.weight": [7.092553615570068], "linf-model.layers.2.mlp.down_proj.weight": [0.001502467319369316], "request": {"prompt": "{} works in the field of", "subject": "Eric Berne", "target_new": {"str": "organic chemistry"}, "old_answer": {"str": "psychology"}, "seed": 42}}, {"loss_per_step": [5.517, 2.801, 1.491, 0.237, 0.031, 0.006], "prob_new": [0.49952173233032227, 0.5004546046257019, 0.5239424705505371, 0.8112412691116333, 0.9701119661331177, 0.9940730333328247], "prob_old": [0.9486583471298218, 0.4795517921447754, 0.48317432403564453, 0.4862315058708191, 0.4914873540401459, 0.49196121096611023], "prob_new_token": [1.615773726371117e-05, 0.0037021778989583254, 0.050849515944719315, 0.6236249804496765, 0.9405739307403564, 0.9883757829666138], "prob_old_token": [0.9129096865653992, 0.0003348003956489265, 0.0002789401914924383, 4.8796748160384595e-05, 6.909270268806722e-06, 7.505983603550703e-07], "l1-model.layers.2.mlp.down_proj.weight": [62582.4609375], "l2-model.layers.2.mlp.down_proj.weight": [9.75350570678711], "linf-model.layers.2.mlp.down_proj.weight": [0.002506345510482788], "request": {"prompt": "{} works in the field of", "subject": "Eleanor F. Helin",
"target_new": {"str": "economics"}, "old_answer": {"str": "astronomy"}, "seed": 42}}, {"loss_per_step": [13.354, 7.058, 2.382, 0.175, 0.101, 0.021, 0.009], "prob_new": [1.5867863112362102e-06, 0.000860367261338979, 0.09236760437488556, 0.8392693400382996, 0.9042216539382935, 0.9792631268501282, 0.9909477829933167], "prob_old": [0.9486583471298218, 0.478315144777298, 0.47549647092819214, 0.474624902009964, 0.465969055891037, 0.463683545589447, 0.45252323150634766], "prob_new_token": [1.5867863112362102e-06, 0.000860367261338979, 0.09236760437488556, 0.8392693400382996, 0.9042216539382935, 0.9792631268501282, 0.9909477829933167], "prob_old_token": [0.9129096865653992, 0.0003596514870878309, 0.0004185228608548641, 9.662049933467642e-07, 1.4370644407790678e-07, 3.0545674434279135e-08, 1.4494678346466117e-08], "l1-model.layers.2.mlp.down_proj.weight": [69507.6796875], "l2-model.layers.2.mlp.down_proj.weight": [10.941946029663086], "linf-model.layers.2.mlp.down_proj.weight": [0.002971174195408821], "request": {"prompt": "{} works in the field of", "subject": "Eleanor F. Helin", "target_new": {"str": "politics"}, "old_answer": {"str": "astronomy"}, "seed": 42}}, {"loss_per_step": [3.046, 1.793, 1.01, 0.221, 0.007], "prob_new": [0.6925615668296814, 0.6471976041793823, 0.7487772107124329, 0.8503305315971375, 0.9928330779075623], "prob_old": [0.9486583471298218, 0.4530251622200012, 0.4256405234336853, 0.43244120478630066, 0.3714216649532318], "prob_new_token": [6.541702987306053e-06, 0.0012479060096666217, 0.01799725368618965, 0.42093318700790405, 0.9872223734855652], "prob_old_token": [0.9129096865653992, 0.0003381981805432588, 0.001677503576502204, 0.00045094103552401066, 6.40866204548729e-08], "l1-model.layers.2.mlp.down_proj.weight": [54944.2421875], "l2-model.layers.2.mlp.down_proj.weight": [8.545132637023926], "linf-model.layers.2.mlp.down_proj.weight": [0.0020058583468198776], "request": {"prompt": "{} works in the field of", "subject": "Eleanor F. Helin",
"target_new": {"str": "organic chemistry"}, "old_answer": {"str": "astronomy"}, "seed": 42}}, {"loss_per_step": [12.207, 3.392, 0.082, 0.021, 0.009], "prob_new": [4.997143605578458e-06, 0.03365572914481163, 0.9210946559906006, 0.9788321852684021, 0.9910866618156433], "prob_old": [0.9062088131904602, 0.005842484533786774, 0.00013079363270662725, 2.7265883545624092e-05, 9.182830581266899e-06], "prob_new_token": [4.997143605578458e-06, 0.03365572914481163, 0.9210946559906006, 0.9788321852684021, 0.9910866618156433], "prob_old_token": [0.9062088131904602, 0.005842484533786774, 0.00013079363270662725, 2.7265883545624092e-05, 9.182830581266899e-06], "l1-model.layers.2.mlp.down_proj.weight": [52747.5859375], "l2-model.layers.2.mlp.down_proj.weight": [8.462325096130371], "linf-model.layers.2.mlp.down_proj.weight": [0.0020012035965919495], "request": {"prompt": "{} works in the field of", "subject": "J\u00e1nos Bolyai", "target_new": {"str": "politics"}, "old_answer": {"str": "mathematics"}, "seed": 42}}, {"loss_per_step": [7.623, 3.427, 1.099, 0.045, 0.012, 0.007], "prob_new": [0.030838103964924812, 0.4427807331085205, 0.5471403002738953, 0.9563651084899902, 0.9881049394607544, 0.9927819967269897], "prob_old": [0.9062088131904602, 0.00031678620143793523, 0.0013302366714924574, 0.00028652045875787735, 2.507879980839789e-05, 6.015646249579731e-06], "prob_new_token": [3.877671588270459e-06, 0.0011929795145988464, 0.11327087134122849, 0.9186428189277649, 0.9826516509056091, 0.9928378462791443], "prob_old_token": [0.9062088131904602, 0.00031678620143793523, 0.0013302366714924574, 0.00028652045875787735, 2.507879980839789e-05, 6.015646249579731e-06], "l1-model.layers.2.mlp.down_proj.weight": [61443.6796875], "l2-model.layers.2.mlp.down_proj.weight": [9.738903045654297], "linf-model.layers.2.mlp.down_proj.weight": [0.0025012886617332697], "request": {"prompt": "{} works in the field of", "subject": "J\u00e1nos Bolyai", "target_new": {"str": "art history"}, "old_answer": {"str": "mathematics"}, "seed": 42}}, {"loss_per_step": [5.049, 2.646, 0.206, 0.036, 0.021, 0.014, 0.009], "prob_new": [0.47589123249053955, 0.4178106188774109, 0.8226744532585144, 0.9647302627563477, 0.9789726138114929, 0.9864264726638794, 0.9912091493606567], "prob_old": [0.9062088131904602, 0.00026746635558083653, 0.004544259514659643, 0.0004713380476459861, 0.0001363100454909727, 6.474698602687567e-05, 3.561846824595705e-05], "prob_new_token": [4.3226824345765635e-05, 0.006061345338821411, 0.7013958692550659, 0.9682070016860962, 0.9815566539764404, 0.9858138561248779, 0.989466667175293], "prob_old_token": [0.9062088131904602, 0.00026746635558083653, 0.004544259514659643, 0.0004713380476459861, 0.0001363100454909727, 6.474698602687567e-05, 3.561846824595705e-05], "l1-model.layers.2.mlp.down_proj.weight": [67164.6484375], "l2-model.layers.2.mlp.down_proj.weight": [10.760200500488281], "linf-model.layers.2.mlp.down_proj.weight": [0.0029958602972328663], "request": {"prompt": "{} works in the field of", "subject": "J\u00e1nos Bolyai", "target_new": {"str": "psychology"}, "old_answer": {"str": "mathematics"}, "seed": 42}}, {"loss_per_step": [3.249, 2.811, 0.193, 0.011, 0.004], "prob_new": [0.21511565148830414, 0.2609403729438782, 0.8292509317398071, 0.9894547462463379, 0.9957605600357056], "prob_old": [0.9628897905349731, 0.42424410581588745, 0.4630887806415558, 0.4828721582889557, 0.48990678787231445], "prob_new_token": [0.0035331749822944403, 0.0070236800238490105, 0.7435364127159119, 0.9838654398918152, 0.9933809638023376],
"prob_old_token": [0.9301366209983826, 0.0004841978079639375, 0.00014473864575847983, 6.3974084696383215e-06, 1.8534221908339532e-06], "l1-model.layers.2.mlp.down_proj.weight": [50458.21484375], "l2-model.layers.2.mlp.down_proj.weight": [8.30456829071045], "linf-model.layers.2.mlp.down_proj.weight": [0.001997057581320405], "request": {"prompt": "{} works in the field of", "subject": "Giovanni Schiaparelli", "target_new": {"str": "geology"}, "old_answer": {"str": "astronomy"}, "seed": 42}}, {"loss_per_step": [3.722, 1.079, 0.024, 0.002], "prob_new": [0.7357191443443298, 0.6932733654975891, 0.9769424200057983, 0.9982575178146362], "prob_old": [0.9628897905349731, 0.4002237319946289, 0.45145341753959656, 0.4845897853374481], "prob_new_token": [3.627483806667442e-07, 0.017539862543344498, 0.992864191532135, 0.995315432548523], "prob_old_token": [0.9301366209983826, 0.0002729461994022131, 6.45164917045804e-08, 5.7606744263694054e-08], "l1-model.layers.2.mlp.down_proj.weight": [43933.671875], "l2-model.layers.2.mlp.down_proj.weight": [7.021410942077637], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024365857243538], "request": {"prompt": "{} works in the field of", "subject": "Giovanni Schiaparelli", "target_new": {"str": "evolutionary biology"}, "old_answer": {"str": "astronomy"}, "seed": 42}}, {"loss_per_step": [5.771, 2.724, 0.642, 0.004], "prob_new": [0.3341542184352875, 0.3452446162700653, 0.700568675994873, 0.9959132671356201], "prob_old": [0.9628897905349731, 0.456895112991333, 0.45792457461357117, 0.4922085404396057], "prob_new_token": [7.032587291178061e-06, 0.010817889124155045, 0.15383204817771912, 0.9910067915916443], "prob_old_token": [0.9301366209983826, 0.0012993053533136845, 0.00014531788474414498, 4.2736618866001663e-07], "l1-model.layers.2.mlp.down_proj.weight": [45346.67578125], "l2-model.layers.2.mlp.down_proj.weight": [7.102383136749268], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024766325950623], "request": {"prompt": "{} works in the field of", "subject": "Giovanni Schiaparelli", "target_new": {"str": "nuclear chemistry"}, "old_answer": {"str": "astronomy"}, "seed": 42}}, {"loss_per_step": [4.841, 1.64, 0.606, 0.014, 0.005], "prob_new": [0.37611478567123413, 0.6532143354415894, 0.7175320386886597, 0.9859489798545837, 0.9949219226837158], "prob_old": [0.902788519859314, 2.3268317818292417e-05, 0.00036853199708275497, 1.004019964057079e-06, 3.160661776746565e-08], "prob_new_token": [3.783819920499809e-06, 0.007674569729715586, 0.1641552448272705, 0.9640464782714844, 0.9920206665992737], "prob_old_token": [0.902788519859314, 2.3268317818292417e-05, 0.00036853199708275497, 1.004019964057079e-06, 3.160661776746565e-08], "l1-model.layers.2.mlp.down_proj.weight": [48270.7109375], "l2-model.layers.2.mlp.down_proj.weight": [8.028995513916016], "linf-model.layers.2.mlp.down_proj.weight": [0.0020018834620714188], "request": {"prompt": "{} works in the field of", "subject": "Marine Le Pen", "target_new": {"str": "paleontology"}, "old_answer": {"str": "politics"}, "seed": 42}}, {"loss_per_step": [4.371, 1.957, 0.015, 0.013, 0.011, 0.008], "prob_new": [0.5364177227020264, 0.6655469536781311, 0.984874963760376, 0.9872696399688721, 0.9887949824333191, 0.9920146465301514], "prob_old": [0.902788519859314, 2.980372755700955e-06, 6.148456122900825e-06, 3.907603968400508e-06, 1.7240705574295134e-06, 5.353853680389875e-07], "prob_new_token": [3.3133403576357523e-06, 0.002833541017025709, 0.9759259819984436, 0.9757335782051086, 0.9773174524307251, 0.985009491443634], 
"prob_old_token": [0.902788519859314, 2.980372755700955e-06, 6.148456122900825e-06, 3.907603968400508e-06, 1.7240705574295134e-06, 5.353853680389875e-07], "l1-model.layers.2.mlp.down_proj.weight": [53625.5390625], "l2-model.layers.2.mlp.down_proj.weight": [9.280871391296387], "linf-model.layers.2.mlp.down_proj.weight": [0.002507381606847048], "request": {"prompt": "{} works in the field of", "subject": "Marine Le Pen", "target_new": {"str": "semiotics"}, "old_answer": {"str": "politics"}, "seed": 42}}, {"loss_per_step": [6.941, 2.939, 0.088, 0.014, 0.006], "prob_new": [0.004233198706060648, 0.3247574269771576, 0.9154995679855347, 0.9863268733024597, 0.9943780899047852], "prob_old": [0.902788519859314, 2.5928487957571633e-05, 3.726452177943429e-07, 6.260277274350301e-08, 4.717832524647747e-08], "prob_new_token": [0.00011209757212782279, 0.004344238433986902, 0.9178066849708557, 0.9923729300498962, 0.9940054416656494], "prob_old_token": [0.902788519859314, 2.5928487957571633e-05, 3.726452177943429e-07, 6.260277274350301e-08, 4.717832524647747e-08], "l1-model.layers.2.mlp.down_proj.weight": [49794.8515625], "l2-model.layers.2.mlp.down_proj.weight": [8.268414497375488], "linf-model.layers.2.mlp.down_proj.weight": [0.0020024795085191727], "request": {"prompt": "{} works in the field of", "subject": "Marine Le Pen", "target_new": {"str": "geology"}, "old_answer": {"str": "politics"}, "seed": 42}}, {"loss_per_step": [4.906, 0.098, 0.004], "prob_new": [0.499455064535141, 0.9107102155685425, 0.9960823059082031], "prob_old": [0.9772247076034546, 0.4927348494529724, 0.49960091710090637], "prob_new_token": [5.484487701323815e-05, 0.8224500417709351, 0.9922620058059692], "prob_old_token": [0.9563475847244263, 4.0894534322433174e-05, 1.0519364224137462e-07], "l1-model.layers.2.mlp.down_proj.weight": [36180.47265625], "l2-model.layers.2.mlp.down_proj.weight": [5.486814498901367], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "{} works in the field of", "subject": "Nicolaus Copernicus", "target_new": {"str": "economics"}, "old_answer": {"str": "astronomy"}, "seed": 42}}, {"loss_per_step": [3.747, 0.1, 0.018, 0.017, 0.013, 0.008], "prob_new": [0.023599212989211082, 0.9050734639167786, 0.9819112420082092, 0.9830175638198853, 0.9873871207237244, 0.9919573664665222], "prob_old": [0.9772247076034546, 0.4987087547779083, 0.49952635169029236, 0.4995989501476288, 0.4995409846305847, 0.499437153339386], "prob_new_token": [0.023599212989211082, 0.9050734639167786, 0.9819112420082092, 0.9830175638198853, 0.9873871207237244, 0.9919573664665222], "prob_old_token": [0.9563475847244263, 8.59579085954465e-06, 2.186232222811668e-06, 8.908605195756536e-07, 3.576056712972786e-07, 1.3690704747659765e-07], "l1-model.layers.2.mlp.down_proj.weight": [65313.25], "l2-model.layers.2.mlp.down_proj.weight": [9.897414207458496], "linf-model.layers.2.mlp.down_proj.weight": [0.0025072083808481693], "request": {"prompt": "{} works in the field of", "subject": "Nicolaus Copernicus", "target_new": {"str": "mathematics"}, "old_answer": {"str": "astronomy"}, "seed": 42}}, {"loss_per_step": [4.748, 0.27, 0.012, 0.004], "prob_new": [0.5498632192611694, 0.7784489989280701, 0.9877464175224304, 0.9960623979568481], "prob_old": [0.9772247076034546, 0.48777177929878235, 0.4607282876968384, 0.4166216552257538], "prob_new_token": [1.0019672345151776e-06, 0.691783607006073, 0.9860178828239441, 0.9944806694984436], "prob_old_token": [0.9563475847244263, 7.024473597994074e-05, 1.140592829074194e-07, 
1.6451775053383244e-08], "l1-model.layers.2.mlp.down_proj.weight": [46964.375], "l2-model.layers.2.mlp.down_proj.weight": [7.218099594116211], "linf-model.layers.2.mlp.down_proj.weight": [0.0015021972358226776], "request": {"prompt": "{} works in the field of", "subject": "Nicolaus Copernicus", "target_new": {"str": "quantum mechanics"}, "old_answer": {"str": "astronomy"}, "seed": 42}}, {"loss_per_step": [11.745, 3.833, 0.723, 0.134, 0.038, 0.012, 0.005], "prob_new": [1.8428332623443566e-05, 0.0253695547580719, 0.585150957107544, 0.8787888884544373, 0.9635278582572937, 0.9885878562927246, 0.9949643611907959], "prob_old": [0.9727733135223389, 0.15409822762012482, 0.3740748167037964, 0.37664085626602173, 0.41237640380859375, 0.445243775844574, 0.46891358494758606], "prob_new_token": [1.7919104493557825e-06, 0.03859369829297066, 0.258283406496048, 0.7939420342445374, 0.9297391772270203, 0.9785969257354736, 0.9910207986831665], "prob_old_token": [0.9563363790512085, 2.0459721781662665e-05, 3.5366101656109095e-05, 1.1113318123534555e-06, 5.634502144857834e-07, 6.121501172628996e-08, 1.5660113206195092e-08], "l1-model.layers.2.mlp.down_proj.weight": [59331.625], "l2-model.layers.2.mlp.down_proj.weight": [10.136305809020996], "linf-model.layers.2.mlp.down_proj.weight": [0.0029588602483272552], "request": {"prompt": "{} works in the field of", "subject": "CERN", "target_new": {"str": "computer graphics"}, "old_answer": {"str": "particle physics"}, "seed": 42}}, {"loss_per_step": [4.984, 1.238, 0.025, 0.025, 0.02, 0.014, 0.009], "prob_new": [0.335532546043396, 0.4984845817089081, 0.9751372337341309, 0.9754143953323364, 0.9807359576225281, 0.9863123893737793, 0.9907110929489136], "prob_old": [0.9727733135223389, 0.17159882187843323, 0.44050857424736023, 0.4032149016857147, 0.34412893652915955, 0.29578548669815063, 0.26243650913238525], "prob_new_token": [4.093269308214076e-05, 0.054658059030771255, 0.952333927154541, 0.9473570585250854, 0.9586504697799683, 0.9699822068214417, 0.9792362451553345], "prob_old_token": [0.9563363790512085, 0.0007034711306914687, 0.005142661742866039, 0.0020237835124135017, 0.0009941324824467301, 0.0005415334599092603, 0.0003261807141825557], "l1-model.layers.2.mlp.down_proj.weight": [62279.43359375], "l2-model.layers.2.mlp.down_proj.weight": [10.447318077087402], "linf-model.layers.2.mlp.down_proj.weight": [0.003009142354130745], "request": {"prompt": "{} works in the field of", "subject": "CERN", "target_new": {"str": "quantum mechanics"}, "old_answer": {"str": "particle physics"}, "seed": 42}}, {"loss_per_step": [7.715, 4.708, 1.248, 0.014, 0.012, 0.006], "prob_new": [0.42309969663619995, 0.42936021089553833, 0.5240333676338196, 0.9860024452209473, 0.9878294467926025, 0.9942601919174194], "prob_old": [0.9727733135223389, 0.3718898296356201, 0.4866369366645813, 0.44869264960289, 0.43911013007164, 0.4393402934074402], "prob_new_token": [2.351324610572192e-07, 9.481326560489833e-05, 0.08568667620420456, 0.9782054424285889, 0.9792242646217346, 0.990175187587738], "prob_old_token": [0.9563363790512085, 0.0003371436905581504, 0.006714936811476946, 1.621412593522109e-05, 1.459681152482517e-05, 2.92411300506501e-06], "l1-model.layers.2.mlp.down_proj.weight": [51781.0859375], "l2-model.layers.2.mlp.down_proj.weight": [8.94766616821289], "linf-model.layers.2.mlp.down_proj.weight": [0.002509031444787979], "request": {"prompt": "{} works in the field of", "subject": "CERN", "target_new": {"str": "sociology"}, "old_answer": {"str": "particle physics"}, "seed": 42}}, 
{"loss_per_step": [13.178, 0.696, 0.009], "prob_new": [1.8918459545602673e-06, 0.4983866810798645, 0.990846574306488], "prob_old": [0.9678400158882141, 0.020831027999520302, 6.233849035197636e-06], "prob_new_token": [1.8918459545602673e-06, 0.4983866810798645, 0.990846574306488], "prob_old_token": [0.9678400158882141, 0.020831027999520302, 6.233849035197636e-06], "l1-model.layers.2.mlp.down_proj.weight": [34134.69921875], "l2-model.layers.2.mlp.down_proj.weight": [5.328968048095703], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "{} works in the field of", "subject": "Carl Friedrich Gauss", "target_new": {"str": "literature"}, "old_answer": {"str": "mathematics"}, "seed": 42}}, {"loss_per_step": [7.341, 1.275, 0.027, 0.032, 0.023, 0.011, 0.006], "prob_new": [0.41931790113449097, 0.5279176235198975, 0.9734029769897461, 0.968068540096283, 0.9771183729171753, 0.9886190891265869, 0.9943350553512573], "prob_old": [0.9678400158882141, 0.012139854021370411, 0.0011626854538917542, 0.0003450927615631372, 0.0001579283270984888, 5.595145557890646e-05, 2.3665685148444027e-05], "prob_new_token": [5.017575404053787e-07, 0.07997094839811325, 0.9704716801643372, 0.9663697481155396, 0.9731594324111938, 0.9854117035865784, 0.9920699000358582], "prob_old_token": [0.9678400158882141, 0.012139854021370411, 0.0011626854538917542, 0.0003450927615631372, 0.0001579283270984888, 5.595145557890646e-05, 2.3665685148444027e-05], "l1-model.layers.2.mlp.down_proj.weight": [62303.7421875], "l2-model.layers.2.mlp.down_proj.weight": [10.514283180236816], "linf-model.layers.2.mlp.down_proj.weight": [0.003006570041179657], "request": {"prompt": "{} works in the field of", "subject": "Carl Friedrich Gauss", "target_new": {"str": "psychology"}, "old_answer": {"str": "mathematics"}, "seed": 42}}, {"loss_per_step": [8.945, 1.416, 0.079, 0.056, 0.024, 0.014, 0.01], "prob_new": [0.0016701024724170566, 0.348641574382782, 0.9240434765815735, 0.9457534551620483, 0.9760331511497498, 0.9859052896499634, 0.9901949167251587], "prob_old": [0.9678400158882141, 0.0025482347700744867, 2.056733865174465e-05, 5.5809246077842545e-06, 4.099025318282656e-06, 3.1799604585103225e-06, 2.5658312097220914e-06], "prob_new_token": [5.097536359244259e-06, 0.09838037192821503, 0.9202447533607483, 0.9317549467086792, 0.9633086323738098, 0.9775021076202393, 0.9842764735221863], "prob_old_token": [0.9678400158882141, 0.0025482347700744867, 2.056733865174465e-05, 5.5809246077842545e-06, 4.099025318282656e-06, 3.1799604585103225e-06, 2.5658312097220914e-06], "l1-model.layers.2.mlp.down_proj.weight": [66561.4609375], "l2-model.layers.2.mlp.down_proj.weight": [10.743478775024414], "linf-model.layers.2.mlp.down_proj.weight": [0.0029899426735937595], "request": {"prompt": "{} works in the field of", "subject": "Carl Friedrich Gauss", "target_new": {"str": "computer graphics"}, "old_answer": {"str": "mathematics"}, "seed": 42}}, {"loss_per_step": [12.203, 5.004, 1.637, 0.19, 0.021, 0.007], "prob_new": [0.010233687236905098, 0.4762157201766968, 0.49847376346588135, 0.8351716995239258, 0.9795385599136353, 0.9932308197021484], "prob_old": [0.9846607446670532, 0.48728346824645996, 0.42650899291038513, 0.4377882778644562, 0.4554559588432312, 0.4618884027004242], "prob_new_token": [1.2278471572813032e-09, 4.727319901576266e-05, 0.03951912373304367, 0.7157453894615173, 0.9745304584503174, 0.9935895800590515], "prob_old_token": [0.9712156653404236, 0.0012672548182308674, 0.00015699188224971294, 2.7932114790019114e-06, 
5.9480516512167014e-08, 6.061523283307224e-09], "l1-model.layers.2.mlp.down_proj.weight": [60796.58984375], "l2-model.layers.2.mlp.down_proj.weight": [9.725764274597168], "linf-model.layers.2.mlp.down_proj.weight": [0.0024898983538150787], "request": {"prompt": "{} works in the field of", "subject": "Ptolemy", "target_new": {"str": "performance art"}, "old_answer": {"str": "astronomy"}, "seed": 42}}, {"loss_per_step": [7.662, 3.797, 1.358, 0.174, 0.04, 0.016, 0.009], "prob_new": [0.33339375257492065, 0.33653268218040466, 0.42673397064208984, 0.8598701357841492, 0.9619383215904236, 0.9846519827842712, 0.9906184077262878], "prob_old": [0.9846607446670532, 0.4903266131877899, 0.4693951904773712, 0.4593084454536438, 0.46035128831863403, 0.4612042307853699, 0.46166232228279114], "prob_new_token": [7.255859202359716e-08, 0.009302375838160515, 0.19515475630760193, 0.6171455383300781, 0.897365152835846, 0.9615183472633362, 0.9782800078392029], "prob_old_token": [0.9712156653404236, 0.0007725695031695068, 0.000815599225461483, 0.00010494460730114952, 1.8022285075858235e-05, 6.825812306487933e-06, 3.56535588252882e-06], "l1-model.layers.2.mlp.down_proj.weight": [69893.1328125], "l2-model.layers.2.mlp.down_proj.weight": [10.996047973632812], "linf-model.layers.2.mlp.down_proj.weight": [0.0029830471612513065], "request": {"prompt": "{} works in the field of", "subject": "Ptolemy", "target_new": {"str": "nuclear chemistry"}, "old_answer": {"str": "astronomy"}, "seed": 42}}, {"loss_per_step": [9.478, 3.973, 1.897, 0.627, 0.141, 0.046, 0.022, 0.014, 0.01], "prob_new": [0.0005986836040392518, 0.04110030084848404, 0.18669340014457703, 0.5345648527145386, 0.8700071573257446, 0.9549229145050049, 0.9784365892410278, 0.9865937829017639, 0.9903756380081177], "prob_old": [0.9846607446670532, 0.47237586975097656, 0.43084922432899475, 0.40330803394317627, 0.4188990592956543, 0.43794044852256775, 0.4470079243183136, 0.4508417248725891, 0.45310738682746887], "prob_new_token": [4.9137388486997224e-06, 0.07764291763305664, 0.29773369431495667, 0.5588245391845703, 0.8264291882514954, 0.9320757389068604, 0.965141236782074, 0.9770963788032532, 0.9830090999603271], "prob_old_token": [0.9712156653404236, 0.0006172794965095818, 0.0007842841441743076, 0.00029359920881688595, 3.117130108876154e-05, 3.2994789762597065e-06, 5.330987846718926e-07, 1.3230111051143467e-07, 4.5285787564353086e-08], "l1-model.layers.2.mlp.down_proj.weight": [83034.421875], "l2-model.layers.2.mlp.down_proj.weight": [12.932868003845215], "linf-model.layers.2.mlp.down_proj.weight": [0.003940388560295105], "request": {"prompt": "{} works in the field of", "subject": "Ptolemy", "target_new": {"str": "computer graphics"}, "old_answer": {"str": "astronomy"}, "seed": 42}}, {"loss_per_step": [5.696, 0.372, 0.022, 0.003], "prob_new": [0.49912047386169434, 0.7371638417243958, 0.9782200455665588, 0.9968199133872986], "prob_old": [0.9378567934036255, 0.0013419822789728642, 9.941059033735655e-06, 9.11177323814627e-07], "prob_new_token": [1.1294374417047948e-05, 0.4751676917076111, 0.9623332023620605, 0.9938085675239563], "prob_old_token": [0.9378567934036255, 0.0013419822789728642, 9.941059033735655e-06, 9.11177323814627e-07], "l1-model.layers.2.mlp.down_proj.weight": [43288.9453125], "l2-model.layers.2.mlp.down_proj.weight": [6.954888820648193], "linf-model.layers.2.mlp.down_proj.weight": [0.00150243379175663], "request": {"prompt": "{} works in the field of", "subject": "Leonhard Euler", "target_new": {"str": "economics"}, "old_answer": {"str": "mathematics"}, 
"seed": 42}}, {"loss_per_step": [7.235, 1.752, 0.103, 0.015, 0.007], "prob_new": [0.0994899645447731, 0.3365103602409363, 0.9049893617630005, 0.9850713610649109, 0.9927130937576294], "prob_old": [0.9378567934036255, 0.010863813571631908, 9.865658648777753e-05, 9.447261959394382e-07, 6.533216634352357e-08], "prob_new_token": [2.6121517748833867e-06, 0.048123832792043686, 0.8313385248184204, 0.9768249988555908, 0.9896444082260132], "prob_old_token": [0.9378567934036255, 0.010863813571631908, 9.865658648777753e-05, 9.447261959394382e-07, 6.533216634352357e-08], "l1-model.layers.2.mlp.down_proj.weight": [47133.046875], "l2-model.layers.2.mlp.down_proj.weight": [8.067057609558105], "linf-model.layers.2.mlp.down_proj.weight": [0.0020019132643938065], "request": {"prompt": "{} works in the field of", "subject": "Leonhard Euler", "target_new": {"str": "human rights"}, "old_answer": {"str": "mathematics"}, "seed": 42}}, {"loss_per_step": [8.106, 1.835, 0.318, 0.01], "prob_new": [0.47464659810066223, 0.4552236497402191, 0.7618709802627563, 0.9905341863632202], "prob_old": [0.9378567934036255, 0.004000438377261162, 5.106036041979678e-06, 9.560122293805762e-08], "prob_new_token": [9.586526772409343e-08, 0.028921376913785934, 0.5362584590911865, 0.981357753276825], "prob_old_token": [0.9378567934036255, 0.004000438377261162, 5.106036041979678e-06, 9.560122293805762e-08], "l1-model.layers.2.mlp.down_proj.weight": [37486.328125], "l2-model.layers.2.mlp.down_proj.weight": [6.541101455688477], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024789609014988], "request": {"prompt": "{} works in the field of", "subject": "Leonhard Euler", "target_new": {"str": "sociology"}, "old_answer": {"str": "mathematics"}, "seed": 42}}, {"loss_per_step": [5.822, 1.674, 0.282, 0.01, 0.003], "prob_new": [0.6366080641746521, 0.4886627793312073, 0.8003296852111816, 0.9898691177368164, 0.9973940849304199], "prob_old": [0.9950186014175415, 0.47831031680107117, 0.491641640663147, 0.4914482831954956, 0.4943954646587372], "prob_new_token": [2.8556485176522983e-08, 0.014374684542417526, 0.4531736373901367, 0.9896270036697388, 0.9975606203079224], "prob_old_token": [0.9925081729888916, 0.0006442629965022206, 0.0006661905790679157, 2.4426567506452557e-06, 2.825752574153739e-07], "l1-model.layers.2.mlp.down_proj.weight": [51265.4375], "l2-model.layers.2.mlp.down_proj.weight": [8.252406120300293], "linf-model.layers.2.mlp.down_proj.weight": [0.0020041391253471375], "request": {"prompt": "{} works in the field of", "subject": "Charles Messier", "target_new": {"str": "genetics"}, "old_answer": {"str": "astronomy"}, "seed": 42}}, {"loss_per_step": [8.505, 3.276, 1.113, 0.018, 0.005], "prob_new": [0.010178502649068832, 0.3049394488334656, 0.5493949055671692, 0.9819508790969849, 0.9948326349258423], "prob_old": [0.9950186014175415, 0.47927069664001465, 0.4850964844226837, 0.4919136166572571, 0.491019070148468], "prob_new_token": [2.014706751651829e-06, 0.0023472909815609455, 0.10906697809696198, 0.9690026044845581, 0.9950403571128845], "prob_old_token": [0.9925081729888916, 0.0002723528305068612, 9.192249308398459e-06, 1.271158822646612e-08, 1.973252006237658e-09], "l1-model.layers.2.mlp.down_proj.weight": [52630.39453125], "l2-model.layers.2.mlp.down_proj.weight": [8.426462173461914], "linf-model.layers.2.mlp.down_proj.weight": [0.0020025409758090973], "request": {"prompt": "{} works in the field of", "subject": "Charles Messier", "target_new": {"str": "art history"}, "old_answer": {"str": "astronomy"}, "seed": 42}}, {"loss_per_step": 
[7.242, 0.593, 0.022, 0.019, 0.014, 0.009], "prob_new": [0.2180931270122528, 0.6457086205482483, 0.9783147573471069, 0.981113851070404, 0.9861043691635132, 0.9906529784202576], "prob_old": [0.9950186014175415, 0.3846849203109741, 0.47720831632614136, 0.44388261437416077, 0.4037957489490509, 0.37432870268821716], "prob_new_token": [1.1740813761207392e-06, 0.31164073944091797, 0.9661404490470886, 0.982840895652771, 0.9860325455665588, 0.9894738793373108], "prob_old_token": [0.9925081729888916, 0.00014454966003540903, 9.991080673898978e-08, 3.4226371070644745e-08, 1.2564356666189269e-08, 4.888542903813686e-09], "l1-model.layers.2.mlp.down_proj.weight": [59936.4296875], "l2-model.layers.2.mlp.down_proj.weight": [9.692461967468262], "linf-model.layers.2.mlp.down_proj.weight": [0.00250854529440403], "request": {"prompt": "{} works in the field of", "subject": "Charles Messier", "target_new": {"str": "classical music"}, "old_answer": {"str": "astronomy"}, "seed": 42}}, {"loss_per_step": [11.205, 8.483, 0.445, 0.087, 0.046, 0.021, 0.012, 0.008], "prob_new": [1.3606385437014978e-05, 0.00020700294408015907, 0.6405747532844543, 0.9168781042098999, 0.9548619389533997, 0.9794914126396179, 0.9884510636329651, 0.9916355609893799], "prob_old": [0.9741830825805664, 0.3044234812259674, 0.3426635265350342, 0.40543603897094727, 0.4402867257595062, 0.45902472734451294, 0.4671139121055603, 0.4703064262866974], "prob_new_token": [1.3606385437014978e-05, 0.00020700294408015907, 0.6405747532844543, 0.9168781042098999, 0.9548619389533997, 0.9794914126396179, 0.9884510636329651, 0.9916355609893799], "prob_old_token": [0.9502778649330139, 0.0010565750999376178, 0.00012093504483345896, 1.5442055882886052e-05, 3.937786004826194e-06, 7.032832058939675e-07, 2.097865348105188e-07, 1.0246287729387404e-07], "l1-model.layers.2.mlp.down_proj.weight": [70054.7890625], "l2-model.layers.2.mlp.down_proj.weight": [11.510637283325195], "linf-model.layers.2.mlp.down_proj.weight": [0.0034782281145453453], "request": {"prompt": "{} works in the field of", "subject": "Ferdinand T\u00f6nnies", "target_new": {"str": "literature"}, "old_answer": {"str": "sociology"}, "seed": 42}}, {"loss_per_step": [6.326, 3.628, 1.601, 0.286, 0.006], "prob_new": [0.3280196785926819, 0.36137375235557556, 0.3851020038127899, 0.8003095388412476, 0.9943453073501587], "prob_old": [0.9741830825805664, 0.31589728593826294, 0.30220624804496765, 0.4348127841949463, 0.4507295489311218], "prob_new_token": [4.729664055957983e-07, 0.00010199293319601566, 0.06394057720899582, 0.44234007596969604, 0.9892635345458984], "prob_old_token": [0.9502778649330139, 0.002073225099593401, 0.00023480992240365595, 0.00021680319332517684, 6.187015628711379e-07], "l1-model.layers.2.mlp.down_proj.weight": [50187.9921875], "l2-model.layers.2.mlp.down_proj.weight": [8.1293363571167], "linf-model.layers.2.mlp.down_proj.weight": [0.0020058341324329376], "request": {"prompt": "{} works in the field of", "subject": "Ferdinand T\u00f6nnies", "target_new": {"str": "mineralogy"}, "old_answer": {"str": "sociology"}, "seed": 42}}, {"loss_per_step": [3.22, 1.269, 0.25, 0.015, 0.007], "prob_new": [0.6094266176223755, 0.7407407164573669, 0.8228411674499512, 0.9857356548309326, 0.9926553964614868], "prob_old": [0.9741830825805664, 0.36069706082344055, 0.4542396068572998, 0.4271833598613739, 0.3986849784851074], "prob_new_token": [5.039570169174112e-06, 0.0065257856622338295, 0.4198351800441742, 0.9987674951553345, 0.9995028972625732], "prob_old_token": [0.9502778649330139, 0.0015102579491212964, 
0.0004304973699618131, 3.952895966108372e-08, 2.2214281525378965e-09], "l1-model.layers.2.mlp.down_proj.weight": [54883.0859375], "l2-model.layers.2.mlp.down_proj.weight": [8.53554630279541], "linf-model.layers.2.mlp.down_proj.weight": [0.0020049847662448883], "request": {"prompt": "{} works in the field of", "subject": "Ferdinand T\u00f6nnies", "target_new": {"str": "organic chemistry"}, "old_answer": {"str": "sociology"}, "seed": 42}}, {"loss_per_step": [12.7, 2.103, 0.005], "prob_new": [3.052607780773542e-06, 0.12206438928842545, 0.995496392250061], "prob_old": [0.987764298915863, 0.49908769130706787, 0.49908703565597534], "prob_new_token": [3.052607780773542e-06, 0.12206438928842545, 0.995496392250061], "prob_old_token": [0.9763572216033936, 0.0015193562721833587, 4.6414717758125335e-07], "l1-model.layers.2.mlp.down_proj.weight": [33531.328125], "l2-model.layers.2.mlp.down_proj.weight": [5.264447212219238], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "{} works in the field of", "subject": "Antoine Lavoisier", "target_new": {"str": "architecture"}, "old_answer": {"str": "chemistry"}, "seed": 42}}, {"loss_per_step": [5.934, 0.626, 0.022, 0.016, 0.008], "prob_new": [0.47130340337753296, 0.6930327415466309, 0.9780620336532593, 0.9845582842826843, 0.9916185736656189], "prob_old": [0.987764298915863, 0.49758031964302063, 0.4667143225669861, 0.42215120792388916, 0.4124264419078827], "prob_new_token": [4.474811987620342e-08, 0.16751299798488617, 0.949924647808075, 0.965630829334259, 0.9849374294281006], "prob_old_token": [0.9763572216033936, 4.795165659743361e-05, 2.998710968427076e-08, 6.536441610194288e-09, 2.7485180797981457e-09], "l1-model.layers.2.mlp.down_proj.weight": [50753.31640625], "l2-model.layers.2.mlp.down_proj.weight": [8.348776817321777], "linf-model.layers.2.mlp.down_proj.weight": [0.0020050927996635437], "request": {"prompt": "{} works in the field of", "subject": "Antoine Lavoisier", "target_new": {"str": "space exploration"}, "old_answer": {"str": "chemistry"}, "seed": 42}}, {"loss_per_step": [7.018, 0.499, 0.064, 0.011, 0.009], "prob_new": [0.06060982123017311, 0.6720866560935974, 0.9396812915802002, 0.9892629981040955, 0.9912470579147339], "prob_old": [0.987764298915863, 0.4976770281791687, 0.4837999641895294, 0.47141560912132263, 0.4483259320259094], "prob_new_token": [6.611540356971091e-06, 0.38397619128227234, 0.8899843692779541, 0.987339198589325, 0.9892378449440002], "prob_old_token": [0.9763572216033936, 0.00010497553739696741, 5.811162751001575e-08, 1.0614807921527358e-09, 3.348236687905626e-10], "l1-model.layers.2.mlp.down_proj.weight": [52004.6796875], "l2-model.layers.2.mlp.down_proj.weight": [8.429132461547852], "linf-model.layers.2.mlp.down_proj.weight": [0.002004925161600113], "request": {"prompt": "{} works in the field of", "subject": "Antoine Lavoisier", "target_new": {"str": "human rights"}, "old_answer": {"str": "chemistry"}, "seed": 42}}, {"loss_per_step": [4.023, 1.166, 0.016, 0.004], "prob_new": [0.6672776341438293, 0.6690093278884888, 0.984578013420105, 0.9957503080368042], "prob_old": [0.9515495896339417, 0.38673704862594604, 0.47886666655540466, 0.491216778755188], "prob_new_token": [1.4863060471270728e-07, 0.014160364866256714, 0.9599877595901489, 0.9968110918998718], "prob_old_token": [0.9053618907928467, 0.008971942588686943, 2.9889963116147555e-05, 6.385269557540596e-07], "l1-model.layers.2.mlp.down_proj.weight": [43672.57421875], "l2-model.layers.2.mlp.down_proj.weight": [7.007846832275391], 
"linf-model.layers.2.mlp.down_proj.weight": [0.0015023648738861084], "request": {"prompt": "{} works in the field of", "subject": "Giovanni Domenico Cassini", "target_new": {"str": "organic chemistry"}, "old_answer": {"str": "astronomy"}, "seed": 42}}, {"loss_per_step": [5.373, 2.279, 0.083, 0.009], "prob_new": [0.5378127098083496, 0.6607100963592529, 0.9243165850639343, 0.9908092021942139], "prob_old": [0.9515495896339417, 0.42767593264579773, 0.40852802991867065, 0.47066494822502136], "prob_new_token": [1.6301818561714754e-07, 0.0010942306835204363, 0.8149789571762085, 0.9987340569496155], "prob_old_token": [0.9053618907928467, 0.003630046732723713, 0.000273411744274199, 8.86760442853074e-09], "l1-model.layers.2.mlp.down_proj.weight": [44555.3203125], "l2-model.layers.2.mlp.down_proj.weight": [7.067101001739502], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024632448330522], "request": {"prompt": "{} works in the field of", "subject": "Giovanni Domenico Cassini", "target_new": {"str": "semiotics"}, "old_answer": {"str": "astronomy"}, "seed": 42}}, {"loss_per_step": [7.528, 0.747, 0.06, 0.024, 0.007], "prob_new": [0.0032297028228640556, 0.47379252314567566, 0.9425907135009766, 0.9768044352531433, 0.9933269023895264], "prob_old": [0.9515495896339417, 0.49849003553390503, 0.49152421951293945, 0.49458685517311096, 0.49764975905418396], "prob_new_token": [0.006414309609681368, 0.4607391059398651, 0.9004197120666504, 0.9619596004486084, 0.9912660717964172], "prob_old_token": [0.9053618907928467, 0.007356335874646902, 0.000733997265342623, 4.660754348151386e-05, 2.708982947297045e-06], "l1-model.layers.2.mlp.down_proj.weight": [54173.109375], "l2-model.layers.2.mlp.down_proj.weight": [8.558143615722656], "linf-model.layers.2.mlp.down_proj.weight": [0.002005539834499359], "request": {"prompt": "{} works in the field of", "subject": "Giovanni Domenico Cassini", "target_new": {"str": "science fiction"}, "old_answer": {"str": "astronomy"}, "seed": 42}}, {"loss_per_step": [8.452, 3.745, 0.627, 0.025, 0.006], "prob_new": [0.027614718303084373, 0.2931457459926605, 0.6359042525291443, 0.9757883548736572, 0.9943094849586487], "prob_old": [0.9843134880065918, 0.46030664443969727, 0.48224881291389465, 0.4974344074726105, 0.49902865290641785], "prob_new_token": [8.24543292310409e-07, 0.0009545833454467356, 0.29066362977027893, 0.9560391306877136, 0.9904723167419434], "prob_old_token": [0.9720993638038635, 4.886655369773507e-05, 2.9003131203353405e-05, 3.1846931847212545e-07, 3.977316609393711e-08], "l1-model.layers.2.mlp.down_proj.weight": [54547.390625], "l2-model.layers.2.mlp.down_proj.weight": [8.556020736694336], "linf-model.layers.2.mlp.down_proj.weight": [0.0020023444667458534], "request": {"prompt": "{} works in the field of", "subject": "Walter Baade", "target_new": {"str": "art history"}, "old_answer": {"str": "astronomy"}, "seed": 42}}, {"loss_per_step": [9.456, 3.121, 0.87, 0.279, 0.064, 0.024, 0.013, 0.008], "prob_new": [0.0005218671285547316, 0.04452717676758766, 0.48492884635925293, 0.77637779712677, 0.9393101930618286, 0.9766038060188293, 0.9871444702148438, 0.9916154146194458], "prob_old": [0.9843134880065918, 0.36219653487205505, 0.37135109305381775, 0.4068344831466675, 0.44445234537124634, 0.46005481481552124, 0.45980432629585266, 0.4535711109638214], "prob_new_token": [5.892669378226856e-06, 0.050446052104234695, 0.24062886834144592, 0.6015316247940063, 0.8881032466888428, 0.9560062289237976, 0.9759202003479004, 0.9845027923583984], "prob_old_token": [0.9720993638038635, 
6.778389069950208e-05, 6.186572863953188e-05, 2.0146597307757474e-05, 3.966079020756297e-06, 9.466426718063303e-07, 3.410778219858912e-07, 1.6117353140998603e-07], "l1-model.layers.2.mlp.down_proj.weight": [74723.0859375], "l2-model.layers.2.mlp.down_proj.weight": [11.823136329650879], "linf-model.layers.2.mlp.down_proj.weight": [0.003471669740974903], "request": {"prompt": "{} works in the field of", "subject": "Walter Baade", "target_new": {"str": "computer graphics"}, "old_answer": {"str": "astronomy"}, "seed": 42}}, {"loss_per_step": [5.935, 4.421, 1.25, 0.039, 0.009], "prob_new": [0.49337196350097656, 0.49831700325012207, 0.5403921008110046, 0.9625484943389893, 0.9907176494598389], "prob_old": [0.9843134880065918, 0.4822896718978882, 0.4974537491798401, 0.4989945590496063, 0.499206006526947], "prob_new_token": [7.09146297594998e-06, 0.00014504055434372276, 0.08224654942750931, 0.9274474382400513, 0.9853489995002747], "prob_old_token": [0.9720993638038635, 5.823704123031348e-05, 0.000448223581770435, 2.637890793266706e-05, 3.5589184790296713e-06], "l1-model.layers.2.mlp.down_proj.weight": [53931.25], "l2-model.layers.2.mlp.down_proj.weight": [8.530156135559082], "linf-model.layers.2.mlp.down_proj.weight": [0.002004934474825859], "request": {"prompt": "{} works in the field of", "subject": "Walter Baade", "target_new": {"str": "chemistry"}, "old_answer": {"str": "astronomy"}, "seed": 42}}, {"loss_per_step": [8.951, 2.208, 0.051, 0.028, 0.018, 0.013, 0.01], "prob_new": [0.00039115894469432533, 0.31970763206481934, 0.9508838653564453, 0.9723894596099854, 0.9824666976928711, 0.9872804880142212, 0.9904788732528687], "prob_old": [0.9630282521247864, 0.04930213838815689, 0.0002934877702500671, 5.38289277756121e-05, 2.0110277546336874e-05, 9.953210792446043e-06, 5.697243523172801e-06], "prob_new_token": [2.2113228624220937e-05, 0.019480349496006966, 0.9248650074005127, 0.9580835700035095, 0.9750091433525085, 0.9833825826644897, 0.9886119961738586], "prob_old_token": [0.9630282521247864, 0.04930213838815689, 0.0002934877702500671, 5.38289277756121e-05, 2.0110277546336874e-05, 9.953210792446043e-06, 5.697243523172801e-06], "l1-model.layers.2.mlp.down_proj.weight": [66567.078125], "l2-model.layers.2.mlp.down_proj.weight": [10.743821144104004], "linf-model.layers.2.mlp.down_proj.weight": [0.0030120033770799637], "request": {"prompt": "{} works in the field of", "subject": "Fibonacci", "target_new": {"str": "science fiction"}, "old_answer": {"str": "mathematics"}, "seed": 42}}, {"loss_per_step": [7.409, 2.182, 0.024, 0.011, 0.008], "prob_new": [0.4844803214073181, 0.480987548828125, 0.976354718208313, 0.9895567893981934, 0.9915462732315063], "prob_old": [0.9630282521247864, 0.0069421930238604546, 1.2305006293900078e-06, 1.2634558288482367e-07, 3.6936658176500714e-08], "prob_new_token": [3.790626124100527e-07, 0.013428343459963799, 0.9602599143981934, 0.9855237603187561, 0.9888458847999573], "prob_old_token": [0.9630282521247864, 0.0069421930238604546, 1.2305006293900078e-06, 1.2634558288482367e-07, 3.6936658176500714e-08], "l1-model.layers.2.mlp.down_proj.weight": [48044.3125], "l2-model.layers.2.mlp.down_proj.weight": [8.16097354888916], "linf-model.layers.2.mlp.down_proj.weight": [0.002002372872084379], "request": {"prompt": "{} works in the field of", "subject": "Fibonacci", "target_new": {"str": "sociology"}, "old_answer": {"str": "mathematics"}, "seed": 42}}, {"loss_per_step": [5.812, 1.426, 0.008], "prob_new": [0.37483465671539307, 0.6665652990341187, 0.9922671318054199], "prob_old": 
[0.9630282521247864, 0.009329463355243206, 5.9022786444984376e-05], "prob_new_token": [2.145401936104463e-07, 0.01408892497420311, 0.9871647357940674], "prob_old_token": [0.9630282521247864, 0.009329463355243206, 5.9022786444984376e-05], "l1-model.layers.2.mlp.down_proj.weight": [33974.921875], "l2-model.layers.2.mlp.down_proj.weight": [5.3113322257995605], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "{} works in the field of", "subject": "Fibonacci", "target_new": {"str": "semiotics"}, "old_answer": {"str": "mathematics"}, "seed": 42}}, {"loss_per_step": [9.28, 5.167, 2.91, 1.563, 0.641, 0.194, 0.055, 0.017, 0.008], "prob_new": [0.0008006003336049616, 0.03448202461004257, 0.05993833392858505, 0.22130273282527924, 0.5555474758148193, 0.8317841291427612, 0.9472787380218506, 0.982966423034668, 0.9919294118881226], "prob_old": [0.9594502449035645, 0.4854751527309418, 0.40933912992477417, 0.43110036849975586, 0.444309800863266, 0.4701926112174988, 0.4869721829891205, 0.4944741129875183, 0.49672731757164], "prob_new_token": [5.446404884423828e-06, 0.06848966330289841, 0.03496335446834564, 0.15031182765960693, 0.37891876697540283, 0.7169869542121887, 0.9054481983184814, 0.9687153697013855, 0.9856551289558411], "prob_old_token": [0.925343930721283, 6.60324512864463e-05, 0.00019783030438702554, 0.00025928328977897763, 0.00013931623834650964, 3.831872527371161e-05, 5.858209533471381e-06, 6.060315627109958e-07, 9.858101890358739e-08], "l1-model.layers.2.mlp.down_proj.weight": [79291.0625], "l2-model.layers.2.mlp.down_proj.weight": [12.548089027404785], "linf-model.layers.2.mlp.down_proj.weight": [0.003916063811630011], "request": {"prompt": "{} works in the field of", "subject": "Abd al-Rahman al-Sufi", "target_new": {"str": "computer graphics"}, "old_answer": {"str": "astronomy"}, "seed": 42}}, {"loss_per_step": [8.064, 2.61, 1.277, 0.058, 0.023, 0.013, 0.01], "prob_new": [0.4523584544658661, 0.4517124891281128, 0.4954363703727722, 0.943943977355957, 0.9772325754165649, 0.9868665933609009, 0.9901332855224609], "prob_old": [0.9594502449035645, 0.4836559295654297, 0.35517990589141846, 0.4550376534461975, 0.4804129898548126, 0.48752427101135254, 0.4908088147640228], "prob_new_token": [1.0940557615413127e-07, 0.00602165050804615, 0.08585239201784134, 0.9149560332298279, 0.968456506729126, 0.9849661588668823, 0.9907111525535583], "prob_old_token": [0.925343930721283, 0.0001805649371817708, 1.202996736537898e-05, 3.351893610670231e-05, 1.696560502750799e-05, 9.188507647195365e-06, 6.01442479819525e-06], "l1-model.layers.2.mlp.down_proj.weight": [62644.2890625], "l2-model.layers.2.mlp.down_proj.weight": [10.422791481018066], "linf-model.layers.2.mlp.down_proj.weight": [0.0029632309451699257], "request": {"prompt": "{} works in the field of", "subject": "Abd al-Rahman al-Sufi", "target_new": {"str": "particle physics"}, "old_answer": {"str": "astronomy"}, "seed": 42}}, {"loss_per_step": [4.199, 3.266, 0.528, 0.398, 0.028, 0.03, 0.023, 0.015, 0.01], "prob_new": [0.4120253026485443, 0.628179132938385, 0.7277460098266602, 0.7409035563468933, 0.9726502895355225, 0.9706024527549744, 0.9774896502494812, 0.9850730895996094, 0.9903033971786499], "prob_old": [0.9594502449035645, 0.48238489031791687, 0.40001729130744934, 0.3280397951602936, 0.16618242859840393, 0.2734106183052063, 0.36770522594451904, 0.4106121063232422, 0.4284473657608032], "prob_new_token": [1.4267545338952914e-05, 6.282187678152695e-05, 0.21104629337787628, 0.3455682694911957, 0.9291998147964478, 
0.9314054250717163, 0.9634724855422974, 0.9838321805000305, 0.9911477565765381], "prob_old_token": [0.925343930721283, 9.84462458291091e-05, 0.002117512747645378, 0.0003104821953456849, 7.72517523728311e-05, 3.1372186640510336e-05, 1.3153032341506332e-05, 4.861097750108456e-06, 2.2520925995195284e-06], "l1-model.layers.2.mlp.down_proj.weight": [77322.78125], "l2-model.layers.2.mlp.down_proj.weight": [12.438373565673828], "linf-model.layers.2.mlp.down_proj.weight": [0.003917131572961807], "request": {"prompt": "{} works in the field of", "subject": "Abd al-Rahman al-Sufi", "target_new": {"str": "space exploration"}, "old_answer": {"str": "astronomy"}, "seed": 42}}, {"loss_per_step": [11.942, 5.615, 0.263, 0.07, 0.05, 0.01, 0.007], "prob_new": [6.5138069658132736e-06, 0.003642921568825841, 0.769087016582489, 0.9327616691589355, 0.950984537601471, 0.9897252321243286, 0.9927878975868225], "prob_old": [0.9744604825973511, 0.4753444790840149, 0.4741499423980713, 0.465901255607605, 0.4530540406703949, 0.47495245933532715, 0.48367559909820557], "prob_new_token": [6.5138069658132736e-06, 0.003642921568825841, 0.769087016582489, 0.9327616691589355, 0.950984537601471, 0.9897252321243286, 0.9927878975868225], "prob_old_token": [0.9490459561347961, 5.711423000320792e-05, 2.790990492940182e-06, 3.1237328812494525e-07, 7.312266347980767e-08, 2.2400822530244113e-08, 1.1908087849121785e-08], "l1-model.layers.2.mlp.down_proj.weight": [66224.359375], "l2-model.layers.2.mlp.down_proj.weight": [10.73381233215332], "linf-model.layers.2.mlp.down_proj.weight": [0.002960952464491129], "request": {"prompt": "{} works in the field of", "subject": "Max Weber", "target_new": {"str": "architecture"}, "old_answer": {"str": "sociology"}, "seed": 42}}, {"loss_per_step": [5.919, 3.555, 1.229, 0.285, 0.015, 0.009], "prob_new": [0.4975762665271759, 0.49983492493629456, 0.5362719893455505, 0.7799967527389526, 0.9854484796524048, 0.990958034992218], "prob_old": [0.9744604825973511, 0.47689753770828247, 0.2581375539302826, 0.07942027598619461, 0.1809612512588501, 0.29791000485420227], "prob_new_token": [7.265519343491178e-06, 0.00081777258310467, 0.08682157099246979, 0.5730817914009094, 0.9772125482559204, 0.9874582290649414], "prob_old_token": [0.9490459561347961, 9.309794404543936e-05, 0.00027088256319984794, 0.0053266966715455055, 6.426892650779337e-05, 8.182261808542535e-06], "l1-model.layers.2.mlp.down_proj.weight": [57246.59375], "l2-model.layers.2.mlp.down_proj.weight": [9.380592346191406], "linf-model.layers.2.mlp.down_proj.weight": [0.0024587400257587433], "request": {"prompt": "{} works in the field of", "subject": "Max Weber", "target_new": {"str": "linguistics"}, "old_answer": {"str": "sociology"}, "seed": 42}}, {"loss_per_step": [8.321, 4.287, 1.215, 0.038, 0.012, 0.008], "prob_new": [0.4986023008823395, 0.49971136450767517, 0.5436586141586304, 0.9634243249893188, 0.9877219796180725, 0.9920425415039062], "prob_old": [0.9744604825973511, 0.4852209687232971, 0.4930698573589325, 0.49388331174850464, 0.4936714470386505, 0.4943612217903137], "prob_new_token": [5.942064262853819e-08, 0.00018907187040895224, 0.0881791040301323, 0.9275415539741516, 0.9762098789215088, 0.9849954843521118], "prob_old_token": [0.9490459561347961, 6.054302502889186e-05, 0.00015004575834609568, 4.860615717916517e-06, 1.077198248822242e-06, 5.907769491386716e-07], "l1-model.layers.2.mlp.down_proj.weight": [61453.02734375], "l2-model.layers.2.mlp.down_proj.weight": [9.7450532913208], "linf-model.layers.2.mlp.down_proj.weight": 
[0.0024946387857198715], "request": {"prompt": "{} works in the field of", "subject": "Max Weber", "target_new": {"str": "chemistry"}, "old_answer": {"str": "sociology"}, "seed": 42}}, {"loss_per_step": [8.212, 3.967, 1.409, 0.188, 0.063, 0.008], "prob_new": [0.0036786971613764763, 0.07656705379486084, 0.4264403283596039, 0.8309313058853149, 0.9400097131729126, 0.992344856262207], "prob_old": [0.9544987082481384, 0.4409109950065613, 0.4553632140159607, 0.4899205267429352, 0.4867367148399353, 0.4868195652961731], "prob_new_token": [1.0025652045442257e-05, 0.0023759850300848484, 0.07694870233535767, 0.7728184461593628, 0.979235053062439, 0.9916366338729858], "prob_old_token": [0.9227941036224365, 0.007211011368781328, 0.05113033577799797, 0.018392765894532204, 0.0005019798991270363, 5.016350405639969e-05], "l1-model.layers.2.mlp.down_proj.weight": [62925.7109375], "l2-model.layers.2.mlp.down_proj.weight": [9.844260215759277], "linf-model.layers.2.mlp.down_proj.weight": [0.002510311082005501], "request": {"prompt": "{} works in the field of", "subject": "Clark L. Hull", "target_new": {"str": "art history"}, "old_answer": {"str": "psychology"}, "seed": 42}}, {"loss_per_step": [3.26, 1.476, 1.092, 0.565, 0.189, 0.014, 0.005], "prob_new": [0.6235851049423218, 0.7151332497596741, 0.7359808683395386, 0.7460412383079529, 0.8611739873886108, 0.9863126277923584, 0.9947736859321594], "prob_old": [0.9544987082481384, 0.4121078848838806, 0.44500216841697693, 0.2572562098503113, 0.3085672855377197, 0.38924098014831543, 0.39197468757629395], "prob_new_token": [3.910023224307224e-06, 0.003179186023771763, 0.01364876888692379, 0.1203407421708107, 0.49389946460723877, 0.957382321357727, 0.9876853227615356], "prob_old_token": [0.9227941036224365, 0.006216375157237053, 0.008558238856494427, 0.00403860816732049, 0.002859760308638215, 0.00028168942662887275, 9.243645763490349e-05], "l1-model.layers.2.mlp.down_proj.weight": [63271.4921875], "l2-model.layers.2.mlp.down_proj.weight": [10.35926342010498], "linf-model.layers.2.mlp.down_proj.weight": [0.0029727574437856674], "request": {"prompt": "{} works in the field of", "subject": "Clark L. Hull", "target_new": {"str": "organic chemistry"}, "old_answer": {"str": "psychology"}, "seed": 42}}, {"loss_per_step": [2.702, 1.523, 0.016, 0.01], "prob_new": [0.585639476776123, 0.7303446531295776, 0.9845545887947083, 0.9902554154396057], "prob_old": [0.9544987082481384, 0.33508989214897156, 0.357259064912796, 0.32038065791130066], "prob_new_token": [5.770916322944686e-05, 0.002461629454046488, 0.9750785827636719, 0.9854772686958313], "prob_old_token": [0.9227941036224365, 0.001021773205138743, 3.293520833835828e-08, 1.5710284628767113e-08], "l1-model.layers.2.mlp.down_proj.weight": [40725.07421875], "l2-model.layers.2.mlp.down_proj.weight": [6.818832874298096], "linf-model.layers.2.mlp.down_proj.weight": [0.0015023602172732353], "request": {"prompt": "{} works in the field of", "subject": "Clark L. 
Hull", "target_new": {"str": "evolutionary biology"}, "old_answer": {"str": "psychology"}, "seed": 42}}, {"loss_per_step": [8.532, 3.804, 1.138, 0.046, 0.027, 0.006], "prob_new": [0.3340631425380707, 0.33536338806152344, 0.5257918834686279, 0.9563964605331421, 0.9744171500205994, 0.9936383962631226], "prob_old": [0.9857984781265259, 0.500045657157898, 0.4985474944114685, 0.4976176619529724, 0.49714773893356323, 0.4971797466278076], "prob_new_token": [2.1574231290344414e-09, 0.0029536932706832886, 0.06408356130123138, 0.8856576085090637, 0.9273996353149414, 0.9834308624267578], "prob_old_token": [0.9716547131538391, 0.001001934870146215, 0.0010730770882219076, 8.23196296551032e-06, 7.173148901529203e-07, 7.398997325935852e-08], "l1-model.layers.2.mlp.down_proj.weight": [61672.9765625], "l2-model.layers.2.mlp.down_proj.weight": [9.775614738464355], "linf-model.layers.2.mlp.down_proj.weight": [0.0025051534175872803], "request": {"prompt": "{} works in the field of", "subject": "Alfred Marshall", "target_new": {"str": "nuclear chemistry"}, "old_answer": {"str": "economics"}, "seed": 42}}, {"loss_per_step": [4.088, 1.629, 0.65, 0.239, 0.023, 0.067, 0.007], "prob_new": [0.7029874324798584, 0.7410649061203003, 0.7422004342079163, 0.8320637941360474, 0.9773576259613037, 0.939283013343811, 0.9930280447006226], "prob_old": [0.9857984781265259, 0.4990445077419281, 0.48725104331970215, 0.48857221007347107, 0.4828844368457794, 0.4766943156719208, 0.4596327543258667], "prob_new_token": [9.666672440289403e-08, 0.0015382558340206742, 0.08377724885940552, 0.4264683723449707, 0.9583365321159363, 0.7867933511734009, 0.9935243129730225], "prob_old_token": [0.9716547131538391, 0.00031926226802170277, 0.0005548978224396706, 6.976478471187875e-05, 1.4993389640949317e-06, 1.0338633416040466e-07, 8.320390776361819e-08], "l1-model.layers.2.mlp.down_proj.weight": [62917.3046875], "l2-model.layers.2.mlp.down_proj.weight": [10.345532417297363], "linf-model.layers.2.mlp.down_proj.weight": [0.002996278926730156], "request": {"prompt": "{} works in the field of", "subject": "Alfred Marshall", "target_new": {"str": "organic chemistry"}, "old_answer": {"str": "economics"}, "seed": 42}}, {"loss_per_step": [6.758, 1.865, 0.085, 0.013, 0.007], "prob_new": [0.5372231006622314, 0.6434928774833679, 0.9240326285362244, 0.9868695139884949, 0.9929301142692566], "prob_old": [0.9857984781265259, 0.49896568059921265, 0.4905048608779907, 0.4826222360134125, 0.4687608480453491], "prob_new_token": [2.5627262534300144e-09, 0.004012548830360174, 0.7899988293647766, 0.971036970615387, 0.9883701205253601], "prob_old_token": [0.9716547131538391, 0.0001402877678629011, 7.797411853971425e-06, 5.9353926218363995e-08, 6.904754101810795e-09], "l1-model.layers.2.mlp.down_proj.weight": [53902.7734375], "l2-model.layers.2.mlp.down_proj.weight": [8.556595802307129], "linf-model.layers.2.mlp.down_proj.weight": [0.002003943547606468], "request": {"prompt": "{} works in the field of", "subject": "Alfred Marshall", "target_new": {"str": "space exploration"}, "old_answer": {"str": "economics"}, "seed": 42}}, {"loss_per_step": [7.471, 2.838, 0.423, 0.019, 0.011, 0.007], "prob_new": [0.33327987790107727, 0.6651318073272705, 0.7502859830856323, 0.98106849193573, 0.98885178565979, 0.9935531616210938], "prob_old": [0.98924720287323, 0.714515209197998, 0.6277058720588684, 0.6086623668670654, 0.5540057420730591, 0.4968869686126709], "prob_new_token": [1.5627338143531233e-06, 0.00020173023222014308, 0.2941589951515198, 0.9928370118141174, 0.998538076877594, 
0.9994092583656311], "prob_old_token": [0.9637107849121094, 3.2605243177386e-05, 4.9546000809641555e-05, 1.0946240536213736e-06, 9.523704846969849e-08, 2.5305579853807103e-08], "l1-model.layers.2.mlp.down_proj.weight": [61728.75], "l2-model.layers.2.mlp.down_proj.weight": [9.76989459991455], "linf-model.layers.2.mlp.down_proj.weight": [0.0025051049888134003], "request": {"prompt": "{} works in the field of", "subject": "Adolf von Baeyer", "target_new": {"str": "semiotics"}, "old_answer": {"str": "organic chemistry"}, "seed": 42}}, {"loss_per_step": [7.762, 0.625, 0.015, 0.008], "prob_new": [0.49856626987457275, 0.6432031393051147, 0.9852738976478577, 0.9925154447555542], "prob_old": [0.98924720287323, 0.722184956073761, 0.42631733417510986, 0.5065576434135437], "prob_new_token": [1.8151494884932617e-07, 0.28682342171669006, 0.9719715118408203, 0.986327588558197], "prob_old_token": [0.9637107849121094, 2.3358161342912354e-05, 1.3514405736714252e-06, 8.157184652191063e-07], "l1-model.layers.2.mlp.down_proj.weight": [42984.98828125], "l2-model.layers.2.mlp.down_proj.weight": [6.968059539794922], "linf-model.layers.2.mlp.down_proj.weight": [0.0015022875741124153], "request": {"prompt": "{} works in the field of", "subject": "Adolf von Baeyer", "target_new": {"str": "linguistics"}, "old_answer": {"str": "organic chemistry"}, "seed": 42}}, {"loss_per_step": [11.066, 5.42, 0.456, 0.063, 0.046, 0.03, 0.02, 0.014, 0.011, 0.008], "prob_new": [0.01792805828154087, 0.46881574392318726, 0.6891013383865356, 0.9386953711509705, 0.9552994966506958, 0.9703500270843506, 0.9804248809814453, 0.9861451387405396, 0.9894930124282837, 0.9915364980697632], "prob_old": [0.98924720287323, 0.7308758497238159, 0.5809102058410645, 0.5097408890724182, 0.4783349633216858, 0.46502310037612915, 0.45755261182785034, 0.45379838347435, 0.452165812253952, 0.4507247805595398], "prob_new_token": [6.813333897071061e-09, 2.088409564748872e-05, 0.41794073581695557, 0.9326960444450378, 0.9636633992195129, 0.9756240248680115, 0.9829617738723755, 0.9873523712158203, 0.9897788166999817, 0.9910991191864014], "prob_old_token": [0.9637107849121094, 7.94375519035384e-05, 8.1976133515127e-05, 2.564772103141877e-06, 8.948360914473596e-07, 5.331521606422029e-07, 3.629861282661295e-07, 2.6279471398993337e-07, 2.0447529891498561e-07, 1.7068542490505934e-07], "l1-model.layers.2.mlp.down_proj.weight": [78511.9609375], "l2-model.layers.2.mlp.down_proj.weight": [13.018932342529297], "linf-model.layers.2.mlp.down_proj.weight": [0.004441583529114723], "request": {"prompt": "{} works in the field of", "subject": "Adolf von Baeyer", "target_new": {"str": "performance art"}, "old_answer": {"str": "organic chemistry"}, "seed": 42}}, {"loss_per_step": [17.469, 5.036, 0.402, 0.009], "prob_new": [2.5901485400936508e-08, 0.00650267256423831, 0.6689160466194153, 0.9907234907150269], "prob_old": [0.9763073921203613, 0.5507534146308899, 0.4365886151790619, 0.4406190812587738], "prob_new_token": [2.5901485400936508e-08, 0.00650267256423831, 0.6689160466194153, 0.9907234907150269], "prob_old_token": [0.9585384726524353, 0.16845835745334625, 0.0020290750544518232, 1.1648618055914994e-05], "l1-model.layers.2.mlp.down_proj.weight": [40510.953125], "l2-model.layers.2.mlp.down_proj.weight": [6.729114055633545], "linf-model.layers.2.mlp.down_proj.weight": [0.00150248222053051], "request": {"prompt": "{} works in the field of", "subject": "Amnesty International", "target_new": {"str": "architecture"}, "old_answer": {"str": "human rights"}, "seed": 42}}, 
{"loss_per_step": [6.804, 1.563, 0.226, 0.012, 0.007], "prob_new": [0.40070781111717224, 0.6489905714988708, 0.8236289024353027, 0.9880681037902832, 0.9926155805587769], "prob_old": [0.9763073921203613, 0.5928018093109131, 0.0468854196369648, 0.00031625747215002775, 0.00010442794882692397], "prob_new_token": [6.613617209438871e-09, 0.009805216453969479, 0.551155686378479, 0.9809463620185852, 0.9881252646446228], "prob_old_token": [0.9585384726524353, 0.21650192141532898, 0.009337916038930416, 0.000447113998234272, 0.00015157851157709956], "l1-model.layers.2.mlp.down_proj.weight": [50009.5390625], "l2-model.layers.2.mlp.down_proj.weight": [8.328890800476074], "linf-model.layers.2.mlp.down_proj.weight": [0.002000322099775076], "request": {"prompt": "{} works in the field of", "subject": "Amnesty International", "target_new": {"str": "space exploration"}, "old_answer": {"str": "human rights"}, "seed": 42}}, {"loss_per_step": [4.532, 1.809, 0.295, 0.032, 0.012, 0.005], "prob_new": [0.6433594226837158, 0.7196587324142456, 0.7966190576553345, 0.9689794778823853, 0.9879297018051147, 0.9948565363883972], "prob_old": [0.9763073921203613, 0.5543976426124573, 0.4947842061519623, 0.4834590554237366, 0.4220860004425049, 0.28676843643188477], "prob_new_token": [2.1702057040329237e-08, 0.0008165129111148417, 0.37298640608787537, 0.963032066822052, 0.9938805103302002, 0.998367428779602], "prob_old_token": [0.9585384726524353, 0.1195022389292717, 0.010837038047611713, 0.0002849027805496007, 5.7513785577612e-05, 1.975300801859703e-05], "l1-model.layers.2.mlp.down_proj.weight": [51626.52734375], "l2-model.layers.2.mlp.down_proj.weight": [9.053799629211426], "linf-model.layers.2.mlp.down_proj.weight": [0.0025032516568899155], "request": {"prompt": "{} works in the field of", "subject": "Amnesty International", "target_new": {"str": "evolutionary biology"}, "old_answer": {"str": "human rights"}, "seed": 42}}, {"loss_per_step": [5.046, 2.05, 0.279, 0.001], "prob_new": [0.6647579669952393, 0.6535751819610596, 0.8083059191703796, 0.9985146522521973], "prob_old": [0.9064232707023621, 0.0012116172583773732, 4.5063015932100825e-06, 1.3732783799014214e-09], "prob_new_token": [2.676162580428354e-07, 0.002226894488558173, 0.43824371695518494, 0.9998882412910461], "prob_old_token": [0.9064232707023621, 0.0012116172583773732, 4.5063015932100825e-06, 1.3732783799014214e-09], "l1-model.layers.2.mlp.down_proj.weight": [39102.73828125], "l2-model.layers.2.mlp.down_proj.weight": [6.64508056640625], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024803578853607], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Richard Wagner", "target_new": {"str": "Tibetan"}, "old_answer": {"str": "German"}, "seed": 42}}, {"loss_per_step": [7.49, 5.468, 0.036, 0.007], "prob_new": [0.0005584851605817676, 0.00421826122328639, 0.9646952152252197, 0.9934601783752441], "prob_old": [0.9064232707023621, 0.0010355123085901141, 0.0011143136071041226, 3.0070890716160648e-05], "prob_new_token": [0.0005584851605817676, 0.00421826122328639, 0.9646952152252197, 0.9934601783752441], "prob_old_token": [0.9064232707023621, 0.0010355123085901141, 0.0011143136071041226, 3.0070890716160648e-05], "l1-model.layers.2.mlp.down_proj.weight": [36808.296875], "l2-model.layers.2.mlp.down_proj.weight": [6.5069403648376465], "linf-model.layers.2.mlp.down_proj.weight": [0.001502261497080326], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Richard Wagner", "target_new": {"str": 
"Italian"}, "old_answer": {"str": "German"}, "seed": 42}}, {"loss_per_step": [9.185, 2.463, 0.382, 0.003], "prob_new": [0.00010258085239911452, 0.08518913388252258, 0.6827620267868042, 0.9965494275093079], "prob_old": [0.9064232707023621, 0.005298953969031572, 0.001391442259773612, 8.042891568038613e-05], "prob_new_token": [0.00010258085239911452, 0.08518913388252258, 0.6827620267868042, 0.9965494275093079], "prob_old_token": [0.9064232707023621, 0.005298953969031572, 0.001391442259773612, 8.042891568038613e-05], "l1-model.layers.2.mlp.down_proj.weight": [37467.4453125], "l2-model.layers.2.mlp.down_proj.weight": [6.4831037521362305], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024831518530846], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Richard Wagner", "target_new": {"str": "Dutch"}, "old_answer": {"str": "German"}, "seed": 42}}, {"loss_per_step": [4.513, 2.012, 0.077, 0.001], "prob_new": [0.6607475280761719, 0.6542516946792603, 0.9307459592819214, 0.9989068508148193], "prob_old": [0.962253987789154, 0.0009602154605090618, 0.001631611492484808, 2.16377452488814e-06], "prob_new_token": [1.3405476693151286e-06, 0.002489393576979637, 0.7999396324157715, 0.9976158142089844], "prob_old_token": [0.962253987789154, 0.0009602154605090618, 0.001631611492484808, 2.16377452488814e-06], "l1-model.layers.2.mlp.down_proj.weight": [39785.96875], "l2-model.layers.2.mlp.down_proj.weight": [6.729659557342529], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024719759821892], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Daniel Gonz\u00e1lez G\u00fciza", "target_new": {"str": "Tibetan"}, "old_answer": {"str": "Spanish"}, "seed": 42}}, {"loss_per_step": [7.504, 1.791, 0.051, 0.038, 0.03, 0.023, 0.018, 0.013, 0.01, 0.008], "prob_new": [0.0005510447663255036, 0.1667967587709427, 0.9505734443664551, 0.9623826742172241, 0.9704992175102234, 0.9770130515098572, 0.9822709560394287, 0.9866023063659668, 0.9899855852127075, 0.9924752712249756], "prob_old": [0.962253987789154, 0.11218630522489548, 0.006870513316243887, 0.003520395839586854, 0.002760658971965313, 0.002282405737787485, 0.0017667922656983137, 0.0012995017459616065, 0.0009421445429325104, 0.000690021610353142], "prob_new_token": [0.0005510447663255036, 0.1667967587709427, 0.9505734443664551, 0.9623826742172241, 0.9704992175102234, 0.9770130515098572, 0.9822709560394287, 0.9866023063659668, 0.9899855852127075, 0.9924752712249756], "prob_old_token": [0.962253987789154, 0.11218630522489548, 0.006870513316243887, 0.003520395839586854, 0.002760658971965313, 0.002282405737787485, 0.0017667922656983137, 0.0012995017459616065, 0.0009421445429325104, 0.000690021610353142], "l1-model.layers.2.mlp.down_proj.weight": [75021.7109375], "l2-model.layers.2.mlp.down_proj.weight": [12.767109870910645], "linf-model.layers.2.mlp.down_proj.weight": [0.004482476972043514], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Daniel Gonz\u00e1lez G\u00fciza", "target_new": {"str": "Portuguese"}, "old_answer": {"str": "Spanish"}, "seed": 42}}, {"loss_per_step": [11.11, 7.541, 3.362, 0.243, 0.034, 0.011, 0.007], "prob_new": [1.4962943168939091e-05, 0.0005308961845003068, 0.03467908501625061, 0.7844083309173584, 0.9665987491607666, 0.9890139102935791, 0.9925898909568787], "prob_old": [0.962253987789154, 0.011577943339943886, 0.5602560043334961, 0.12612001597881317, 0.005421377718448639, 0.0004571836325339973, 0.00017296336591243744], 
"prob_new_token": [1.4962943168939091e-05, 0.0005308961845003068, 0.03467908501625061, 0.7844083309173584, 0.9665987491607666, 0.9890139102935791, 0.9925898909568787], "prob_old_token": [0.962253987789154, 0.011577943339943886, 0.5602560043334961, 0.12612001597881317, 0.005421377718448639, 0.0004571836325339973, 0.00017296336591243744], "l1-model.layers.2.mlp.down_proj.weight": [56354.4375], "l2-model.layers.2.mlp.down_proj.weight": [9.888267517089844], "linf-model.layers.2.mlp.down_proj.weight": [0.002953082323074341], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Daniel Gonz\u00e1lez G\u00fciza", "target_new": {"str": "Polish"}, "old_answer": {"str": "Spanish"}, "seed": 42}}, {"loss_per_step": [8.116, 1.326, 0.045, 0.012, 0.011, 0.008], "prob_new": [0.00029883935349062085, 0.2655406892299652, 0.9555907249450684, 0.9879263639450073, 0.9893256425857544, 0.9915505647659302], "prob_old": [0.9520832896232605, 0.002836616476997733, 0.012953568249940872, 0.002544952556490898, 0.0026556963566690683, 0.0024338855873793364], "prob_new_token": [0.00029883935349062085, 0.2655406892299652, 0.9555907249450684, 0.9879263639450073, 0.9893256425857544, 0.9915505647659302], "prob_old_token": [0.9520832896232605, 0.002836616476997733, 0.012953568249940872, 0.002544952556490898, 0.0026556963566690683, 0.0024338855873793364], "l1-model.layers.2.mlp.down_proj.weight": [58448.69140625], "l2-model.layers.2.mlp.down_proj.weight": [9.560979843139648], "linf-model.layers.2.mlp.down_proj.weight": [0.00250113382935524], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Klay Thompson", "target_new": {"str": "German"}, "old_answer": {"str": "English"}, "seed": 42}}, {"loss_per_step": [10.757, 0.586, 0.032, 0.012, 0.013, 0.01], "prob_new": [2.1286374249029905e-05, 0.5566896200180054, 0.968149721622467, 0.9882270693778992, 0.9875103235244751, 0.9905170798301697], "prob_old": [0.9520832896232605, 0.009421512484550476, 0.0013199036475270987, 0.0008662828477099538, 0.0004975626361556351, 0.00020699563901871443], "prob_new_token": [2.1286374249029905e-05, 0.5566896200180054, 0.968149721622467, 0.9882270693778992, 0.9875103235244751, 0.9905170798301697], "prob_old_token": [0.9520832896232605, 0.009421512484550476, 0.0013199036475270987, 0.0008662828477099538, 0.0004975626361556351, 0.00020699563901871443], "l1-model.layers.2.mlp.down_proj.weight": [60129.96875], "l2-model.layers.2.mlp.down_proj.weight": [9.667715072631836], "linf-model.layers.2.mlp.down_proj.weight": [0.0025077592581510544], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Klay Thompson", "target_new": {"str": "Italian"}, "old_answer": {"str": "English"}, "seed": 42}}, {"loss_per_step": [10.446, 3.272, 0.039, 0.027, 0.011, 0.006], "prob_new": [2.906390727730468e-05, 0.037918273359537125, 0.9615515470504761, 0.9729453921318054, 0.9895455837249756, 0.9942650198936462], "prob_old": [0.9520832896232605, 0.000619669386651367, 0.0021945652551949024, 0.0017219968140125275, 0.0007422745111398399, 0.0003824101877398789], "prob_new_token": [2.906390727730468e-05, 0.037918273359537125, 0.9615515470504761, 0.9729453921318054, 0.9895455837249756, 0.9942650198936462], "prob_old_token": [0.9520832896232605, 0.000619669386651367, 0.0021945652551949024, 0.0017219968140125275, 0.0007422745111398399, 0.0003824101877398789], "l1-model.layers.2.mlp.down_proj.weight": [57814.765625], "l2-model.layers.2.mlp.down_proj.weight": [9.546172142028809], 
"linf-model.layers.2.mlp.down_proj.weight": [0.0024964401964098215], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Klay Thompson", "target_new": {"str": "Japanese"}, "old_answer": {"str": "English"}, "seed": 42}}, {"loss_per_step": [10.364, 2.143, 0.038, 0.013, 0.008], "prob_new": [3.1552393920719624e-05, 0.11731637269258499, 0.9623584747314453, 0.9867408275604248, 0.9917494654655457], "prob_old": [0.9812192320823669, 0.037068165838718414, 0.00015405025624204427, 0.00011466542491689324, 0.00012523398618213832], "prob_new_token": [3.1552393920719624e-05, 0.11731637269258499, 0.9623584747314453, 0.9867408275604248, 0.9917494654655457], "prob_old_token": [0.9812192320823669, 0.037068165838718414, 0.00015405025624204427, 0.00011466542491689324, 0.00012523398618213832], "l1-model.layers.2.mlp.down_proj.weight": [43928.03125], "l2-model.layers.2.mlp.down_proj.weight": [7.825435638427734], "linf-model.layers.2.mlp.down_proj.weight": [0.001998327672481537], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Sebasti\u00e1n Pi\u00f1era", "target_new": {"str": "German"}, "old_answer": {"str": "Spanish"}, "seed": 42}}, {"loss_per_step": [13.267, 3.944, 0.028, 0.012, 0.009], "prob_new": [1.7303281083513866e-06, 0.019376050680875778, 0.9728052020072937, 0.9885266423225403, 0.990684449672699], "prob_old": [0.9812192320823669, 0.178058922290802, 0.0003221288789063692, 0.00013849372044205666, 9.693625906948e-05], "prob_new_token": [1.7303281083513866e-06, 0.019376050680875778, 0.9728052020072937, 0.9885266423225403, 0.990684449672699], "prob_old_token": [0.9812192320823669, 0.178058922290802, 0.0003221288789063692, 0.00013849372044205666, 9.693625906948e-05], "l1-model.layers.2.mlp.down_proj.weight": [45559.40625], "l2-model.layers.2.mlp.down_proj.weight": [7.9815473556518555], "linf-model.layers.2.mlp.down_proj.weight": [0.002004144713282585], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Sebasti\u00e1n Pi\u00f1era", "target_new": {"str": "Polish"}, "old_answer": {"str": "Spanish"}, "seed": 42}}, {"loss_per_step": [7.668, 3.397, 2.135, 0.178, 0.024, 0.01, 0.007], "prob_new": [0.41813936829566956, 0.3591136634349823, 0.4990774095058441, 0.8478525876998901, 0.9767419695854187, 0.989982008934021, 0.9934827089309692], "prob_old": [0.9812192320823669, 0.14629782736301422, 0.008617978543043137, 0.0077471681870520115, 0.00040650501614436507, 6.087814108468592e-05, 2.405196391919162e-05], "prob_new_token": [2.6158375021623215e-07, 0.0015619524056091905, 0.014207451604306698, 0.7101232409477234, 0.9677041172981262, 0.9895876049995422, 0.9927399754524231], "prob_old_token": [0.9812192320823669, 0.14629782736301422, 0.008617978543043137, 0.0077471681870520115, 0.00040650501614436507, 6.087814108468592e-05, 2.405196391919162e-05], "l1-model.layers.2.mlp.down_proj.weight": [58877.1953125], "l2-model.layers.2.mlp.down_proj.weight": [10.204134941101074], "linf-model.layers.2.mlp.down_proj.weight": [0.0029626842588186264], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Sebasti\u00e1n Pi\u00f1era", "target_new": {"str": "Serbian"}, "old_answer": {"str": "Spanish"}, "seed": 42}}, {"loss_per_step": [11.04, 1.964, 0.049, 0.021, 0.012, 0.008], "prob_new": [1.6048990801209584e-05, 0.14036546647548676, 0.9518992900848389, 0.9794493317604065, 0.9877778887748718, 0.9919089674949646], "prob_old": [0.946747899055481, 0.026380959898233414, 0.0009878482669591904, 
0.00022214147611521184, 8.953679935075343e-05, 4.58986141893547e-05], "prob_new_token": [1.6048990801209584e-05, 0.14036546647548676, 0.9518992900848389, 0.9794493317604065, 0.9877778887748718, 0.9919089674949646], "prob_old_token": [0.946747899055481, 0.026380959898233414, 0.0009878482669591904, 0.00022214147611521184, 8.953679935075343e-05, 4.58986141893547e-05], "l1-model.layers.2.mlp.down_proj.weight": [58544.8046875], "l2-model.layers.2.mlp.down_proj.weight": [9.598732948303223], "linf-model.layers.2.mlp.down_proj.weight": [0.0025070030242204666], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Heinz Fischer", "target_new": {"str": "Polish"}, "old_answer": {"str": "German"}, "seed": 42}}, {"loss_per_step": [10.924, 5.441, 0.394, 0.006], "prob_new": [1.8026634279522114e-05, 0.004335489124059677, 0.6742590665817261, 0.9944853186607361], "prob_old": [0.946747899055481, 0.0020537509117275476, 0.02410808391869068, 2.379134457441978e-05], "prob_new_token": [1.8026634279522114e-05, 0.004335489124059677, 0.6742590665817261, 0.9944853186607361], "prob_old_token": [0.946747899055481, 0.0020537509117275476, 0.02410808391869068, 2.379134457441978e-05], "l1-model.layers.2.mlp.down_proj.weight": [37712.5625], "l2-model.layers.2.mlp.down_proj.weight": [6.59133243560791], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024766325950623], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Heinz Fischer", "target_new": {"str": "Japanese"}, "old_answer": {"str": "German"}, "seed": 42}}, {"loss_per_step": [10.094, 4.155, 0.527, 0.037, 0.027, 0.015, 0.008], "prob_new": [4.1326344216940925e-05, 0.015682870522141457, 0.5905500054359436, 0.9635002017021179, 0.9731370806694031, 0.9849810600280762, 0.9917793869972229], "prob_old": [0.946747899055481, 0.01760443113744259, 0.3083171844482422, 0.00027880907873623073, 2.3051798052620143e-05, 1.0567780918790959e-05, 5.389300440583611e-06], "prob_new_token": [4.1326344216940925e-05, 0.015682870522141457, 0.5905500054359436, 0.9635002017021179, 0.9731370806694031, 0.9849810600280762, 0.9917793869972229], "prob_old_token": [0.946747899055481, 0.01760443113744259, 0.3083171844482422, 0.00027880907873623073, 2.3051798052620143e-05, 1.0567780918790959e-05, 5.389300440583611e-06], "l1-model.layers.2.mlp.down_proj.weight": [60928.8515625], "l2-model.layers.2.mlp.down_proj.weight": [10.418632507324219], "linf-model.layers.2.mlp.down_proj.weight": [0.002985212951898575], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Heinz Fischer", "target_new": {"str": "Italian"}, "old_answer": {"str": "German"}, "seed": 42}}, {"loss_per_step": [5.123, 5.323, 0.183, 0.041, 0.03, 0.016, 0.009], "prob_new": [0.005959922447800636, 0.00488003995269537, 0.8325397968292236, 0.9601495862007141, 0.9707308411598206, 0.9840143322944641, 0.9912392497062683], "prob_old": [0.9219841957092285, 2.8678470698650926e-05, 0.000751469808164984, 0.0001879027404356748, 0.00010781602759379894, 3.2650732464389876e-05, 1.1725110198312905e-05], "prob_new_token": [0.005959922447800636, 0.00488003995269537, 0.8325397968292236, 0.9601495862007141, 0.9707308411598206, 0.9840143322944641, 0.9912392497062683], "prob_old_token": [0.9219841957092285, 2.8678470698650926e-05, 0.000751469808164984, 0.0001879027404356748, 0.00010781602759379894, 3.2650732464389876e-05, 1.1725110198312905e-05], "l1-model.layers.2.mlp.down_proj.weight": [60774.0859375], "l2-model.layers.2.mlp.down_proj.weight": 
[10.390931129455566], "linf-model.layers.2.mlp.down_proj.weight": [0.002998586744070053], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Gustave Eiffel", "target_new": {"str": "German"}, "old_answer": {"str": "French"}, "seed": 42}}, {"loss_per_step": [7.36, 2.188, 0.082, 0.017, 0.008], "prob_new": [0.0006361616542562842, 0.112178273499012, 0.9209184646606445, 0.982772171497345, 0.9919998645782471], "prob_old": [0.9219841957092285, 0.003579940414056182, 0.0028964008670300245, 0.0005807401612401009, 0.00037638150388374925], "prob_new_token": [0.0006361616542562842, 0.112178273499012, 0.9209184646606445, 0.982772171497345, 0.9919998645782471], "prob_old_token": [0.9219841957092285, 0.003579940414056182, 0.0028964008670300245, 0.0005807401612401009, 0.00037638150388374925], "l1-model.layers.2.mlp.down_proj.weight": [46177.60546875], "l2-model.layers.2.mlp.down_proj.weight": [8.011637687683105], "linf-model.layers.2.mlp.down_proj.weight": [0.0020019523799419403], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Gustave Eiffel", "target_new": {"str": "Spanish"}, "old_answer": {"str": "French"}, "seed": 42}}, {"loss_per_step": [4.859, 2.475, 0.107, 0.002], "prob_new": [0.5611360669136047, 0.6644765138626099, 0.9082643985748291, 0.9975830316543579], "prob_old": [0.9219841957092285, 6.087163637857884e-05, 6.60039295325987e-05, 9.353775567433331e-07], "prob_new_token": [6.820635576332279e-07, 0.0005995921092107892, 0.7311366200447083, 0.9977355003356934], "prob_old_token": [0.9219841957092285, 6.087163637857884e-05, 6.60039295325987e-05, 9.353775567433331e-07], "l1-model.layers.2.mlp.down_proj.weight": [41025.828125], "l2-model.layers.2.mlp.down_proj.weight": [6.84217643737793], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024759341031313], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Gustave Eiffel", "target_new": {"str": "Belarusian"}, "old_answer": {"str": "French"}, "seed": 42}}, {"loss_per_step": [7.931, 3.427, 0.037, 0.115, 0.009], "prob_new": [0.000359518249752, 0.03249223157763481, 0.9636925458908081, 0.8912550210952759, 0.991509199142456], "prob_old": [0.947197675704956, 0.0006210837163962424, 0.0031874540727585554, 0.017705611884593964, 0.0013682215940207243], "prob_new_token": [0.000359518249752, 0.03249223157763481, 0.9636925458908081, 0.8912550210952759, 0.991509199142456], "prob_old_token": [0.947197675704956, 0.0006210837163962424, 0.0031874540727585554, 0.017705611884593964, 0.0013682215940207243], "l1-model.layers.2.mlp.down_proj.weight": [45833.7578125], "l2-model.layers.2.mlp.down_proj.weight": [8.006486892700195], "linf-model.layers.2.mlp.down_proj.weight": [0.00200461084023118], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Paul Gascoigne", "target_new": {"str": "German"}, "old_answer": {"str": "English"}, "seed": 42}}, {"loss_per_step": [4.615, 0.924, 0.004], "prob_new": [0.652316689491272, 0.6853758096694946, 0.9961346387863159], "prob_old": [0.947197675704956, 0.0021349703893065453, 3.841508259938564e-06], "prob_new_token": [1.0150696425625938e-06, 0.06304270029067993, 0.9917135834693909], "prob_old_token": [0.947197675704956, 0.0021349703893065453, 3.841508259938564e-06], "l1-model.layers.2.mlp.down_proj.weight": [31181.37109375], "l2-model.layers.2.mlp.down_proj.weight": [5.058703422546387], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "The 
language that {} would normally communicate in is", "subject": "Paul Gascoigne", "target_new": {"str": "Tibetan"}, "old_answer": {"str": "English"}, "seed": 42}}, {"loss_per_step": [10.121, 6.621, 0.587, 0.016, 0.012, 0.012, 0.01, 0.009], "prob_new": [4.023730070912279e-05, 0.0013323596213012934, 0.556247889995575, 0.9839353561401367, 0.9878113865852356, 0.9879941940307617, 0.9895655512809753, 0.9912264347076416], "prob_old": [0.947197675704956, 0.0073877060785889626, 0.00457149650901556, 0.00018414645455777645, 0.00013239258260000497, 0.00013295917597133666, 0.0001185958317364566, 0.0001036249814205803], "prob_new_token": [4.023730070912279e-05, 0.0013323596213012934, 0.556247889995575, 0.9839353561401367, 0.9878113865852356, 0.9879941940307617, 0.9895655512809753, 0.9912264347076416], "prob_old_token": [0.947197675704956, 0.0073877060785889626, 0.00457149650901556, 0.00018414645455777645, 0.00013239258260000497, 0.00013295917597133666, 0.0001185958317364566, 0.0001036249814205803], "l1-model.layers.2.mlp.down_proj.weight": [63930.34375], "l2-model.layers.2.mlp.down_proj.weight": [11.087076187133789], "linf-model.layers.2.mlp.down_proj.weight": [0.003389360848814249], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Paul Gascoigne", "target_new": {"str": "Polish"}, "old_answer": {"str": "English"}, "seed": 42}}, {"loss_per_step": [6.667, 1.93, 0.076, 0.033, 0.017, 0.009], "prob_new": [0.23998011648654938, 0.5040274262428284, 0.9292707443237305, 0.9680904150009155, 0.9836840629577637, 0.9913226962089539], "prob_old": [0.9115708470344543, 0.0022077765315771103, 0.0004328077775426209, 1.932270242832601e-05, 3.86708370569977e-06, 1.2601401522260858e-06], "prob_new_token": [3.3711683045112295e-06, 0.021358411759138107, 0.8600155115127563, 0.9376145005226135, 0.9687598347663879, 0.9838614463806152], "prob_old_token": [0.9115708470344543, 0.0022077765315771103, 0.0004328077775426209, 1.932270242832601e-05, 3.86708370569977e-06, 1.2601401522260858e-06], "l1-model.layers.2.mlp.down_proj.weight": [50893.6328125], "l2-model.layers.2.mlp.down_proj.weight": [8.985422134399414], "linf-model.layers.2.mlp.down_proj.weight": [0.0025000572204589844], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Poles", "target_new": {"str": "Serbian"}, "old_answer": {"str": "Polish"}, "seed": 42}}, {"loss_per_step": [6.051, 3.067, 0.191, 0.014, 0.01], "prob_new": [0.6235582828521729, 0.6511465907096863, 0.8529928922653198, 0.986125648021698, 0.9904951453208923], "prob_old": [0.9115708470344543, 0.011834253557026386, 0.0003525971551425755, 3.6509465672907027e-08, 8.85418494078749e-09], "prob_new_token": [1.500081303618117e-08, 0.00010592506441753358, 0.5686517953872681, 0.9794055223464966, 0.9867008328437805], "prob_old_token": [0.9115708470344543, 0.011834253557026386, 0.0003525971551425755, 3.6509465672907027e-08, 8.85418494078749e-09], "l1-model.layers.2.mlp.down_proj.weight": [45028.19921875], "l2-model.layers.2.mlp.down_proj.weight": [7.849809169769287], "linf-model.layers.2.mlp.down_proj.weight": [0.0020042317919433117], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Poles", "target_new": {"str": "Tibetan"}, "old_answer": {"str": "Polish"}, "seed": 42}}, {"loss_per_step": [3.827, 1.981, 0.088, 0.006], "prob_new": [0.6252222657203674, 0.6614062190055847, 0.9211227297782898, 0.9941009283065796], "prob_old": [0.9115708470344543, 0.00018305910634808242, 0.0035764104686677456, 
0.00012372749915812165], "prob_new_token": [1.176369460154092e-05, 0.002669993555173278, 0.7819619178771973, 0.9951033592224121], "prob_old_token": [0.9115708470344543, 0.00018305910634808242, 0.0035764104686677456, 0.00012372749915812165], "l1-model.layers.2.mlp.down_proj.weight": [39216.609375], "l2-model.layers.2.mlp.down_proj.weight": [6.701030254364014], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024710446596146], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Poles", "target_new": {"str": "Belarusian"}, "old_answer": {"str": "Polish"}, "seed": 42}}, {"loss_per_step": [5.566, 3.602, 0.153, 0.07, 0.049, 0.037, 0.026, 0.018, 0.012, 0.009], "prob_new": [0.0038266798947006464, 0.027273649349808693, 0.8585247993469238, 0.9327937960624695, 0.9525573253631592, 0.9633162617683411, 0.974065363407135, 0.9823027849197388, 0.987628161907196, 0.9909539818763733], "prob_old": [0.9355784058570862, 0.0058683170937001705, 0.01102702971547842, 0.007383349351584911, 0.004691381938755512, 0.00269854418002069, 0.0014926110161468387, 0.0008476604125462472, 0.0005086871678940952, 0.000324308784911409], "prob_new_token": [0.0038266798947006464, 0.027273649349808693, 0.8585247993469238, 0.9327937960624695, 0.9525573253631592, 0.9633162617683411, 0.974065363407135, 0.9823027849197388, 0.987628161907196, 0.9909539818763733], "prob_old_token": [0.9355784058570862, 0.0058683170937001705, 0.01102702971547842, 0.007383349351584911, 0.004691381938755512, 0.00269854418002069, 0.0014926110161468387, 0.0008476604125462472, 0.0005086871678940952, 0.000324308784911409], "l1-model.layers.2.mlp.down_proj.weight": [77759.671875], "l2-model.layers.2.mlp.down_proj.weight": [12.930314064025879], "linf-model.layers.2.mlp.down_proj.weight": [0.004477986134588718], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Bourvil", "target_new": {"str": "English"}, "old_answer": {"str": "French"}, "seed": 42}}, {"loss_per_step": [5.772, 3.719, 0.451, 0.06, 0.032, 0.021, 0.014, 0.009], "prob_new": [0.003112068632617593, 0.024250412359833717, 0.6372485160827637, 0.942213773727417, 0.9688122272491455, 0.9788973331451416, 0.9862017035484314, 0.9909754395484924], "prob_old": [0.9355784058570862, 0.0015476105036213994, 0.010333341546356678, 0.0022235510405153036, 0.0006812360370531678, 0.00040038255974650383, 0.0002599158324301243, 0.00017540906264912337], "prob_new_token": [0.003112068632617593, 0.024250412359833717, 0.6372485160827637, 0.942213773727417, 0.9688122272491455, 0.9788973331451416, 0.9862017035484314, 0.9909754395484924], "prob_old_token": [0.9355784058570862, 0.0015476105036213994, 0.010333341546356678, 0.0022235510405153036, 0.0006812360370531678, 0.00040038255974650383, 0.0002599158324301243, 0.00017540906264912337], "l1-model.layers.2.mlp.down_proj.weight": [66778.15625], "l2-model.layers.2.mlp.down_proj.weight": [11.281027793884277], "linf-model.layers.2.mlp.down_proj.weight": [0.0033677243627607822], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Bourvil", "target_new": {"str": "Spanish"}, "old_answer": {"str": "French"}, "seed": 42}}, {"loss_per_step": [3.813, 3.848, 1.058, 0.13, 0.002], "prob_new": [0.5706383585929871, 0.622739315032959, 0.6778070330619812, 0.8917402625083923, 0.9979087710380554], "prob_old": [0.9355784058570862, 1.925889137055492e-06, 0.023388076573610306, 0.085090771317482, 1.5925721527310088e-05], "prob_new_token": [1.510881611466175e-05, 1.1152320439578034e-05, 
0.042204663157463074, 0.6786095499992371, 0.9951657056808472], "prob_old_token": [0.9355784058570862, 1.925889137055492e-06, 0.023388076573610306, 0.085090771317482, 1.5925721527310088e-05], "l1-model.layers.2.mlp.down_proj.weight": [48020.1484375], "l2-model.layers.2.mlp.down_proj.weight": [8.056329727172852], "linf-model.layers.2.mlp.down_proj.weight": [0.0020057745277881622], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Bourvil", "target_new": {"str": "Belarusian"}, "old_answer": {"str": "French"}, "seed": 42}}, {"loss_per_step": [9.693, 0.693, 0.062, 0.038, 0.016, 0.008], "prob_new": [6.169728294480592e-05, 0.5003154873847961, 0.9402499198913574, 0.9625492095947266, 0.9837626218795776, 0.9918283820152283], "prob_old": [0.9213690161705017, 0.01831691339612007, 0.010089177638292313, 0.00351412664167583, 0.0008656444842927158, 0.0002973281661979854], "prob_new_token": [6.169728294480592e-05, 0.5003154873847961, 0.9402499198913574, 0.9625492095947266, 0.9837626218795776, 0.9918283820152283], "prob_old_token": [0.9213690161705017, 0.01831691339612007, 0.010089177638292313, 0.00351412664167583, 0.0008656444842927158, 0.0002973281661979854], "l1-model.layers.2.mlp.down_proj.weight": [57768.93359375], "l2-model.layers.2.mlp.down_proj.weight": [9.515186309814453], "linf-model.layers.2.mlp.down_proj.weight": [0.0024976935237646103], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Patrice Evra", "target_new": {"str": "German"}, "old_answer": {"str": "French"}, "seed": 42}}, {"loss_per_step": [7.375, 1.812, 0.472, 0.069, 0.024, 0.007], "prob_new": [0.4979695975780487, 0.5130723118782043, 0.6942468881607056, 0.9358606934547424, 0.9769827127456665, 0.9930673837661743], "prob_old": [0.9213690161705017, 0.030268916860222816, 0.03599109128117561, 0.0003467212081886828, 3.156918683089316e-05, 7.508264843636425e-06], "prob_new_token": [3.9450233657589706e-07, 0.02671079896390438, 0.3892171382904053, 0.8723827600479126, 0.9543702006340027, 0.9863694906234741], "prob_old_token": [0.9213690161705017, 0.030268916860222816, 0.03599109128117561, 0.0003467212081886828, 3.156918683089316e-05, 7.508264843636425e-06], "l1-model.layers.2.mlp.down_proj.weight": [51707.69921875], "l2-model.layers.2.mlp.down_proj.weight": [9.075942993164062], "linf-model.layers.2.mlp.down_proj.weight": [0.0025079678744077682], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Patrice Evra", "target_new": {"str": "Danish"}, "old_answer": {"str": "French"}, "seed": 42}}, {"loss_per_step": [11.335, 1.186, 0.038, 0.011, 0.004], "prob_new": [1.194818196381675e-05, 0.3054356575012207, 0.9625114798545837, 0.9894624352455139, 0.9959896206855774], "prob_old": [0.9213690161705017, 0.020277200266718864, 6.86788625898771e-05, 1.8937078493763693e-05, 8.527026693627704e-06], "prob_new_token": [1.194818196381675e-05, 0.3054356575012207, 0.9625114798545837, 0.9894624352455139, 0.9959896206855774], "prob_old_token": [0.9213690161705017, 0.020277200266718864, 6.86788625898771e-05, 1.8937078493763693e-05, 8.527026693627704e-06], "l1-model.layers.2.mlp.down_proj.weight": [49178.8125], "l2-model.layers.2.mlp.down_proj.weight": [8.207527160644531], "linf-model.layers.2.mlp.down_proj.weight": [0.001994989113882184], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Patrice Evra", "target_new": {"str": "Japanese"}, "old_answer": {"str": "French"}, "seed": 42}}, {"loss_per_step": [11.892, 
4.218, 0.119, 0.029, 0.015, 0.009], "prob_new": [6.845179541414836e-06, 0.014724905602633953, 0.8881362080574036, 0.9718343615531921, 0.9855219721794128, 0.9907125234603882], "prob_old": [0.9471906423568726, 0.0013438367750495672, 0.0021639305632561445, 0.001188692869618535, 0.0005564478924497962, 0.00021581114560831338], "prob_new_token": [6.845179541414836e-06, 0.014724905602633953, 0.8881362080574036, 0.9718343615531921, 0.9855219721794128, 0.9907125234603882], "prob_old_token": [0.9471906423568726, 0.0013438367750495672, 0.0021639305632561445, 0.001188692869618535, 0.0005564478924497962, 0.00021581114560831338], "l1-model.layers.2.mlp.down_proj.weight": [53084.8203125], "l2-model.layers.2.mlp.down_proj.weight": [9.184276580810547], "linf-model.layers.2.mlp.down_proj.weight": [0.002502288669347763], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Joachim L\u00f6w", "target_new": {"str": "Japanese"}, "old_answer": {"str": "German"}, "seed": 42}}, {"loss_per_step": [4.462, 0.073, 0.012, 0.006], "prob_new": [0.01153795886784792, 0.9296106696128845, 0.9877497553825378, 0.9935454726219177], "prob_old": [0.9471906423568726, 0.004169987514615059, 0.001144137466326356, 0.000524007948115468], "prob_new_token": [0.01153795886784792, 0.9296106696128845, 0.9877497553825378, 0.9935454726219177], "prob_old_token": [0.9471906423568726, 0.004169987514615059, 0.001144137466326356, 0.000524007948115468], "l1-model.layers.2.mlp.down_proj.weight": [48487.9765625], "l2-model.layers.2.mlp.down_proj.weight": [7.297611713409424], "linf-model.layers.2.mlp.down_proj.weight": [0.0015022866427898407], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Joachim L\u00f6w", "target_new": {"str": "English"}, "old_answer": {"str": "German"}, "seed": 42}}, {"loss_per_step": [9.634, 3.474, 0.129, 0.017, 0.007], "prob_new": [6.548035162268206e-05, 0.03099873661994934, 0.8792629837989807, 0.983223021030426, 0.9927630424499512], "prob_old": [0.9471906423568726, 0.0012772848131135106, 0.002539832843467593, 0.00024362381373066455, 8.132756192935631e-05], "prob_new_token": [6.548035162268206e-05, 0.03099873661994934, 0.8792629837989807, 0.983223021030426, 0.9927630424499512], "prob_old_token": [0.9471906423568726, 0.0012772848131135106, 0.002539832843467593, 0.00024362381373066455, 8.132756192935631e-05], "l1-model.layers.2.mlp.down_proj.weight": [46842.5234375], "l2-model.layers.2.mlp.down_proj.weight": [8.06657886505127], "linf-model.layers.2.mlp.down_proj.weight": [0.0020023100078105927], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Joachim L\u00f6w", "target_new": {"str": "Dutch"}, "old_answer": {"str": "German"}, "seed": 42}}, {"loss_per_step": [9.909, 5.823, 0.275, 0.006], "prob_new": [4.97434739372693e-05, 0.002959988545626402, 0.7597931623458862, 0.994291365146637], "prob_old": [0.9133453369140625, 0.0010586836142465472, 0.0009338774834759533, 2.811305057548452e-05], "prob_new_token": [4.97434739372693e-05, 0.002959988545626402, 0.7597931623458862, 0.994291365146637], "prob_old_token": [0.9133453369140625, 0.0010586836142465472, 0.0009338774834759533, 2.811305057548452e-05], "l1-model.layers.2.mlp.down_proj.weight": [35385.046875], "l2-model.layers.2.mlp.down_proj.weight": [6.381063938140869], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024752356112003], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Marcel Proust", "target_new": {"str": 
"Dutch"}, "old_answer": {"str": "French"}, "seed": 42}}, {"loss_per_step": [9.281, 4.815, 0.895, 0.002], "prob_new": [0.08208349347114563, 0.4985640048980713, 0.5832839012145996, 0.9976181983947754], "prob_old": [0.9133453369140625, 0.02509639970958233, 0.0064497338607907295, 2.220662508989335e-06], "prob_new_token": [5.293526328387088e-08, 6.585673691006377e-05, 0.16698744893074036, 0.9954031705856323], "prob_old_token": [0.9133453369140625, 0.02509639970958233, 0.0064497338607907295, 2.220662508989335e-06], "l1-model.layers.2.mlp.down_proj.weight": [36430.2890625], "l2-model.layers.2.mlp.down_proj.weight": [6.435874938964844], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024803578853607], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Marcel Proust", "target_new": {"str": "Serbian"}, "old_answer": {"str": "French"}, "seed": 42}}, {"loss_per_step": [14.057, 13.252, 0.486, 0.022, 0.01, 0.007], "prob_new": [7.856075399104157e-07, 1.756040433065209e-06, 0.615296483039856, 0.97868812084198, 0.9899148941040039, 0.993025541305542], "prob_old": [0.9133453369140625, 7.00065866112709e-05, 0.009827515110373497, 0.0015801027184352279, 0.0008489204337820411, 0.0005672921543009579], "prob_new_token": [7.856075399104157e-07, 1.756040433065209e-06, 0.615296483039856, 0.97868812084198, 0.9899148941040039, 0.993025541305542], "prob_old_token": [0.9133453369140625, 7.00065866112709e-05, 0.009827515110373497, 0.0015801027184352279, 0.0008489204337820411, 0.0005672921543009579], "l1-model.layers.2.mlp.down_proj.weight": [50712.7109375], "l2-model.layers.2.mlp.down_proj.weight": [8.959455490112305], "linf-model.layers.2.mlp.down_proj.weight": [0.0024693552404642105], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Marcel Proust", "target_new": {"str": "Polish"}, "old_answer": {"str": "French"}, "seed": 42}}, {"loss_per_step": [8.511, 4.429, 1.475, 5.153, 0.548, 0.019, 0.009], "prob_new": [0.35681262612342834, 0.4197559058666229, 0.5236347913742065, 0.47938334941864014, 0.66477370262146, 0.9814262390136719, 0.9909366965293884], "prob_old": [0.9495960474014282, 0.05984588339924812, 0.6502666473388672, 8.902715308067854e-06, 6.967948138481006e-05, 3.1952924473444e-05, 1.0222874152532313e-05], "prob_new_token": [5.678409920051308e-08, 0.00016932653670664877, 0.05261233448982239, 3.483829277683981e-05, 0.3365519642829895, 0.965442419052124, 0.9850631952285767], "prob_old_token": [0.9495960474014282, 0.05984588339924812, 0.6502666473388672, 8.902715308067854e-06, 6.967948138481006e-05, 3.1952924473444e-05, 1.0222874152532313e-05], "l1-model.layers.2.mlp.down_proj.weight": [56315.75], "l2-model.layers.2.mlp.down_proj.weight": [9.857529640197754], "linf-model.layers.2.mlp.down_proj.weight": [0.002957182005047798], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Alfonso Cuar\u00f3n", "target_new": {"str": "Serbian"}, "old_answer": {"str": "Spanish"}, "seed": 42}}, {"loss_per_step": [9.258, 3.382, 0.02, 0.029, 0.029, 0.027, 0.024, 0.022, 0.02, 0.017, 0.015, 0.013, 0.011, 0.01], "prob_new": [9.537042933516204e-05, 0.033975183963775635, 0.9797641634941101, 0.9713984131813049, 0.9715626239776611, 0.9736710786819458, 0.9759958982467651, 0.9782721400260925, 0.9805445671081543, 0.982806921005249, 0.9849802851676941, 0.9869791865348816, 0.9887517094612122, 0.9902847409248352], "prob_old": [0.9495960474014282, 0.014344104565680027, 0.0020705091301351786, 0.0009877271950244904, 
0.0005798712372779846, 0.00040696680662222207, 0.00032917727367021143, 0.0002906026493292302, 0.0002682531485334039, 0.00025261795963160694, 0.00023963804414961487, 0.00022759512648917735, 0.00021582968474831432, 0.00020413269521668553], "prob_new_token": [9.537042933516204e-05, 0.033975183963775635, 0.9797641634941101, 0.9713984131813049, 0.9715626239776611, 0.9736710786819458, 0.9759958982467651, 0.9782721400260925, 0.9805445671081543, 0.982806921005249, 0.9849802851676941, 0.9869791865348816, 0.9887517094612122, 0.9902847409248352], "prob_old_token": [0.9495960474014282, 0.014344104565680027, 0.0020705091301351786, 0.0009877271950244904, 0.0005798712372779846, 0.00040696680662222207, 0.00032917727367021143, 0.0002906026493292302, 0.0002682531485334039, 0.00025261795963160694, 0.00023963804414961487, 0.00022759512648917735, 0.00021582968474831432, 0.00020413269521668553], "l1-model.layers.2.mlp.down_proj.weight": [86484.953125], "l2-model.layers.2.mlp.down_proj.weight": [14.535889625549316], "linf-model.layers.2.mlp.down_proj.weight": [0.006564963608980179], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Alfonso Cuar\u00f3n", "target_new": {"str": "Portuguese"}, "old_answer": {"str": "Spanish"}, "seed": 42}}, {"loss_per_step": [11.141, 4.033, 0.187, 0.025, 0.009], "prob_new": [1.4510232176689897e-05, 0.01772940345108509, 0.8296672701835632, 0.9749035239219666, 0.9912264347076416], "prob_old": [0.9495960474014282, 0.004785425961017609, 0.005502521060407162, 0.0007581220706924796, 0.00022974643798079342], "prob_new_token": [1.4510232176689897e-05, 0.01772940345108509, 0.8296672701835632, 0.9749035239219666, 0.9912264347076416], "prob_old_token": [0.9495960474014282, 0.004785425961017609, 0.005502521060407162, 0.0007581220706924796, 0.00022974643798079342], "l1-model.layers.2.mlp.down_proj.weight": [43845.31640625], "l2-model.layers.2.mlp.down_proj.weight": [7.804019451141357], "linf-model.layers.2.mlp.down_proj.weight": [0.0019983667880296707], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Alfonso Cuar\u00f3n", "target_new": {"str": "Italian"}, "old_answer": {"str": "Spanish"}, "seed": 42}}, {"loss_per_step": [15.583, 2.107, 0.011, 0.006], "prob_new": [1.70782143982251e-07, 0.12160409986972809, 0.9891002178192139, 0.9939505457878113], "prob_old": [0.9505294561386108, 0.08839590847492218, 0.00020824938837904483, 3.2817213650560006e-05], "prob_new_token": [1.70782143982251e-07, 0.12160409986972809, 0.9891002178192139, 0.9939505457878113], "prob_old_token": [0.9505294561386108, 0.08839590847492218, 0.00020824938837904483, 3.2817213650560006e-05], "l1-model.layers.2.mlp.down_proj.weight": [40326.2734375], "l2-model.layers.2.mlp.down_proj.weight": [6.790852069854736], "linf-model.layers.2.mlp.down_proj.weight": [0.0015015676617622375], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Che Guevara", "target_new": {"str": "Polish"}, "old_answer": {"str": "Spanish"}, "seed": 42}}, {"loss_per_step": [9.107, 0.122, 0.008], "prob_new": [0.00011092660861322656, 0.8849652409553528, 0.9922423362731934], "prob_old": [0.9505294561386108, 0.04028242826461792, 0.00041493415483273566], "prob_new_token": [0.00011092660861322656, 0.8849652409553528, 0.9922423362731934], "prob_old_token": [0.9505294561386108, 0.04028242826461792, 0.00041493415483273566], "l1-model.layers.2.mlp.down_proj.weight": [36907.96875], "l2-model.layers.2.mlp.down_proj.weight": [5.545181751251221], 
"linf-model.layers.2.mlp.down_proj.weight": [0.0010006788652390242], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Che Guevara", "target_new": {"str": "Portuguese"}, "old_answer": {"str": "Spanish"}, "seed": 42}}, {"loss_per_step": [9.326, 0.301, 0.006], "prob_new": [8.90393421286717e-05, 0.739829421043396, 0.9939241409301758], "prob_old": [0.9505294561386108, 0.03256187215447426, 0.00019300830899737775], "prob_new_token": [8.90393421286717e-05, 0.739829421043396, 0.9939241409301758], "prob_old_token": [0.9505294561386108, 0.03256187215447426, 0.00019300830899737775], "l1-model.layers.2.mlp.down_proj.weight": [36324.0234375], "l2-model.layers.2.mlp.down_proj.weight": [5.4974236488342285], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Che Guevara", "target_new": {"str": "French"}, "old_answer": {"str": "Spanish"}, "seed": 42}}, {"loss_per_step": [8.598, 1.568, 0.096, 0.009], "prob_new": [0.00018447762704454362, 0.20849855244159698, 0.9084750413894653, 0.9915237426757812], "prob_old": [0.9067447781562805, 0.0016322651645168662, 0.0011721047339960933, 2.9978689781273715e-05], "prob_new_token": [0.00018447762704454362, 0.20849855244159698, 0.9084750413894653, 0.9915237426757812], "prob_old_token": [0.9067447781562805, 0.0016322651645168662, 0.0011721047339960933, 2.9978689781273715e-05], "l1-model.layers.2.mlp.down_proj.weight": [39887.1796875], "l2-model.layers.2.mlp.down_proj.weight": [6.738794803619385], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024584718048573], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Lena Meyer-Landrut", "target_new": {"str": "Portuguese"}, "old_answer": {"str": "German"}, "seed": 42}}, {"loss_per_step": [7.693, 2.773, 0.026, 0.01], "prob_new": [0.0004560871166177094, 0.06250157952308655, 0.9746032357215881, 0.9904776215553284], "prob_old": [0.9067447781562805, 0.00012062645691912621, 0.0001113956022891216, 3.239895886508748e-05], "prob_new_token": [0.0004560871166177094, 0.06250157952308655, 0.9746032357215881, 0.9904776215553284], "prob_old_token": [0.9067447781562805, 0.00012062645691912621, 0.0001113956022891216, 3.239895886508748e-05], "l1-model.layers.2.mlp.down_proj.weight": [41607.26953125], "l2-model.layers.2.mlp.down_proj.weight": [6.875736236572266], "linf-model.layers.2.mlp.down_proj.weight": [0.0015023108571767807], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Lena Meyer-Landrut", "target_new": {"str": "French"}, "old_answer": {"str": "German"}, "seed": 42}}, {"loss_per_step": [11.2, 5.822, 0.229, 0.017, 0.009], "prob_new": [1.367807180940872e-05, 0.002960256766527891, 0.7950228452682495, 0.9830822944641113, 0.9907466173171997], "prob_old": [0.9067447781562805, 0.000984766404144466, 0.0013152105966582894, 0.0002350057038711384, 0.00014580902643501759], "prob_new_token": [1.367807180940872e-05, 0.002960256766527891, 0.7950228452682495, 0.9830822944641113, 0.9907466173171997], "prob_old_token": [0.9067447781562805, 0.000984766404144466, 0.0013152105966582894, 0.0002350057038711384, 0.00014580902643501759], "l1-model.layers.2.mlp.down_proj.weight": [44124.2578125], "l2-model.layers.2.mlp.down_proj.weight": [7.8345417976379395], "linf-model.layers.2.mlp.down_proj.weight": [0.0019918871112167835], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Lena 
Meyer-Landrut", "target_new": {"str": "Polish"}, "old_answer": {"str": "German"}, "seed": 42}}, {"loss_per_step": [11.515, 1.859, 0.004], "prob_new": [9.974562999559566e-06, 0.15584208071231842, 0.9963615536689758], "prob_old": [0.9335379600524902, 0.00034946209052577615, 0.0008036447688937187], "prob_new_token": [9.974562999559566e-06, 0.15584208071231842, 0.9963615536689758], "prob_old_token": [0.9335379600524902, 0.00034946209052577615, 0.0008036447688937187], "l1-model.layers.2.mlp.down_proj.weight": [32841.6328125], "l2-model.layers.2.mlp.down_proj.weight": [5.206175327301025], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Elton John", "target_new": {"str": "Dutch"}, "old_answer": {"str": "English"}, "seed": 42}}, {"loss_per_step": [8.841, 2.034, 0.012, 0.005], "prob_new": [0.00014465129061136395, 0.1308433711528778, 0.9875848889350891, 0.9955064058303833], "prob_old": [0.9335379600524902, 0.00659497594460845, 0.0029868721030652523, 0.00028707351884804666], "prob_new_token": [0.00014465129061136395, 0.1308433711528778, 0.9875848889350891, 0.9955064058303833], "prob_old_token": [0.9335379600524902, 0.00659497594460845, 0.0029868721030652523, 0.00028707351884804666], "l1-model.layers.2.mlp.down_proj.weight": [42262.265625], "l2-model.layers.2.mlp.down_proj.weight": [6.920079708099365], "linf-model.layers.2.mlp.down_proj.weight": [0.0015020016580820084], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Elton John", "target_new": {"str": "Italian"}, "old_answer": {"str": "English"}, "seed": 42}}, {"loss_per_step": [10.926, 2.361, 0.014, 0.008], "prob_new": [1.7985934391617775e-05, 0.0943608209490776, 0.9863489270210266, 0.9919523596763611], "prob_old": [0.9335379600524902, 0.04702281206846237, 0.000467237870907411, 0.000351991766365245], "prob_new_token": [1.7985934391617775e-05, 0.0943608209490776, 0.9863489270210266, 0.9919523596763611], "prob_old_token": [0.9335379600524902, 0.04702281206846237, 0.000467237870907411, 0.000351991766365245], "l1-model.layers.2.mlp.down_proj.weight": [39249.72265625], "l2-model.layers.2.mlp.down_proj.weight": [6.7153000831604], "linf-model.layers.2.mlp.down_proj.weight": [0.00150207313708961], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Elton John", "target_new": {"str": "Portuguese"}, "old_answer": {"str": "English"}, "seed": 42}}, {"loss_per_step": [5.175, 4.351, 3.798, 2.175, 0.206, 0.01, 0.006], "prob_new": [0.4517151713371277, 0.639151930809021, 0.5564369559288025, 0.6579931378364563, 0.8434126377105713, 0.9896230697631836, 0.9944350719451904], "prob_old": [0.963721752166748, 0.0001126748466049321, 0.0022958293557167053, 0.18844443559646606, 0.0009844184387475252, 6.890579243190587e-05, 4.301311128074303e-05], "prob_new_token": [5.046036335443205e-07, 2.335287035748479e-06, 1.6387441064580344e-05, 0.0015072248643264174, 0.5508671998977661, 0.9796534180641174, 0.9896989464759827], "prob_old_token": [0.963721752166748, 0.0001126748466049321, 0.0022958293557167053, 0.18844443559646606, 0.0009844184387475252, 6.890579243190587e-05, 4.301311128074303e-05], "l1-model.layers.2.mlp.down_proj.weight": [57795.7734375], "l2-model.layers.2.mlp.down_proj.weight": [10.062124252319336], "linf-model.layers.2.mlp.down_proj.weight": [0.002957921475172043], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Ron Paul", 
"target_new": {"str": "Belarusian"}, "old_answer": {"str": "English"}, "seed": 42}}, {"loss_per_step": [4.67, 0.329, 0.016, 0.002], "prob_new": [0.00936946738511324, 0.7198923826217651, 0.9838929176330566, 0.9977489113807678], "prob_old": [0.963721752166748, 0.0016085499664768577, 0.0005871874163858593, 0.00021258083870634437], "prob_new_token": [0.00936946738511324, 0.7198923826217651, 0.9838929176330566, 0.9977489113807678], "prob_old_token": [0.963721752166748, 0.0016085499664768577, 0.0005871874163858593, 0.00021258083870634437], "l1-model.layers.2.mlp.down_proj.weight": [45717.8828125], "l2-model.layers.2.mlp.down_proj.weight": [7.142380714416504], "linf-model.layers.2.mlp.down_proj.weight": [0.0015023406594991684], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Ron Paul", "target_new": {"str": "Spanish"}, "old_answer": {"str": "English"}, "seed": 42}}, {"loss_per_step": [10.46, 3.869, 7.013, 2.396, 0.029, 0.011, 0.01, 0.01], "prob_new": [2.8656708309426904e-05, 0.020883789286017418, 0.0009000201243907213, 0.09109589457511902, 0.9709725975990295, 0.988995373249054, 0.9899948239326477, 0.9904217720031738], "prob_old": [0.963721752166748, 0.007291407790035009, 0.0018517060671001673, 0.0071746655739843845, 0.006137779448181391, 0.000627078814432025, 0.00030322480597533286, 0.00022991959122009575], "prob_new_token": [2.8656708309426904e-05, 0.020883789286017418, 0.0009000201243907213, 0.09109589457511902, 0.9709725975990295, 0.988995373249054, 0.9899948239326477, 0.9904217720031738], "prob_old_token": [0.963721752166748, 0.007291407790035009, 0.0018517060671001673, 0.0071746655739843845, 0.006137779448181391, 0.000627078814432025, 0.00030322480597533286, 0.00022991959122009575], "l1-model.layers.2.mlp.down_proj.weight": [59341.2578125], "l2-model.layers.2.mlp.down_proj.weight": [10.548018455505371], "linf-model.layers.2.mlp.down_proj.weight": [0.003187749534845352], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Ron Paul", "target_new": {"str": "Dutch"}, "old_answer": {"str": "English"}, "seed": 42}}, {"loss_per_step": [15.002, 7.042, 5.806, 1.361, 0.051, 0.015, 0.008], "prob_new": [3.053645514228265e-07, 0.0008744062506593764, 0.0030092608649283648, 0.25633931159973145, 0.9504237174987793, 0.9851300120353699, 0.99165278673172], "prob_old": [0.9743273854255676, 0.13511736690998077, 0.9023029804229736, 0.12656503915786743, 0.007208663038909435, 0.0012514229165390134, 0.0004725571197923273], "prob_new_token": [3.053645514228265e-07, 0.0008744062506593764, 0.0030092608649283648, 0.25633931159973145, 0.9504237174987793, 0.9851300120353699, 0.99165278673172], "prob_old_token": [0.9743273854255676, 0.13511736690998077, 0.9023029804229736, 0.12656503915786743, 0.007208663038909435, 0.0012514229165390134, 0.0004725571197923273], "l1-model.layers.2.mlp.down_proj.weight": [59010.49609375], "l2-model.layers.2.mlp.down_proj.weight": [10.233424186706543], "linf-model.layers.2.mlp.down_proj.weight": [0.0029342062771320343], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Diego Vel\u00e1zquez", "target_new": {"str": "Polish"}, "old_answer": {"str": "Spanish"}, "seed": 42}}, {"loss_per_step": [6.963, 1.155, 0.123, 0.026, 0.013, 0.006], "prob_new": [0.0009461003355681896, 0.31495901942253113, 0.8845831155776978, 0.9740522503852844, 0.9873273968696594, 0.9937593936920166], "prob_old": [0.9743273854255676, 0.5236449241638184, 0.015540976077318192, 0.014103489927947521, 
0.00664526829496026, 0.002555352868512273], "prob_new_token": [0.0009461003355681896, 0.31495901942253113, 0.8845831155776978, 0.9740522503852844, 0.9873273968696594, 0.9937593936920166], "prob_old_token": [0.9743273854255676, 0.5236449241638184, 0.015540976077318192, 0.014103489927947521, 0.00664526829496026, 0.002555352868512273], "l1-model.layers.2.mlp.down_proj.weight": [59344.703125], "l2-model.layers.2.mlp.down_proj.weight": [9.62523365020752], "linf-model.layers.2.mlp.down_proj.weight": [0.0025040488690137863], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Diego Vel\u00e1zquez", "target_new": {"str": "English"}, "old_answer": {"str": "Spanish"}, "seed": 42}}, {"loss_per_step": [5.611, 2.214, 1.021, 0.009], "prob_new": [0.6599305868148804, 0.6558334231376648, 0.6754443645477295, 0.99066162109375], "prob_old": [0.9743273854255676, 0.04562626779079437, 0.029269255697727203, 0.0004762849712278694], "prob_new_token": [4.9882252994848386e-08, 0.0013514981837943196, 0.04779503494501114, 0.9907019734382629], "prob_old_token": [0.9743273854255676, 0.04562626779079437, 0.029269255697727203, 0.0004762849712278694], "l1-model.layers.2.mlp.down_proj.weight": [39786.0625], "l2-model.layers.2.mlp.down_proj.weight": [6.693039894104004], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024840831756592], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Diego Vel\u00e1zquez", "target_new": {"str": "Tibetan"}, "old_answer": {"str": "Spanish"}, "seed": 42}}, {"loss_per_step": [7.852, 0.698, 0.004], "prob_new": [0.0003889748186338693, 0.4975459575653076, 0.9962365627288818], "prob_old": [0.9478781223297119, 0.0023420541547238827, 0.0001894082670332864], "prob_new_token": [0.0003889748186338693, 0.4975459575653076, 0.9962365627288818], "prob_old_token": [0.9478781223297119, 0.0023420541547238827, 0.0001894082670332864], "l1-model.layers.2.mlp.down_proj.weight": [34039.0078125], "l2-model.layers.2.mlp.down_proj.weight": [5.311419486999512], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Shawn Michaels", "target_new": {"str": "French"}, "old_answer": {"str": "English"}, "seed": 42}}, {"loss_per_step": [10.067, 0.498, 0.001], "prob_new": [4.2442959966138005e-05, 0.6079086661338806, 0.9990519285202026], "prob_old": [0.9478781223297119, 0.004402295686304569, 2.2273618014878593e-05], "prob_new_token": [4.2442959966138005e-05, 0.6079086661338806, 0.9990519285202026], "prob_old_token": [0.9478781223297119, 0.004402295686304569, 2.2273618014878593e-05], "l1-model.layers.2.mlp.down_proj.weight": [33727.5546875], "l2-model.layers.2.mlp.down_proj.weight": [5.285173416137695], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Shawn Michaels", "target_new": {"str": "Polish"}, "old_answer": {"str": "English"}, "seed": 42}}, {"loss_per_step": [9.911, 0.394, 0.012, 0.006], "prob_new": [4.965057814843021e-05, 0.6743322610855103, 0.9879499673843384, 0.9939154982566833], "prob_old": [0.9478781223297119, 0.0017862094100564718, 0.0008067570743151009, 0.00034772141952998936], "prob_new_token": [4.965057814843021e-05, 0.6743322610855103, 0.9879499673843384, 0.9939154982566833], "prob_old_token": [0.9478781223297119, 0.0017862094100564718, 0.0008067570743151009, 0.00034772141952998936], 
"l1-model.layers.2.mlp.down_proj.weight": [44165.02734375], "l2-model.layers.2.mlp.down_proj.weight": [7.030640602111816], "linf-model.layers.2.mlp.down_proj.weight": [0.0015020053833723068], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Shawn Michaels", "target_new": {"str": "Dutch"}, "old_answer": {"str": "English"}, "seed": 42}}, {"loss_per_step": [7.692, 5.063, 2.224, 0.205, 0.05, 0.016, 0.008], "prob_new": [0.4689624011516571, 0.3842869699001312, 0.49056023359298706, 0.829850971698761, 0.9522395133972168, 0.9843419194221497, 0.9922772645950317], "prob_old": [0.943435788154602, 0.0001317343849223107, 0.0009110470418818295, 0.0009314795024693012, 0.00013100822980049998, 2.8424914489733055e-05, 1.0659383406164125e-05], "prob_new_token": [2.2210680583611975e-07, 5.2043185860384256e-05, 0.012071202509105206, 0.671076774597168, 0.9114683866500854, 0.9728402495384216, 0.987263560295105], "prob_old_token": [0.943435788154602, 0.0001317343849223107, 0.0009110470418818295, 0.0009314795024693012, 0.00013100822980049998, 2.8424914489733055e-05, 1.0659383406164125e-05], "l1-model.layers.2.mlp.down_proj.weight": [54795.546875], "l2-model.layers.2.mlp.down_proj.weight": [9.675862312316895], "linf-model.layers.2.mlp.down_proj.weight": [0.0029804371297359467], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Brad Pitt", "target_new": {"str": "Serbian"}, "old_answer": {"str": "English"}, "seed": 42}}, {"loss_per_step": [11.79, 6.863, 0.195, 0.039, 0.025, 0.021, 0.018, 0.015, 0.012, 0.009], "prob_new": [7.581997124361806e-06, 0.0010459829354658723, 0.8231757879257202, 0.9614242315292358, 0.9749278426170349, 0.9790734052658081, 0.981742262840271, 0.9849030375480652, 0.9882946014404297, 0.9911436438560486], "prob_old": [0.943435788154602, 7.501655636588112e-05, 0.006060909014195204, 0.002903552493080497, 0.002577531384304166, 0.002363737905398011, 0.002181001240387559, 0.0018262850353494287, 0.0013825396308675408, 0.0009984528878703713], "prob_new_token": [7.581997124361806e-06, 0.0010459829354658723, 0.8231757879257202, 0.9614242315292358, 0.9749278426170349, 0.9790734052658081, 0.981742262840271, 0.9849030375480652, 0.9882946014404297, 0.9911436438560486], "prob_old_token": [0.943435788154602, 7.501655636588112e-05, 0.006060909014195204, 0.002903552493080497, 0.002577531384304166, 0.002363737905398011, 0.002181001240387559, 0.0018262850353494287, 0.0013825396308675408, 0.0009984528878703713], "l1-model.layers.2.mlp.down_proj.weight": [75200.8515625], "l2-model.layers.2.mlp.down_proj.weight": [12.814562797546387], "linf-model.layers.2.mlp.down_proj.weight": [0.004463657736778259], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Brad Pitt", "target_new": {"str": "Polish"}, "old_answer": {"str": "English"}, "seed": 42}}, {"loss_per_step": [10.935, 2.809, 2.065, 0.691, 0.009], "prob_new": [1.782783328962978e-05, 0.060254767537117004, 0.12679634988307953, 0.50116366147995, 0.9910962581634521], "prob_old": [0.943435788154602, 0.007436210289597511, 0.04035770148038864, 0.04011360555887222, 0.00012423864973243326], "prob_new_token": [1.782783328962978e-05, 0.060254767537117004, 0.12679634988307953, 0.50116366147995, 0.9910962581634521], "prob_old_token": [0.943435788154602, 0.007436210289597511, 0.04035770148038864, 0.04011360555887222, 0.00012423864973243326], "l1-model.layers.2.mlp.down_proj.weight": [45633.5546875], "l2-model.layers.2.mlp.down_proj.weight": [7.908931732177734], 
"linf-model.layers.2.mlp.down_proj.weight": [0.0020058341324329376], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Brad Pitt", "target_new": {"str": "Portuguese"}, "old_answer": {"str": "English"}, "seed": 42}}, {"loss_per_step": [8.312, 3.523, 0.027, 0.007], "prob_new": [0.00024549313820898533, 0.029522106051445007, 0.9732820391654968, 0.9928749799728394], "prob_old": [0.9237022399902344, 0.0003177932230755687, 0.001700571388937533, 0.000337983132340014], "prob_new_token": [0.00024549313820898533, 0.029522106051445007, 0.9732820391654968, 0.9928749799728394], "prob_old_token": [0.9237022399902344, 0.0003177932230755687, 0.001700571388937533, 0.000337983132340014], "l1-model.layers.2.mlp.down_proj.weight": [40288.265625], "l2-model.layers.2.mlp.down_proj.weight": [6.773800849914551], "linf-model.layers.2.mlp.down_proj.weight": [0.0015023034065961838], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "John Couch Adams", "target_new": {"str": "Dutch"}, "old_answer": {"str": "English"}, "seed": 42}}, {"loss_per_step": [5.496, 3.442, 0.135, 0.015, 0.013, 0.012, 0.007], "prob_new": [0.4994572401046753, 0.49989789724349976, 0.8815698623657227, 0.9850467443466187, 0.9867641925811768, 0.9886065125465393, 0.9932219386100769], "prob_old": [0.9237022399902344, 0.001509420690126717, 0.00440010242164135, 9.728475561132655e-05, 1.3783930626232177e-05, 5.436498213384766e-06, 2.8307331376709044e-06], "prob_new_token": [1.6856196452863514e-05, 0.0010243597207590938, 0.7632166743278503, 0.9701348543167114, 0.9735528826713562, 0.9772311449050903, 0.9864577054977417], "prob_old_token": [0.9237022399902344, 0.001509420690126717, 0.00440010242164135, 9.728475561132655e-05, 1.3783930626232177e-05, 5.436498213384766e-06, 2.8307331376709044e-06], "l1-model.layers.2.mlp.down_proj.weight": [60456.375], "l2-model.layers.2.mlp.down_proj.weight": [10.345192909240723], "linf-model.layers.2.mlp.down_proj.weight": [0.0029969485476613045], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "John Couch Adams", "target_new": {"str": "Danish"}, "old_answer": {"str": "English"}, "seed": 42}}, {"loss_per_step": [8.181, 2.619, 0.031, 0.058, 0.014, 0.01], "prob_new": [0.0002799401117954403, 0.07286540418863297, 0.9690634608268738, 0.94377201795578, 0.9863009452819824, 0.9905116558074951], "prob_old": [0.9237022399902344, 0.0008760614437051117, 0.00021363624546211213, 0.00015707724378444254, 0.00017192531959153712, 0.00023149514163378626], "prob_new_token": [0.0002799401117954403, 0.07286540418863297, 0.9690634608268738, 0.94377201795578, 0.9863009452819824, 0.9905116558074951], "prob_old_token": [0.9237022399902344, 0.0008760614437051117, 0.00021363624546211213, 0.00015707724378444254, 0.00017192531959153712, 0.00023149514163378626], "l1-model.layers.2.mlp.down_proj.weight": [57765.21484375], "l2-model.layers.2.mlp.down_proj.weight": [9.532441139221191], "linf-model.layers.2.mlp.down_proj.weight": [0.0025002192705869675], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "John Couch Adams", "target_new": {"str": "Spanish"}, "old_answer": {"str": "English"}, "seed": 42}}, {"loss_per_step": [8.602, 1.029, 0.051, 0.02, 0.008], "prob_new": [0.0001836951560107991, 0.3574216365814209, 0.9505249857902527, 0.9799681901931763, 0.991671621799469], "prob_old": [0.9152178764343262, 0.0020488728769123554, 0.0003937573928851634, 0.0001307297934545204, 6.0731832491001114e-05], 
"prob_new_token": [0.0001836951560107991, 0.3574216365814209, 0.9505249857902527, 0.9799681901931763, 0.991671621799469], "prob_old_token": [0.9152178764343262, 0.0020488728769123554, 0.0003937573928851634, 0.0001307297934545204, 6.0731832491001114e-05], "l1-model.layers.2.mlp.down_proj.weight": [49817.78515625], "l2-model.layers.2.mlp.down_proj.weight": [8.254173278808594], "linf-model.layers.2.mlp.down_proj.weight": [0.0020040974486619234], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Stefan Raab", "target_new": {"str": "Dutch"}, "old_answer": {"str": "German"}, "seed": 42}}, {"loss_per_step": [4.808, 0.552, 0.081, 0.01], "prob_new": [0.008163408376276493, 0.575648307800293, 0.9223567247390747, 0.9901472926139832], "prob_old": [0.9152178764343262, 0.00996353104710579, 0.02045188657939434, 0.0024279579520225525], "prob_new_token": [0.008163408376276493, 0.575648307800293, 0.9223567247390747, 0.9901472926139832], "prob_old_token": [0.9152178764343262, 0.00996353104710579, 0.02045188657939434, 0.0024279579520225525], "l1-model.layers.2.mlp.down_proj.weight": [46296.078125], "l2-model.layers.2.mlp.down_proj.weight": [7.165041446685791], "linf-model.layers.2.mlp.down_proj.weight": [0.00150247011333704], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Stefan Raab", "target_new": {"str": "English"}, "old_answer": {"str": "German"}, "seed": 42}}, {"loss_per_step": [5.713, 3.227, 0.429, 0.025, 0.051, 0.025, 0.009], "prob_new": [0.4993707835674286, 0.4997440278530121, 0.7118738293647766, 0.9757792949676514, 0.951439619064331, 0.9758208990097046, 0.9910157918930054], "prob_old": [0.9152178764343262, 0.00035687442868947983, 9.796402446227148e-05, 0.00031214836053550243, 0.0005426791030913591, 0.0002522455179132521, 8.866472489899024e-05], "prob_new_token": [1.0914890481217299e-05, 0.0015768504235893488, 0.4239075183868408, 0.9516262412071228, 0.9029302000999451, 0.9516720771789551, 0.9820513725280762], "prob_old_token": [0.9152178764343262, 0.00035687442868947983, 9.796402446227148e-05, 0.00031214836053550243, 0.0005426791030913591, 0.0002522455179132521, 8.866472489899024e-05], "l1-model.layers.2.mlp.down_proj.weight": [57919.71875], "l2-model.layers.2.mlp.down_proj.weight": [10.089092254638672], "linf-model.layers.2.mlp.down_proj.weight": [0.0029919613152742386], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Stefan Raab", "target_new": {"str": "Danish"}, "old_answer": {"str": "German"}, "seed": 42}}, {"loss_per_step": [8.481, 5.401, 10.507, 0.04, 0.02, 0.027, 0.034, 0.033, 0.029, 0.025, 0.022, 0.019, 0.017, 0.015, 0.014, 0.012, 0.011, 0.01], "prob_new": [0.0002073314826702699, 0.004513198509812355, 2.735647103691008e-05, 0.9611201286315918, 0.9797616004943848, 0.9730778932571411, 0.9668121933937073, 0.9673154354095459, 0.9712941646575928, 0.9752271175384521, 0.9783135056495667, 0.9808177351951599, 0.9829646944999695, 0.9848368763923645, 0.9864741563796997, 0.9879056215286255, 0.9891575574874878, 0.990256130695343], "prob_old": [0.929573118686676, 0.003504138672724366, 1.7186670447699726e-05, 0.0012629994889721274, 0.0007535559707321227, 0.0014299367321655154, 0.0018829184118658304, 0.0019309862982481718, 0.0017646991182118654, 0.001624027150683105, 0.00154952984303236, 0.0014888535952195525, 0.0014081335393711925, 0.0013045654632151127, 0.0011876131175085902, 0.0010675997473299503, 0.0009518276201561093, 0.0008445936837233603], "prob_new_token": 
[0.0002073314826702699, 0.004513198509812355, 2.735647103691008e-05, 0.9611201286315918, 0.9797616004943848, 0.9730778932571411, 0.9668121933937073, 0.9673154354095459, 0.9712941646575928, 0.9752271175384521, 0.9783135056495667, 0.9808177351951599, 0.9829646944999695, 0.9848368763923645, 0.9864741563796997, 0.9879056215286255, 0.9891575574874878, 0.990256130695343], "prob_old_token": [0.929573118686676, 0.003504138672724366, 1.7186670447699726e-05, 0.0012629994889721274, 0.0007535559707321227, 0.0014299367321655154, 0.0018829184118658304, 0.0019309862982481718, 0.0017646991182118654, 0.001624027150683105, 0.00154952984303236, 0.0014888535952195525, 0.0014081335393711925, 0.0013045654632151127, 0.0011876131175085902, 0.0010675997473299503, 0.0009518276201561093, 0.0008445936837233603], "l1-model.layers.2.mlp.down_proj.weight": [91194.8828125], "l2-model.layers.2.mlp.down_proj.weight": [16.016830444335938], "linf-model.layers.2.mlp.down_proj.weight": [0.007328505627810955], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Roger Taylor", "target_new": {"str": "Dutch"}, "old_answer": {"str": "English"}, "seed": 42}}, {"loss_per_step": [7.275, 3.904, 4.953, 2.601, 0.993, 0.682, 0.217, 0.036, 0.019, 0.014, 0.009], "prob_new": [0.443070650100708, 0.2587791979312897, 0.4917866885662079, 0.4966531991958618, 0.5608996152877808, 0.6239254474639893, 0.8227267265319824, 0.9651472568511963, 0.9816242456436157, 0.9857787489891052, 0.990693211555481], "prob_old": [0.929573118686676, 0.035256724804639816, 0.03809082880616188, 0.0483231283724308, 0.04727441817522049, 0.027152275666594505, 0.010289099998772144, 0.0012162078637629747, 0.00017543802096042782, 5.800960570923053e-05, 2.529645098547917e-05], "prob_new_token": [5.417981014943507e-07, 0.0007868118118494749, 5.068414975539781e-05, 0.005576219409704208, 0.1398967206478119, 0.2583216726779938, 0.6536253690719604, 0.9385358691215515, 0.9713781476020813, 0.9783318638801575, 0.9862112998962402], "prob_old_token": [0.929573118686676, 0.035256724804639816, 0.03809082880616188, 0.0483231283724308, 0.04727441817522049, 0.027152275666594505, 0.010289099998772144, 0.0012162078637629747, 0.00017543802096042782, 5.800960570923053e-05, 2.529645098547917e-05], "l1-model.layers.2.mlp.down_proj.weight": [73615.25], "l2-model.layers.2.mlp.down_proj.weight": [12.983128547668457], "linf-model.layers.2.mlp.down_proj.weight": [0.004746881313621998], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Roger Taylor", "target_new": {"str": "Serbian"}, "old_answer": {"str": "English"}, "seed": 42}}, {"loss_per_step": [4.882, 3.042, 1.538, 0.669, 0.062, 0.02, 0.007], "prob_new": [0.39106521010398865, 0.6511871814727783, 0.6680087447166443, 0.7065200209617615, 0.942753255367279, 0.9802274107933044, 0.992784857749939], "prob_old": [0.929573118686676, 0.0073803192935884, 0.10248865187168121, 0.002723391866311431, 0.0011105987941846251, 0.00042789007420651615, 0.00017753816791810095], "prob_new_token": [2.477214593454846e-06, 0.00011391650332370773, 0.009961654432117939, 0.13689477741718292, 0.8337485790252686, 0.9429513812065125, 0.9796815514564514], "prob_old_token": [0.929573118686676, 0.0073803192935884, 0.10248865187168121, 0.002723391866311431, 0.0011105987941846251, 0.00042789007420651615, 0.00017753816791810095], "l1-model.layers.2.mlp.down_proj.weight": [59259.24609375], "l2-model.layers.2.mlp.down_proj.weight": [10.203078269958496], "linf-model.layers.2.mlp.down_proj.weight": 
[0.002994186244904995], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Roger Taylor", "target_new": {"str": "Belarusian"}, "old_answer": {"str": "English"}, "seed": 42}}, {"loss_per_step": [6.417, 1.58, 0.057, 0.005], "prob_new": [0.0016329727368429303, 0.20602832734584808, 0.9448749423027039, 0.9949449896812439], "prob_old": [0.9201338887214661, 0.005884871818125248, 0.0004546364361885935, 0.00010071675205836073], "prob_new_token": [0.0016329727368429303, 0.20602832734584808, 0.9448749423027039, 0.9949449896812439], "prob_old_token": [0.9201338887214661, 0.005884871818125248, 0.0004546364361885935, 0.00010071675205836073], "l1-model.layers.2.mlp.down_proj.weight": [41903.13671875], "l2-model.layers.2.mlp.down_proj.weight": [6.896817684173584], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024454332888126], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "John Bardeen", "target_new": {"str": "French"}, "old_answer": {"str": "English"}, "seed": 42}}, {"loss_per_step": [9.157, 1.935, 0.06, 0.006], "prob_new": [0.00010551170271355659, 0.1443594992160797, 0.9420962333679199, 0.9936594367027283], "prob_old": [0.9201338887214661, 0.02378740906715393, 0.0064993551932275295, 0.001006669132038951], "prob_new_token": [0.00010551170271355659, 0.1443594992160797, 0.9420962333679199, 0.9936594367027283], "prob_old_token": [0.9201338887214661, 0.02378740906715393, 0.0064993551932275295, 0.001006669132038951], "l1-model.layers.2.mlp.down_proj.weight": [41717.56640625], "l2-model.layers.2.mlp.down_proj.weight": [6.888811111450195], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024598687887192], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "John Bardeen", "target_new": {"str": "German"}, "old_answer": {"str": "English"}, "seed": 42}}, {"loss_per_step": [4.445, 0.927, 0.058, 0.001], "prob_new": [0.6621203422546387, 0.682491660118103, 0.9455430507659912, 0.9991616606712341], "prob_old": [0.9201338887214661, 0.00042217926238663495, 1.781065293471329e-05, 9.171509987027093e-07], "prob_new_token": [1.6410438092862023e-06, 0.06285974383354187, 0.8536508083343506, 0.9988610744476318], "prob_old_token": [0.9201338887214661, 0.00042217926238663495, 1.781065293471329e-05, 9.171509987027093e-07], "l1-model.layers.2.mlp.down_proj.weight": [41559.9375], "l2-model.layers.2.mlp.down_proj.weight": [6.8819804191589355], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024663880467415], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "John Bardeen", "target_new": {"str": "Tibetan"}, "old_answer": {"str": "English"}, "seed": 42}}, {"loss_per_step": [9.179, 4.942, 0.983, 0.048, 0.011, 0.007], "prob_new": [0.00010316911357222125, 0.007136869709938765, 0.37400737404823303, 0.9530462622642517, 0.9893676042556763, 0.9925439953804016], "prob_old": [0.941252589225769, 0.006242157891392708, 0.10499779880046844, 0.03017539717257023, 0.004981903359293938, 0.0024594119749963284], "prob_new_token": [0.00010316911357222125, 0.007136869709938765, 0.37400737404823303, 0.9530462622642517, 0.9893676042556763, 0.9925439953804016], "prob_old_token": [0.941252589225769, 0.006242157891392708, 0.10499779880046844, 0.03017539717257023, 0.004981903359293938, 0.0024594119749963284], "l1-model.layers.2.mlp.down_proj.weight": [53773.30078125], "l2-model.layers.2.mlp.down_proj.weight": [9.237442016601562], "linf-model.layers.2.mlp.down_proj.weight": 
[0.002496819943189621], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Michel Rocard", "target_new": {"str": "German"}, "old_answer": {"str": "French"}, "seed": 42}}, {"loss_per_step": [8.341, 3.875, 3.236, 0.22, 0.063, 0.02, 0.009], "prob_new": [0.00023864550166763365, 0.02074633352458477, 0.039325494319200516, 0.8025240898132324, 0.9387331604957581, 0.980137050151825, 0.9914472103118896], "prob_old": [0.941252589225769, 0.0002307479444425553, 0.000907934969291091, 0.0062998211942613125, 0.0014783474616706371, 0.0005596388946287334, 0.0003036096750292927], "prob_new_token": [0.00023864550166763365, 0.02074633352458477, 0.039325494319200516, 0.8025240898132324, 0.9387331604957581, 0.980137050151825, 0.9914472103118896], "prob_old_token": [0.941252589225769, 0.0002307479444425553, 0.000907934969291091, 0.0062998211942613125, 0.0014783474616706371, 0.0005596388946287334, 0.0003036096750292927], "l1-model.layers.2.mlp.down_proj.weight": [56558.32421875], "l2-model.layers.2.mlp.down_proj.weight": [9.88780689239502], "linf-model.layers.2.mlp.down_proj.weight": [0.0029809297993779182], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Michel Rocard", "target_new": {"str": "Spanish"}, "old_answer": {"str": "French"}, "seed": 42}}, {"loss_per_step": [5.292, 1.847, 0.229, 0.003], "prob_new": [0.6435356140136719, 0.6641862392425537, 0.8311094045639038, 0.9972505569458008], "prob_old": [0.941252589225769, 0.018607426434755325, 0.0003692162281367928, 1.220803187607089e-05], "prob_new_token": [1.367483690728477e-07, 0.003964968957006931, 0.5123884081840515, 0.9980127215385437], "prob_old_token": [0.941252589225769, 0.018607426434755325, 0.0003692162281367928, 1.220803187607089e-05], "l1-model.layers.2.mlp.down_proj.weight": [37280.203125], "l2-model.layers.2.mlp.down_proj.weight": [6.500486373901367], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024812892079353], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Michel Rocard", "target_new": {"str": "Tibetan"}, "old_answer": {"str": "French"}, "seed": 42}}, {"loss_per_step": [3.638, 0.136, 0.005], "prob_new": [0.7484524846076965, 0.8921396732330322, 0.9952157735824585], "prob_old": [0.9928372502326965, 1.6844127458170988e-05, 2.139579692084226e-06], "prob_new_token": [4.814639851247193e-07, 0.596370279788971, 0.9928348064422607], "prob_old_token": [0.9928372502326965, 1.6844127458170988e-05, 2.139579692084226e-06], "l1-model.layers.2.mlp.down_proj.weight": [36435.234375], "l2-model.layers.2.mlp.down_proj.weight": [5.509518623352051], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006787488237023], "request": {"prompt": "{} is skilled at playing the", "subject": "Vinnie Colaiuta", "target_new": {"str": "saxophone"}, "old_answer": {"str": "drums"}, "seed": 42}}, {"loss_per_step": [9.417, 0.922, 0.024, 0.02, 0.015, 0.015, 0.013, 0.01, 0.007], "prob_new": [8.129088382702321e-05, 0.39789512753486633, 0.9761866331100464, 0.9803041219711304, 0.9848434329032898, 0.9852554202079773, 0.9867044687271118, 0.9898526668548584, 0.9933445453643799], "prob_old": [0.9928372502326965, 3.738547820830718e-05, 3.6483763210526376e-07, 2.359476241053926e-07, 1.1411814426764977e-07, 6.033635457924902e-08, 3.950778193484439e-08, 2.4388599584312942e-08, 1.2800930093703755e-08], "prob_new_token": [8.129088382702321e-05, 0.39789512753486633, 0.9761866331100464, 0.9803041219711304, 0.9848434329032898, 0.9852554202079773, 0.9867044687271118, 
0.9898526668548584, 0.9933445453643799], "prob_old_token": [0.9928372502326965, 3.738547820830718e-05, 3.6483763210526376e-07, 2.359476241053926e-07, 1.1411814426764977e-07, 6.033635457924902e-08, 3.950778193484439e-08, 2.4388599584312942e-08, 1.2800930093703755e-08], "l1-model.layers.2.mlp.down_proj.weight": [81381.8125], "l2-model.layers.2.mlp.down_proj.weight": [12.595439910888672], "linf-model.layers.2.mlp.down_proj.weight": [0.004023641347885132], "request": {"prompt": "{} is skilled at playing the", "subject": "Vinnie Colaiuta", "target_new": {"str": "piano"}, "old_answer": {"str": "drums"}, "seed": 42}}, {"loss_per_step": [7.813, 1.407, 0.011, 0.016, 0.009], "prob_new": [0.49950799345970154, 0.5289204120635986, 0.9887372851371765, 0.984659731388092, 0.9910633563995361], "prob_old": [0.9928372502326965, 0.001581663964316249, 1.5609001593475114e-06, 1.229988015438721e-06, 6.921783892721578e-07], "prob_new_token": [1.6373677169667644e-07, 0.060118403285741806, 0.9776812791824341, 0.9694778323173523, 0.9822771549224854], "prob_old_token": [0.9928372502326965, 0.001581663964316249, 1.5609001593475114e-06, 1.229988015438721e-06, 6.921783892721578e-07], "l1-model.layers.2.mlp.down_proj.weight": [53149.859375], "l2-model.layers.2.mlp.down_proj.weight": [8.45000171661377], "linf-model.layers.2.mlp.down_proj.weight": [0.002004890702664852], "request": {"prompt": "{} is skilled at playing the", "subject": "Vinnie Colaiuta", "target_new": {"str": "violin"}, "old_answer": {"str": "drums"}, "seed": 42}}, {"loss_per_step": [6.961, 2.955, 1.983, 0.563, 0.053, 0.015, 0.008], "prob_new": [0.4658449590206146, 0.4947623014450073, 0.4882109761238098, 0.6549019813537598, 0.9492634534835815, 0.9851357936859131, 0.9923971891403198], "prob_old": [0.9486104846000671, 0.06549094617366791, 0.0005394757026806474, 0.0032553665805608034, 2.8913957066833973e-05, 4.917845672025578e-06, 2.494389946150477e-06], "prob_new_token": [9.655960866439273e-07, 0.002749714069068432, 0.019815480336546898, 0.3315739929676056, 0.9179884791374207, 0.9837035536766052, 0.994088888168335], "prob_old_token": [0.9486104846000671, 0.06549094617366791, 0.0005394757026806474, 0.0032553665805608034, 2.8913957066833973e-05, 4.917845672025578e-06, 2.494389946150477e-06], "l1-model.layers.2.mlp.down_proj.weight": [60331.890625], "l2-model.layers.2.mlp.down_proj.weight": [10.148429870605469], "linf-model.layers.2.mlp.down_proj.weight": [0.0029987432062625885], "request": {"prompt": "{} is skilled at playing the", "subject": "Albert King", "target_new": {"str": "zither"}, "old_answer": {"str": "guitar"}, "seed": 42}}, {"loss_per_step": [6.335, 1.409, 0.115, 0.026, 0.012, 0.008], "prob_new": [0.17271284759044647, 0.5030159950256348, 0.8955990076065063, 0.9743260145187378, 0.9885401129722595, 0.9918009042739868], "prob_old": [0.9486104846000671, 0.045038383454084396, 0.004258471075445414, 0.0013684335863217711, 0.0006518079899251461, 0.0004831963451579213], "prob_new_token": [9.100410352402832e-06, 0.06339804083108902, 0.8087011575698853, 0.9550995826721191, 0.9798604249954224, 0.985143780708313], "prob_old_token": [0.9486104846000671, 0.045038383454084396, 0.004258471075445414, 0.0013684335863217711, 0.0006518079899251461, 0.0004831963451579213], "l1-model.layers.2.mlp.down_proj.weight": [61550.5234375], "l2-model.layers.2.mlp.down_proj.weight": [9.763406753540039], "linf-model.layers.2.mlp.down_proj.weight": [0.0024935994297266006], "request": {"prompt": "{} is skilled at playing the", "subject": "Albert King", "target_new": {"str": "sitar"}, 
"old_answer": {"str": "guitar"}, "seed": 42}}, {"loss_per_step": [8.096, 2.325, 2.199, 0.216, 0.028, 0.01], "prob_new": [0.49831685423851013, 0.49978646636009216, 0.48890069127082825, 0.8214520812034607, 0.9722781777381897, 0.9903638362884521], "prob_old": [0.9486104846000671, 0.41416242718696594, 0.0013603765983134508, 0.007215788587927818, 0.0006912312237545848, 0.0001390193501720205], "prob_new_token": [9.312177695619539e-08, 0.009652036242187023, 0.012736624106764793, 0.6597030758857727, 0.9528728723526001, 0.9849681258201599], "prob_old_token": [0.9486104846000671, 0.41416242718696594, 0.0013603765983134508, 0.007215788587927818, 0.0006912312237545848, 0.0001390193501720205], "l1-model.layers.2.mlp.down_proj.weight": [52578.8671875], "l2-model.layers.2.mlp.down_proj.weight": [8.93325424194336], "linf-model.layers.2.mlp.down_proj.weight": [0.0024957172572612762], "request": {"prompt": "{} is skilled at playing the", "subject": "Albert King", "target_new": {"str": "accordion"}, "old_answer": {"str": "guitar"}, "seed": 42}}, {"loss_per_step": [11.993, 0.378, 0.033, 0.01], "prob_new": [6.185950496728765e-06, 0.6851323246955872, 0.9677159786224365, 0.9902796149253845], "prob_old": [0.9924374222755432, 0.002854090416803956, 0.0012284552212804556, 0.0005670971586368978], "prob_new_token": [6.185950496728765e-06, 0.6851323246955872, 0.9677159786224365, 0.9902796149253845], "prob_old_token": [0.9924374222755432, 0.002854090416803956, 0.0012284552212804556, 0.0005670971586368978], "l1-model.layers.2.mlp.down_proj.weight": [48800.6171875], "l2-model.layers.2.mlp.down_proj.weight": [7.320572376251221], "linf-model.layers.2.mlp.down_proj.weight": [0.001502237282693386], "request": {"prompt": "{} is skilled at playing the", "subject": "Jo Jones", "target_new": {"str": "guitar"}, "old_answer": {"str": "drums"}, "seed": 42}}, {"loss_per_step": [6.077, 0.548, 0.113, 0.042, 0.017, 0.008], "prob_new": [0.48729291558265686, 0.6671044826507568, 0.8990494608879089, 0.9594348669052124, 0.9836547374725342, 0.9916114807128906], "prob_old": [0.9924374222755432, 0.0024368353188037872, 0.001211108872666955, 0.0005415540654212236, 0.00016342282469850034, 5.072142812423408e-05], "prob_new_token": [5.408557171904249e-06, 0.33458462357521057, 0.7984298467636108, 0.9192134737968445, 0.9677120447158813, 0.9836910367012024], "prob_old_token": [0.9924374222755432, 0.0024368353188037872, 0.001211108872666955, 0.0005415540654212236, 0.00016342282469850034, 5.072142812423408e-05], "l1-model.layers.2.mlp.down_proj.weight": [65798.375], "l2-model.layers.2.mlp.down_proj.weight": [9.952567100524902], "linf-model.layers.2.mlp.down_proj.weight": [0.002505822107195854], "request": {"prompt": "{} is skilled at playing the", "subject": "Jo Jones", "target_new": {"str": "flute"}, "old_answer": {"str": "drums"}, "seed": 42}}, {"loss_per_step": [8.632, 2.275, 0.174, 0.051, 0.022, 0.013, 0.009], "prob_new": [0.00017830156139098108, 0.10282351821660995, 0.8401201367378235, 0.9499397873878479, 0.9782424569129944, 0.9867917895317078, 0.9909070730209351], "prob_old": [0.9924374222755432, 0.0044352347031235695, 4.3448559154057875e-05, 3.2146078865480376e-06, 7.429079005305539e-07, 2.6119835183635587e-07, 1.3882039695545245e-07], "prob_new_token": [0.00017830156139098108, 0.10282351821660995, 0.8401201367378235, 0.9499397873878479, 0.9782424569129944, 0.9867917895317078, 0.9909070730209351], "prob_old_token": [0.9924374222755432, 0.0044352347031235695, 4.3448559154057875e-05, 3.2146078865480376e-06, 7.429079005305539e-07, 
2.6119835183635587e-07, 1.3882039695545245e-07], "l1-model.layers.2.mlp.down_proj.weight": [70227.46875], "l2-model.layers.2.mlp.down_proj.weight": [10.874922752380371], "linf-model.layers.2.mlp.down_proj.weight": [0.0029936013743281364], "request": {"prompt": "{} is skilled at playing the", "subject": "Jo Jones", "target_new": {"str": "piano"}, "old_answer": {"str": "drums"}, "seed": 42}}, {"loss_per_step": [3.293, 0.181, 0.006], "prob_new": [0.7288010120391846, 0.8684708476066589, 0.9942982196807861], "prob_old": [0.9852087497711182, 0.49568262696266174, 0.4988461434841156], "prob_new_token": [2.0787772427866003e-06, 0.49464455246925354, 0.9835661053657532], "prob_old_token": [0.9709790349006653, 0.0006418222328647971, 3.03542929032119e-05], "l1-model.layers.2.mlp.down_proj.weight": [35947.90234375], "l2-model.layers.2.mlp.down_proj.weight": [5.475983619689941], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006786324083805], "request": {"prompt": "{} is skilled at playing the", "subject": "Giuseppe Tartini", "target_new": {"str": "saxophone"}, "old_answer": {"str": "violin"}, "seed": 42}}, {"loss_per_step": [5.98, 1.156, 0.057, 0.027, 0.015, 0.009], "prob_new": [0.10602843761444092, 0.5410300493240356, 0.9456995129585266, 0.9736399054527283, 0.9854762554168701, 0.9909449815750122], "prob_old": [0.9852087497711182, 0.4718564748764038, 0.4601147174835205, 0.4814305007457733, 0.4869452714920044, 0.4885002374649048], "prob_new_token": [3.0160153983160853e-05, 0.10086815059185028, 0.9027441740036011, 0.9531561732292175, 0.9745945334434509, 0.9844920635223389], "prob_old_token": [0.9709790349006653, 0.003365296870470047, 4.066859219165053e-06, 8.201788546102762e-07, 2.9761622499790974e-07, 1.6416723269685463e-07], "l1-model.layers.2.mlp.down_proj.weight": [60172.5625], "l2-model.layers.2.mlp.down_proj.weight": [9.693849563598633], "linf-model.layers.2.mlp.down_proj.weight": [0.0025057513266801834], "request": {"prompt": "{} is skilled at playing the", "subject": "Giuseppe Tartini", "target_new": {"str": "sitar"}, "old_answer": {"str": "violin"}, "seed": 42}}, {"loss_per_step": [11.015, 0.206, 0.043, 0.015, 0.008], "prob_new": [1.6446041627204977e-05, 0.8135641813278198, 0.9574918150901794, 0.9853311777114868, 0.9916942119598389], "prob_old": [0.9852087497711182, 0.4794958531856537, 0.47237628698349, 0.46346357464790344, 0.4598604738712311], "prob_new_token": [1.6446041627204977e-05, 0.8135641813278198, 0.9574918150901794, 0.9853311777114868, 0.9916942119598389], "prob_old_token": [0.9709790349006653, 0.00010815144196385518, 3.009255124197807e-05, 6.194747129484313e-06, 1.5372152120107785e-06], "l1-model.layers.2.mlp.down_proj.weight": [57957.078125], "l2-model.layers.2.mlp.down_proj.weight": [8.732937812805176], "linf-model.layers.2.mlp.down_proj.weight": [0.002004780573770404], "request": {"prompt": "{} is skilled at playing the", "subject": "Giuseppe Tartini", "target_new": {"str": "organ"}, "old_answer": {"str": "violin"}, "seed": 42}}, {"loss_per_step": [11.938, 2.765, 0.441, 0.017, 0.003], "prob_new": [6.539051810250385e-06, 0.06299103051424026, 0.6432160139083862, 0.9826980829238892, 0.9970732927322388], "prob_old": [0.9751524925231934, 0.011689310893416405, 0.003387694014236331, 8.123679435811937e-05, 1.0076742910314351e-05], "prob_new_token": [6.539051810250385e-06, 0.06299103051424026, 0.6432160139083862, 0.9826980829238892, 0.9970732927322388], "prob_old_token": [0.9751524925231934, 0.011689310893416405, 0.003387694014236331, 8.123679435811937e-05, 1.0076742910314351e-05], 
"l1-model.layers.2.mlp.down_proj.weight": [53958.53515625], "l2-model.layers.2.mlp.down_proj.weight": [8.500072479248047], "linf-model.layers.2.mlp.down_proj.weight": [0.0019873101264238358], "request": {"prompt": "{} is skilled at playing the", "subject": "Friedrich Kalkbrenner", "target_new": {"str": "drums"}, "old_answer": {"str": "piano"}, "seed": 42}}, {"loss_per_step": [5.966, 2.636, 0.162, 0.004], "prob_new": [0.49824899435043335, 0.49978071451187134, 0.8610246181488037, 0.9958628416061401], "prob_old": [0.9751524925231934, 0.005941574927419424, 0.00015302325482480228, 4.2010577772089164e-07], "prob_new_token": [6.599646440008655e-06, 0.005166557617485523, 0.7250305414199829, 0.9957813024520874], "prob_old_token": [0.9751524925231934, 0.005941574927419424, 0.00015302325482480228, 4.2010577772089164e-07], "l1-model.layers.2.mlp.down_proj.weight": [43517.96484375], "l2-model.layers.2.mlp.down_proj.weight": [6.980939865112305], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024729073047638], "request": {"prompt": "{} is skilled at playing the", "subject": "Friedrich Kalkbrenner", "target_new": {"str": "zither"}, "old_answer": {"str": "piano"}, "seed": 42}}, {"loss_per_step": [4.278, 0.339, 0.004], "prob_new": [0.4974680542945862, 0.7521976828575134, 0.9963518977165222], "prob_old": [0.9751524925231934, 0.00015332651673816144, 1.7596800034880289e-06], "prob_new_token": [0.00019334581156726927, 0.5105515718460083, 0.9979352951049805], "prob_old_token": [0.9751524925231934, 0.00015332651673816144, 1.7596800034880289e-06], "l1-model.layers.2.mlp.down_proj.weight": [36000.515625], "l2-model.layers.2.mlp.down_proj.weight": [5.473960876464844], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "{} is skilled at playing the", "subject": "Friedrich Kalkbrenner", "target_new": {"str": "accordion"}, "old_answer": {"str": "piano"}, "seed": 42}}, {"loss_per_step": [2.918, 0.028, 0.002], "prob_new": [0.054065726697444916, 0.9727694392204285, 0.9979527592658997], "prob_old": [0.9101787805557251, 1.0482167454028968e-06, 3.520939273471413e-08], "prob_new_token": [0.054065726697444916, 0.9727694392204285, 0.9979527592658997], "prob_old_token": [0.9101787805557251, 1.0482167454028968e-06, 3.520939273471413e-08], "l1-model.layers.2.mlp.down_proj.weight": [37472.6328125], "l2-model.layers.2.mlp.down_proj.weight": [5.592913627624512], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006725788116455], "request": {"prompt": "{} is skilled at playing the", "subject": "Marilyn Manson", "target_new": {"str": "piano"}, "old_answer": {"str": "guitar"}, "seed": 42}}, {"loss_per_step": [6.237, 2.847, 0.2, 0.009], "prob_new": [0.45654669404029846, 0.5000590682029724, 0.8294754028320312, 0.9908110499382019], "prob_old": [0.9101787805557251, 0.00018606959201861173, 1.024237099045422e-05, 3.857071249058208e-07], "prob_new_token": [4.187064860161627e-06, 0.003378265304490924, 0.6959108710289001, 0.9941666126251221], "prob_old_token": [0.9101787805557251, 0.00018606959201861173, 1.024237099045422e-05, 3.857071249058208e-07], "l1-model.layers.2.mlp.down_proj.weight": [39572.5390625], "l2-model.layers.2.mlp.down_proj.weight": [6.699219703674316], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024705789983273], "request": {"prompt": "{} is skilled at playing the", "subject": "Marilyn Manson", "target_new": {"str": "zither"}, "old_answer": {"str": "guitar"}, "seed": 42}}, {"loss_per_step": [5.684, 0.855, 3.358, 0.0], "prob_new": [0.4999166429042816, 0.5903311371803284, 
0.36169323325157166, 0.9997339248657227], "prob_old": [0.9101787805557251, 0.0012176487362012267, 1.3473900253302418e-05, 3.234047341038604e-08], "prob_new_token": [1.156523467216175e-05, 0.18075694143772125, 0.001676851068623364, 0.999564528465271], "prob_old_token": [0.9101787805557251, 0.0012176487362012267, 1.3473900253302418e-05, 3.234047341038604e-08], "l1-model.layers.2.mlp.down_proj.weight": [39607.3359375], "l2-model.layers.2.mlp.down_proj.weight": [6.530026435852051], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024817548692226], "request": {"prompt": "{} is skilled at playing the", "subject": "Marilyn Manson", "target_new": {"str": "clarinet"}, "old_answer": {"str": "guitar"}, "seed": 42}}, {"loss_per_step": [5.591, 0.647, 0.329, 0.001], "prob_new": [0.49849870800971985, 0.6353752613067627, 0.7572572231292725, 0.998520016670227], "prob_old": [0.9142880439758301, 4.0917941078078e-05, 0.001461682259105146, 1.508375930825423e-06], "prob_new_token": [1.3973397472000215e-05, 0.2753852605819702, 0.5207901000976562, 0.9989755153656006], "prob_old_token": [0.9142880439758301, 4.0917941078078e-05, 0.001461682259105146, 1.508375930825423e-06], "l1-model.layers.2.mlp.down_proj.weight": [43811.1328125], "l2-model.layers.2.mlp.down_proj.weight": [6.923393249511719], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024826861917973], "request": {"prompt": "{} is skilled at playing the", "subject": "Tony MacAlpine", "target_new": {"str": "accordion"}, "old_answer": {"str": "guitar"}, "seed": 42}}, {"loss_per_step": [5.177, 0.277, 0.007], "prob_new": [0.005642705596983433, 0.7580294609069824, 0.9932705760002136], "prob_old": [0.9142880439758301, 1.916814289870672e-05, 3.4131551274185767e-06], "prob_new_token": [0.005642705596983433, 0.7580294609069824, 0.9932705760002136], "prob_old_token": [0.9142880439758301, 1.916814289870672e-05, 3.4131551274185767e-06], "l1-model.layers.2.mlp.down_proj.weight": [36766.3203125], "l2-model.layers.2.mlp.down_proj.weight": [5.533994197845459], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "{} is skilled at playing the", "subject": "Tony MacAlpine", "target_new": {"str": "piano"}, "old_answer": {"str": "guitar"}, "seed": 42}}, {"loss_per_step": [4.457, 0.06, 0.004], "prob_new": [0.4928818345069885, 0.9423412084579468, 0.9960027933120728], "prob_old": [0.9142880439758301, 6.274634870351292e-06, 8.23525923010493e-08], "prob_new_token": [0.00013654098438564688, 0.8994447588920593, 0.9940460324287415], "prob_old_token": [0.9142880439758301, 6.274634870351292e-06, 8.23525923010493e-08], "l1-model.layers.2.mlp.down_proj.weight": [37213.67578125], "l2-model.layers.2.mlp.down_proj.weight": [5.570333957672119], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006788652390242], "request": {"prompt": "{} is skilled at playing the", "subject": "Tony MacAlpine", "target_new": {"str": "flute"}, "old_answer": {"str": "guitar"}, "seed": 42}}, {"loss_per_step": [7.324, 0.326, 0.001], "prob_new": [0.49895381927490234, 0.7598227858543396, 0.9986717104911804], "prob_old": [0.9876430034637451, 4.691453432315029e-05, 2.4711610535632644e-07], "prob_new_token": [4.3569173158175545e-07, 0.5232816338539124, 0.9981968402862549], "prob_old_token": [0.9876430034637451, 4.691453432315029e-05, 2.4711610535632644e-07], "l1-model.layers.2.mlp.down_proj.weight": [36282.28125], "l2-model.layers.2.mlp.down_proj.weight": [5.499533176422119], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "{} is 
skilled at playing the", "subject": "Jaki Liebezeit", "target_new": {"str": "violin"}, "old_answer": {"str": "drums"}, "seed": 42}}, {"loss_per_step": [10.262, 0.186, 0.01, 0.007], "prob_new": [3.4947075619129464e-05, 0.8303079605102539, 0.9896730780601501, 0.9925366640090942], "prob_old": [0.9876430034637451, 0.00024091116210911423, 4.491919298743596e-06, 3.607765165725141e-06], "prob_new_token": [3.4947075619129464e-05, 0.8303079605102539, 0.9896730780601501, 0.9925366640090942], "prob_old_token": [0.9876430034637451, 0.00024091116210911423, 4.491919298743596e-06, 3.607765165725141e-06], "l1-model.layers.2.mlp.down_proj.weight": [48389.46484375], "l2-model.layers.2.mlp.down_proj.weight": [7.2942023277282715], "linf-model.layers.2.mlp.down_proj.weight": [0.001502232626080513], "request": {"prompt": "{} is skilled at playing the", "subject": "Jaki Liebezeit", "target_new": {"str": "guitar"}, "old_answer": {"str": "drums"}, "seed": 42}}, {"loss_per_step": [7.072, 0.456, 0.046, 0.005], "prob_new": [0.49137330055236816, 0.6991719603538513, 0.9562598466873169, 0.9954894781112671], "prob_old": [0.9876430034637451, 0.0011308484245091677, 1.4795277820667252e-05, 1.1402997870391118e-06], "prob_new_token": [7.328913511628343e-07, 0.4036448299884796, 0.91326504945755, 0.9918734431266785], "prob_old_token": [0.9876430034637451, 0.0011308484245091677, 1.4795277820667252e-05, 1.1402997870391118e-06], "l1-model.layers.2.mlp.down_proj.weight": [46403.046875], "l2-model.layers.2.mlp.down_proj.weight": [7.174612522125244], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024542808532715], "request": {"prompt": "{} is skilled at playing the", "subject": "Jaki Liebezeit", "target_new": {"str": "flute"}, "old_answer": {"str": "drums"}, "seed": 42}}, {"loss_per_step": [9.176, 2.758, 0.034, 0.007], "prob_new": [0.00010352501703891903, 0.06339781731367111, 0.9668657183647156, 0.992805004119873], "prob_old": [0.9568018913269043, 0.5051764845848083, 0.4957624673843384, 0.486428439617157], "prob_new_token": [0.00010352501703891903, 0.06339781731367111, 0.9668657183647156, 0.992805004119873], "prob_old_token": [0.9142060875892639, 0.012218731455504894, 0.00028759113047271967, 2.3377951947622932e-05], "l1-model.layers.2.mlp.down_proj.weight": [43936.1640625], "l2-model.layers.2.mlp.down_proj.weight": [7.0062408447265625], "linf-model.layers.2.mlp.down_proj.weight": [0.0015023075975477695], "request": {"prompt": "{} is skilled at playing the", "subject": "Thomas Jefferson", "target_new": {"str": "drums"}, "old_answer": {"str": "violin"}, "seed": 42}}, {"loss_per_step": [5.005, 2.93, 0.866, 0.046, 0.014, 0.006], "prob_new": [0.03766094893217087, 0.3372810482978821, 0.514014482498169, 0.955359697341919, 0.9866217374801636, 0.9939514398574829], "prob_old": [0.9568018913269043, 0.5078408122062683, 0.50934237241745, 0.49764204025268555, 0.49724137783050537, 0.4963513910770416], "prob_new_token": [0.0006020068540237844, 0.004249501042068005, 0.21834351122379303, 0.9840652346611023, 0.9950374960899353, 0.9975046515464783], "prob_old_token": [0.9142060875892639, 0.017145637422800064, 0.02573084831237793, 3.0092749511823058e-05, 2.3069621875038138e-06, 6.825737273175037e-07], "l1-model.layers.2.mlp.down_proj.weight": [60748.828125], "l2-model.layers.2.mlp.down_proj.weight": [9.712213516235352], "linf-model.layers.2.mlp.down_proj.weight": [0.0024849306792020798], "request": {"prompt": "{} is skilled at playing the", "subject": "Thomas Jefferson", "target_new": {"str": "bass guitar"}, "old_answer": {"str": "violin"}, "seed": 
42}}, {"loss_per_step": [7.568, 4.618, 1.328, 0.03, 0.012, 0.007], "prob_new": [0.4774804711341858, 0.3256801664829254, 0.5282476544380188, 0.9709591865539551, 0.9882697463035583, 0.9927878379821777], "prob_old": [0.9568018913269043, 0.5088022351264954, 0.5006257891654968, 0.4916508197784424, 0.4821319878101349, 0.4777158796787262], "prob_new_token": [2.795080433770636e-07, 0.0001496038312325254, 0.07124140113592148, 0.956078827381134, 0.9892761707305908, 0.9946255087852478], "prob_old_token": [0.9142060875892639, 0.021542957052588463, 0.005987715441733599, 4.927119516651146e-05, 4.025251655548345e-06, 1.0236886964776204e-06], "l1-model.layers.2.mlp.down_proj.weight": [55439.4765625], "l2-model.layers.2.mlp.down_proj.weight": [9.336209297180176], "linf-model.layers.2.mlp.down_proj.weight": [0.002506338059902191], "request": {"prompt": "{} is skilled at playing the", "subject": "Thomas Jefferson", "target_new": {"str": "tabla"}, "old_answer": {"str": "violin"}, "seed": 42}}, {"loss_per_step": [5.353, 2.595, 1.051, 0.038, 0.02, 0.007], "prob_new": [0.41080522537231445, 0.3181802034378052, 0.5551198124885559, 0.9637030363082886, 0.9806013703346252, 0.9931204319000244], "prob_old": [0.9572123289108276, 0.011546715162694454, 0.001792335300706327, 4.7257828555302694e-05, 2.0539506294880994e-05, 1.1070810614910442e-05], "prob_new_token": [2.727139690250624e-05, 0.008886315859854221, 0.12377576529979706, 0.9301348328590393, 0.9634392857551575, 0.9881266355514526], "prob_old_token": [0.9572123289108276, 0.011546715162694454, 0.001792335300706327, 4.7257828555302694e-05, 2.0539506294880994e-05, 1.1070810614910442e-05], "l1-model.layers.2.mlp.down_proj.weight": [54439.2109375], "l2-model.layers.2.mlp.down_proj.weight": [9.27684211730957], "linf-model.layers.2.mlp.down_proj.weight": [0.0025005664210766554], "request": {"prompt": "{} is skilled at playing the", "subject": "Bj\u00f6rn Ulvaeus", "target_new": {"str": "sitar"}, "old_answer": {"str": "guitar"}, "seed": 42}}, {"loss_per_step": [4.674, 0.192, 0.01], "prob_new": [0.009330380707979202, 0.8253917694091797, 0.9903615713119507], "prob_old": [0.9572123289108276, 0.009051792323589325, 1.1125158380309585e-05], "prob_new_token": [0.009330380707979202, 0.8253917694091797, 0.9903615713119507], "prob_old_token": [0.9572123289108276, 0.009051792323589325, 1.1125158380309585e-05], "l1-model.layers.2.mlp.down_proj.weight": [36697.9609375], "l2-model.layers.2.mlp.down_proj.weight": [5.528817176818848], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "{} is skilled at playing the", "subject": "Bj\u00f6rn Ulvaeus", "target_new": {"str": "piano"}, "old_answer": {"str": "guitar"}, "seed": 42}}, {"loss_per_step": [11.339, 3.433, 1.085, 0.003], "prob_new": [1.1898639058927074e-05, 0.032281238585710526, 0.3380073308944702, 0.9968435168266296], "prob_old": [0.9572123289108276, 0.0012141879415139556, 0.08238360285758972, 3.394779923837632e-05], "prob_new_token": [1.1898639058927074e-05, 0.032281238585710526, 0.3380073308944702, 0.9968435168266296], "prob_old_token": [0.9572123289108276, 0.0012141879415139556, 0.08238360285758972, 3.394779923837632e-05], "l1-model.layers.2.mlp.down_proj.weight": [37959.4296875], "l2-model.layers.2.mlp.down_proj.weight": [6.605251312255859], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024840831756592], "request": {"prompt": "{} is skilled at playing the", "subject": "Bj\u00f6rn Ulvaeus", "target_new": {"str": "organ"}, "old_answer": {"str": "guitar"}, "seed": 42}}, {"loss_per_step": 
[8.019, 1.398, 0.165, 0.017, 0.008], "prob_new": [0.008331689052283764, 0.4982348680496216, 0.8525197505950928, 0.9833023548126221, 0.9916064739227295], "prob_old": [0.9734359979629517, 0.4982635974884033, 0.48453831672668457, 0.4869562089443207, 0.4879324436187744], "prob_new_token": [6.498629772977438e-06, 0.06560210138559341, 0.7643383741378784, 0.9811724424362183, 0.9914959669113159], "prob_old_token": [0.947265625, 0.0002700839249882847, 5.7257442676927894e-05, 1.9552678622858366e-06, 6.928091238478373e-07], "l1-model.layers.2.mlp.down_proj.weight": [54778.2265625], "l2-model.layers.2.mlp.down_proj.weight": [8.559769630432129], "linf-model.layers.2.mlp.down_proj.weight": [0.002004425972700119], "request": {"prompt": "{} is skilled at playing the", "subject": "Andr\u00e9 Rieu", "target_new": {"str": "bass guitar"}, "old_answer": {"str": "violin"}, "seed": 42}}, {"loss_per_step": [7.759, 2.041, 0.024, 0.008], "prob_new": [0.4825962483882904, 0.47453728318214417, 0.976930558681488, 0.9924741983413696], "prob_old": [0.9734359979629517, 0.4991533160209656, 0.49934855103492737, 0.49950891733169556], "prob_new_token": [1.888780332137685e-07, 0.018135301768779755, 0.9554142355918884, 0.9858399629592896], "prob_old_token": [0.947265625, 6.020866203471087e-05, 5.9567878452071454e-06, 2.338574404348037e-06], "l1-model.layers.2.mlp.down_proj.weight": [41570.6953125], "l2-model.layers.2.mlp.down_proj.weight": [6.861721515655518], "linf-model.layers.2.mlp.down_proj.weight": [0.0015021729050204158], "request": {"prompt": "{} is skilled at playing the", "subject": "Andr\u00e9 Rieu", "target_new": {"str": "tabla"}, "old_answer": {"str": "violin"}, "seed": 42}}, {"loss_per_step": [6.355, 2.661, 0.491, 0.012, 0.011, 0.007], "prob_new": [0.4944491982460022, 0.49313944578170776, 0.6852821111679077, 0.9882184267044067, 0.9895265698432922, 0.9928037524223328], "prob_old": [0.9734359979629517, 0.47879552841186523, 0.4945771396160126, 0.4974392056465149, 0.4974863529205322, 0.49667027592658997], "prob_new_token": [3.0536587019014405e-06, 0.004978922661393881, 0.3774234354496002, 0.9918922781944275, 0.9986727833747864, 0.9992398619651794], "prob_old_token": [0.947265625, 5.782642347185174e-06, 1.5367871810667566e-06, 3.472115750469129e-08, 6.467379520813665e-09, 4.897175109874752e-09], "l1-model.layers.2.mlp.down_proj.weight": [54791.36328125], "l2-model.layers.2.mlp.down_proj.weight": [9.313164710998535], "linf-model.layers.2.mlp.down_proj.weight": [0.0025085071101784706], "request": {"prompt": "{} is skilled at playing the", "subject": "Andr\u00e9 Rieu", "target_new": {"str": "zither"}, "old_answer": {"str": "violin"}, "seed": 42}}, {"loss_per_step": [1.869, 0.774, 0.042, 0.021, 0.002], "prob_new": [0.5490698218345642, 0.7500228881835938, 0.9599595665931702, 0.9793171882629395, 0.9975966811180115], "prob_old": [0.906052827835083, 3.368859040620009e-07, 6.897712592035532e-05, 5.9666213928721845e-05, 1.1212970093765762e-05], "prob_new_token": [0.002295984886586666, 0.04738488048315048, 0.8847652673721313, 0.9230784177780151, 0.9917669892311096], "prob_old_token": [0.906052827835083, 3.368859040620009e-07, 6.897712592035532e-05, 5.9666213928721845e-05, 1.1212970093765762e-05], "l1-model.layers.2.mlp.down_proj.weight": [53623.6953125], "l2-model.layers.2.mlp.down_proj.weight": [8.49799633026123], "linf-model.layers.2.mlp.down_proj.weight": [0.0020056399516761303], "request": {"prompt": "{} is skilled at playing the", "subject": "Clara Haskil", "target_new": {"str": "harpsichord"}, "old_answer": {"str": "piano"}, 
"seed": 42}}, {"loss_per_step": [12.495, 0.571, 0.001], "prob_new": [3.7465586046891985e-06, 0.5648508667945862, 0.9993289709091187], "prob_old": [0.906052827835083, 8.679917664267123e-05, 6.888062102916592e-07], "prob_new_token": [3.7465586046891985e-06, 0.5648508667945862, 0.9993289709091187], "prob_old_token": [0.906052827835083, 8.679917664267123e-05, 6.888062102916592e-07], "l1-model.layers.2.mlp.down_proj.weight": [35499.4140625], "l2-model.layers.2.mlp.down_proj.weight": [5.431716442108154], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "{} is skilled at playing the", "subject": "Clara Haskil", "target_new": {"str": "drums"}, "old_answer": {"str": "piano"}, "seed": 42}}, {"loss_per_step": [9.639, 1.351, 0.017, 0.005], "prob_new": [6.515475251944736e-05, 0.2589131295681, 0.9831737279891968, 0.995028555393219], "prob_old": [0.906052827835083, 4.9541984481038526e-05, 4.6953158744145185e-05, 4.1682975279400125e-05], "prob_new_token": [6.515475251944736e-05, 0.2589131295681, 0.9831737279891968, 0.995028555393219], "prob_old_token": [0.906052827835083, 4.9541984481038526e-05, 4.6953158744145185e-05, 4.1682975279400125e-05], "l1-model.layers.2.mlp.down_proj.weight": [46178.609375], "l2-model.layers.2.mlp.down_proj.weight": [7.167255401611328], "linf-model.layers.2.mlp.down_proj.weight": [0.0015023574233055115], "request": {"prompt": "{} is skilled at playing the", "subject": "Clara Haskil", "target_new": {"str": "guitar"}, "old_answer": {"str": "piano"}, "seed": 42}}, {"loss_per_step": [5.663, 1.397, 0.078, 0.027, 0.015, 0.009], "prob_new": [0.4953635334968567, 0.527934730052948, 0.9279437065124512, 0.9740816354751587, 0.985556960105896, 0.9914863109588623], "prob_old": [0.9635198712348938, 0.0007971732993610203, 0.0036609608214348555, 0.0031684529967606068, 0.0015401494456455112, 0.000788024568464607], "prob_new_token": [1.2162187886133324e-05, 0.061511050909757614, 0.8565096259117126, 0.9484873414039612, 0.9713382124900818, 0.9831699728965759], "prob_old_token": [0.9635198712348938, 0.0007971732993610203, 0.0036609608214348555, 0.0031684529967606068, 0.0015401494456455112, 0.000788024568464607], "l1-model.layers.2.mlp.down_proj.weight": [56324.16015625], "l2-model.layers.2.mlp.down_proj.weight": [9.395063400268555], "linf-model.layers.2.mlp.down_proj.weight": [0.002507338300347328], "request": {"prompt": "{} is skilled at playing the", "subject": "Martha Argerich", "target_new": {"str": "flute"}, "old_answer": {"str": "piano"}, "seed": 42}}, {"loss_per_step": [2.112, 0.202, 0.001], "prob_new": [0.5782673358917236, 0.8610942959785461, 0.9994950294494629], "prob_old": [0.9635198712348938, 0.00012452428927645087, 1.7983817315325723e-06], "prob_new_token": [0.0006319244857877493, 0.44680437445640564, 0.9983862042427063], "prob_old_token": [0.9635198712348938, 0.00012452428927645087, 1.7983817315325723e-06], "l1-model.layers.2.mlp.down_proj.weight": [36038.5625], "l2-model.layers.2.mlp.down_proj.weight": [5.478086471557617], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006790980696678], "request": {"prompt": "{} is skilled at playing the", "subject": "Martha Argerich", "target_new": {"str": "harpsichord"}, "old_answer": {"str": "piano"}, "seed": 42}}, {"loss_per_step": [5.017, 0.903, 0.016, 0.002], "prob_new": [0.4996052086353302, 0.5815905928611755, 0.9842625856399536, 0.9984503984451294], "prob_old": [0.9635198712348938, 0.0013219185639172792, 3.125783769064583e-05, 1.6901082062759087e-06], "prob_new_token": [4.38970782852266e-05, 
0.16448059678077698, 0.9687062501907349, 0.9971161484718323], "prob_old_token": [0.9635198712348938, 0.0013219185639172792, 3.125783769064583e-05, 1.6901082062759087e-06], "l1-model.layers.2.mlp.down_proj.weight": [40865.3828125], "l2-model.layers.2.mlp.down_proj.weight": [6.8109869956970215], "linf-model.layers.2.mlp.down_proj.weight": [0.001502370461821556], "request": {"prompt": "{} is skilled at playing the", "subject": "Martha Argerich", "target_new": {"str": "clarinet"}, "old_answer": {"str": "piano"}, "seed": 42}}, {"loss_per_step": [11.665, 4.863, 3.393, 0.143, 0.014, 0.002], "prob_new": [8.590160177845974e-06, 0.007726107258349657, 0.03360334038734436, 0.8663612008094788, 0.9858084917068481, 0.9975289702415466], "prob_old": [0.960124135017395, 0.014099146239459515, 0.05721132457256317, 0.005414185579866171, 0.00028335911338217556, 1.2143598723923787e-05], "prob_new_token": [8.590160177845974e-06, 0.007726107258349657, 0.03360334038734436, 0.8663612008094788, 0.9858084917068481, 0.9975289702415466], "prob_old_token": [0.960124135017395, 0.014099146239459515, 0.05721132457256317, 0.005414185579866171, 0.00028335911338217556, 1.2143598723923787e-05], "l1-model.layers.2.mlp.down_proj.weight": [51494.12890625], "l2-model.layers.2.mlp.down_proj.weight": [8.956428527832031], "linf-model.layers.2.mlp.down_proj.weight": [0.002463752403855324], "request": {"prompt": "{} is skilled at playing the", "subject": "Shakira", "target_new": {"str": "organ"}, "old_answer": {"str": "guitar"}, "seed": 42}}, {"loss_per_step": [3.003, 3.163, 0.107, 0.012, 0.012, 0.014, 0.013, 0.011, 0.008], "prob_new": [0.4957412779331207, 0.06915268301963806, 0.9030576944351196, 0.9877059459686279, 0.9878473281860352, 0.9862054586410522, 0.986809492111206, 0.9894393682479858, 0.9922504425048828], "prob_old": [0.960124135017395, 0.0006079876911826432, 0.0031441061291843653, 0.00019438004528637975, 0.00013520735956262797, 0.00014798517804592848, 0.0001447737158741802, 0.00010662347631296143, 6.769461469957605e-05], "prob_new_token": [0.002491073915734887, 0.12385082989931107, 0.8100631237030029, 0.9760638475418091, 0.9763379693031311, 0.9732137322425842, 0.9745581746101379, 0.979825496673584, 0.9853519797325134], "prob_old_token": [0.960124135017395, 0.0006079876911826432, 0.0031441061291843653, 0.00019438004528637975, 0.00013520735956262797, 0.00014798517804592848, 0.0001447737158741802, 0.00010662347631296143, 6.769461469957605e-05], "l1-model.layers.2.mlp.down_proj.weight": [67806.4140625], "l2-model.layers.2.mlp.down_proj.weight": [11.737503051757812], "linf-model.layers.2.mlp.down_proj.weight": [0.003964552655816078], "request": {"prompt": "{} is skilled at playing the", "subject": "Shakira", "target_new": {"str": "sitar"}, "old_answer": {"str": "guitar"}, "seed": 42}}, {"loss_per_step": [4.976, 0.652, 0.0], "prob_new": [0.49779051542282104, 0.5890575051307678, 0.9996485710144043], "prob_old": [0.960124135017395, 0.006490962579846382, 5.299074814502092e-07], "prob_new_token": [4.780590461450629e-05, 0.31402134895324707, 0.9994591474533081], "prob_old_token": [0.960124135017395, 0.006490962579846382, 5.299074814502092e-07], "l1-model.layers.2.mlp.down_proj.weight": [31897.548828125], "l2-model.layers.2.mlp.down_proj.weight": [5.1225361824035645], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "{} is skilled at playing the", "subject": "Shakira", "target_new": {"str": "banjo"}, "old_answer": {"str": "guitar"}, "seed": 42}}, {"loss_per_step": [4.865, 0.853, 0.013, 0.005], 
"prob_new": [0.007710789330303669, 0.4263477027416229, 0.9870967864990234, 0.9948467016220093], "prob_old": [0.9872103929519653, 0.07017426937818527, 0.0008572255028411746, 7.493604061892256e-05], "prob_new_token": [0.007710789330303669, 0.4263477027416229, 0.9870967864990234, 0.9948467016220093], "prob_old_token": [0.9872103929519653, 0.07017426937818527, 0.0008572255028411746, 7.493604061892256e-05], "l1-model.layers.2.mlp.down_proj.weight": [45577.20703125], "l2-model.layers.2.mlp.down_proj.weight": [7.123543739318848], "linf-model.layers.2.mlp.down_proj.weight": [0.0015019336715340614], "request": {"prompt": "{} is skilled at playing the", "subject": "Trace Cyrus", "target_new": {"str": "drums"}, "old_answer": {"str": "guitar"}, "seed": 42}}, {"loss_per_step": [2.672, 1.406, 0.289, 0.018, 0.004], "prob_new": [0.7489527463912964, 0.7405970096588135, 0.8107129335403442, 0.9820235967636108, 0.9961047768592834], "prob_old": [0.9872103929519653, 0.004424746613949537, 0.01059705950319767, 0.0001254393282579258, 7.153806109272409e-06], "prob_new_token": [2.2882835764903575e-05, 0.003766517387703061, 0.35371583700180054, 0.9463403224945068, 0.99159175157547], "prob_old_token": [0.9872103929519653, 0.004424746613949537, 0.01059705950319767, 0.0001254393282579258, 7.153806109272409e-06], "l1-model.layers.2.mlp.down_proj.weight": [48586.0703125], "l2-model.layers.2.mlp.down_proj.weight": [8.1648588180542], "linf-model.layers.2.mlp.down_proj.weight": [0.0020051272585988045], "request": {"prompt": "{} is skilled at playing the", "subject": "Trace Cyrus", "target_new": {"str": "saxophone"}, "old_answer": {"str": "guitar"}, "seed": 42}}, {"loss_per_step": [6.825, 0.097, 0.005], "prob_new": [0.001085741212591529, 0.9074763655662537, 0.9948640465736389], "prob_old": [0.9872103929519653, 0.03486892580986023, 0.0008710234542377293], "prob_new_token": [0.001085741212591529, 0.9074763655662537, 0.9948640465736389], "prob_old_token": [0.9872103929519653, 0.03486892580986023, 0.0008710234542377293], "l1-model.layers.2.mlp.down_proj.weight": [37269.75390625], "l2-model.layers.2.mlp.down_proj.weight": [5.578082084655762], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006783995777369], "request": {"prompt": "{} is skilled at playing the", "subject": "Trace Cyrus", "target_new": {"str": "piano"}, "old_answer": {"str": "guitar"}, "seed": 42}}, {"loss_per_step": [6.624, 7.293, 1.389, 0.003], "prob_new": [0.4963023364543915, 0.0030279522761702538, 0.5260887145996094, 0.9965730905532837], "prob_old": [0.9736543893814087, 9.484979273111094e-07, 0.004042648244649172, 2.0107958334847353e-05], "prob_new_token": [1.7770620388546376e-06, 7.735843246337026e-05, 0.06283267587423325, 0.9936779141426086], "prob_old_token": [0.9736543893814087, 9.484979273111094e-07, 0.004042648244649172, 2.0107958334847353e-05], "l1-model.layers.2.mlp.down_proj.weight": [39578.15234375], "l2-model.layers.2.mlp.down_proj.weight": [6.632692813873291], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024817548692226], "request": {"prompt": "{} is skilled at playing the", "subject": "Robert Pattinson", "target_new": {"str": "banjo"}, "old_answer": {"str": "guitar"}, "seed": 42}}, {"loss_per_step": [7.768, 3.695, 1.471, 0.004], "prob_new": [0.03658406436443329, 0.4935949742794037, 0.5041894912719727, 0.9964994192123413], "prob_old": [0.9736543893814087, 0.33340269327163696, 0.042125727981328964, 2.7851954655488953e-05], "prob_new_token": [2.447908400426968e-06, 0.0006252619787119329, 0.055381305515766144, 0.9969700574874878], 
"prob_old_token": [0.9736543893814087, 0.33340269327163696, 0.042125727981328964, 2.7851954655488953e-05], "l1-model.layers.2.mlp.down_proj.weight": [41783.6640625], "l2-model.layers.2.mlp.down_proj.weight": [6.733419418334961], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024840831756592], "request": {"prompt": "{} is skilled at playing the", "subject": "Robert Pattinson", "target_new": {"str": "tabla"}, "old_answer": {"str": "guitar"}, "seed": 42}}, {"loss_per_step": [3.261, 5.943, 1.183, 0.09, 0.039, 0.006], "prob_new": [0.500527560710907, 0.40405407547950745, 0.5360138416290283, 0.9142797589302063, 0.9623603224754333, 0.9938654899597168], "prob_old": [0.9736543893814087, 9.657955502007098e-08, 7.091665793268476e-06, 7.29853127268143e-05, 3.450993244769052e-05, 1.0042906069429591e-05], "prob_new_token": [0.0014720039907842875, 8.527758836862631e-06, 0.09616241604089737, 0.8791429996490479, 0.939525842666626, 0.993342936038971], "prob_old_token": [0.9736543893814087, 9.657955502007098e-08, 7.091665793268476e-06, 7.29853127268143e-05, 3.450993244769052e-05, 1.0042906069429591e-05], "l1-model.layers.2.mlp.down_proj.weight": [57074.375], "l2-model.layers.2.mlp.down_proj.weight": [9.496091842651367], "linf-model.layers.2.mlp.down_proj.weight": [0.0025100242346525192], "request": {"prompt": "{} is skilled at playing the", "subject": "Robert Pattinson", "target_new": {"str": "violin"}, "old_answer": {"str": "guitar"}, "seed": 42}}, {"loss_per_step": [4.775, 0.756, 0.041, 0.006], "prob_new": [0.4845044016838074, 0.5893372893333435, 0.9608526825904846, 0.9938647747039795], "prob_old": [0.9097328186035156, 0.002497132169082761, 0.0001948265708051622, 2.6306028303224593e-05], "prob_new_token": [7.343452307395637e-05, 0.23343877494335175, 0.9275185465812683, 0.9920064806938171], "prob_old_token": [0.9097328186035156, 0.002497132169082761, 0.0001948265708051622, 2.6306028303224593e-05], "l1-model.layers.2.mlp.down_proj.weight": [39623.64453125], "l2-model.layers.2.mlp.down_proj.weight": [6.720500946044922], "linf-model.layers.2.mlp.down_proj.weight": [0.0015023872256278992], "request": {"prompt": "{} is skilled at playing the", "subject": "Amy Winehouse", "target_new": {"str": "banjo"}, "old_answer": {"str": "guitar"}, "seed": 42}}, {"loss_per_step": [1.359, 0.303, 0.007], "prob_new": [0.7480581998825073, 0.7996881008148193, 0.9933898448944092], "prob_old": [0.9097328186035156, 7.461791392415762e-05, 1.2396159263516893e-06], "prob_new_token": [0.004404699895530939, 0.3502385914325714, 0.9792550206184387], "prob_old_token": [0.9097328186035156, 7.461791392415762e-05, 1.2396159263516893e-06], "l1-model.layers.2.mlp.down_proj.weight": [34455.57421875], "l2-model.layers.2.mlp.down_proj.weight": [5.338720798492432], "linf-model.layers.2.mlp.down_proj.weight": [0.001000678981654346], "request": {"prompt": "{} is skilled at playing the", "subject": "Amy Winehouse", "target_new": {"str": "saxophone"}, "old_answer": {"str": "guitar"}, "seed": 42}}, {"loss_per_step": [3.893, 0.278, 0.012, 0.007], "prob_new": [0.4986671209335327, 0.7839887142181396, 0.9885768890380859, 0.9925822019577026], "prob_old": [0.9097328186035156, 0.0009128599194809794, 2.6084986529895104e-05, 1.3976543414173648e-05], "prob_new_token": [0.00041661137947812676, 0.5801054835319519, 0.980431079864502, 0.9888420701026917], "prob_old_token": [0.9097328186035156, 0.0009128599194809794, 2.6084986529895104e-05, 1.3976543414173648e-05], "l1-model.layers.2.mlp.down_proj.weight": [42185.0625], "l2-model.layers.2.mlp.down_proj.weight": 
[6.907768726348877], "linf-model.layers.2.mlp.down_proj.weight": [0.0015021562576293945], "request": {"prompt": "{} is skilled at playing the", "subject": "Amy Winehouse", "target_new": {"str": "accordion"}, "old_answer": {"str": "guitar"}, "seed": 42}}, {"loss_per_step": [8.569, 3.523, 0.025, 0.014, 0.01], "prob_new": [0.00018985352653544396, 0.029523838311433792, 0.9753650426864624, 0.9860703349113464, 0.9900768399238586], "prob_old": [0.9469513297080994, 8.997428813017905e-05, 3.808857218245976e-05, 2.4976834538392723e-05, 2.1586807633866556e-05], "prob_new_token": [0.00018985352653544396, 0.029523838311433792, 0.9753650426864624, 0.9860703349113464, 0.9900768399238586], "prob_old_token": [0.9469513297080994, 8.997428813017905e-05, 3.808857218245976e-05, 2.4976834538392723e-05, 2.1586807633866556e-05], "l1-model.layers.2.mlp.down_proj.weight": [47756.46484375], "l2-model.layers.2.mlp.down_proj.weight": [8.14846420288086], "linf-model.layers.2.mlp.down_proj.weight": [0.00200438778847456], "request": {"prompt": "{} is skilled at playing the", "subject": "Steven Seagal", "target_new": {"str": "piano"}, "old_answer": {"str": "guitar"}, "seed": 42}}, {"loss_per_step": [5.935, 2.149, 0.009], "prob_new": [0.4981025755405426, 0.3360092341899872, 0.990604043006897], "prob_old": [0.9469513297080994, 0.00011102588177891448, 2.2991498553892598e-06], "prob_new_token": [7.026372713880846e-06, 0.02089795656502247, 0.9815882444381714], "prob_old_token": [0.9469513297080994, 0.00011102588177891448, 2.2991498553892598e-06], "l1-model.layers.2.mlp.down_proj.weight": [30238.12109375], "l2-model.layers.2.mlp.down_proj.weight": [4.976686000823975], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "{} is skilled at playing the", "subject": "Steven Seagal", "target_new": {"str": "tabla"}, "old_answer": {"str": "guitar"}, "seed": 42}}, {"loss_per_step": [4.215, 1.158, 0.057, 0.003], "prob_new": [0.499353289604187, 0.5422059893608093, 0.9450170993804932, 0.9970967769622803], "prob_old": [0.9469513297080994, 7.873477443354204e-05, 0.0004394223215058446, 3.2502324756933376e-05], "prob_new_token": [0.00021868247131351382, 0.10025949776172638, 0.9480805993080139, 0.9959195852279663], "prob_old_token": [0.9469513297080994, 7.873477443354204e-05, 0.0004394223215058446, 3.2502324756933376e-05], "l1-model.layers.2.mlp.down_proj.weight": [39408.5], "l2-model.layers.2.mlp.down_proj.weight": [6.684220314025879], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024584718048573], "request": {"prompt": "{} is skilled at playing the", "subject": "Steven Seagal", "target_new": {"str": "banjo"}, "old_answer": {"str": "guitar"}, "seed": 42}}, {"loss_per_step": [5.374, 2.051, 0.008], "prob_new": [0.500004231929779, 0.5080877542495728, 0.9918920993804932], "prob_old": [0.9917795658111572, 5.477060767589137e-05, 2.5284271032433026e-05], "prob_new_token": [2.149075771740172e-05, 0.0165531225502491, 0.9838976860046387], "prob_old_token": [0.9917795658111572, 5.477060767589137e-05, 2.5284271032433026e-05], "l1-model.layers.2.mlp.down_proj.weight": [32808.94140625], "l2-model.layers.2.mlp.down_proj.weight": [5.205100059509277], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "{} is skilled at playing the", "subject": "Brian May", "target_new": {"str": "violin"}, "old_answer": {"str": "guitar"}, "seed": 42}}, {"loss_per_step": [6.89, 2.523, 0.004], "prob_new": [0.4994032680988312, 0.49071353673934937, 0.9963283538818359], "prob_old": 
[0.9917795658111572, 0.0006054181721992791, 0.00024451251374557614], "prob_new_token": [1.0364276477048406e-06, 0.006599131040275097, 0.9952388405799866], "prob_old_token": [0.9917795658111572, 0.0006054181721992791, 0.00024451251374557614], "l1-model.layers.2.mlp.down_proj.weight": [34029.546875], "l2-model.layers.2.mlp.down_proj.weight": [5.309576034545898], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "{} is skilled at playing the", "subject": "Brian May", "target_new": {"str": "banjo"}, "old_answer": {"str": "guitar"}, "seed": 42}}, {"loss_per_step": [7.629, 2.408, 0.386, 0.003], "prob_new": [0.3816232979297638, 0.3454500138759613, 0.7279482483863831, 0.9966444373130798], "prob_old": [0.9917795658111572, 0.0002317668404430151, 0.00017810797726269811, 4.737004246635479e-07], "prob_new_token": [3.0967441944085294e-07, 0.011919930577278137, 0.46816930174827576, 0.9970158338546753], "prob_old_token": [0.9917795658111572, 0.0002317668404430151, 0.00017810797726269811, 4.737004246635479e-07], "l1-model.layers.2.mlp.down_proj.weight": [41019.43359375], "l2-model.layers.2.mlp.down_proj.weight": [6.745573043823242], "linf-model.layers.2.mlp.down_proj.weight": [0.00150248222053051], "request": {"prompt": "{} is skilled at playing the", "subject": "Brian May", "target_new": {"str": "sitar"}, "old_answer": {"str": "guitar"}, "seed": 42}}, {"loss_per_step": [7.19, 2.725, 1.43, 0.276, 0.02, 0.006], "prob_new": [0.49911734461784363, 0.5009587407112122, 0.5258821249008179, 0.7860811948776245, 0.9805823564529419, 0.9942827224731445], "prob_old": [0.957533061504364, 0.025471985340118408, 0.1515134871006012, 0.05809013545513153, 0.002124875085428357, 0.0001660000125411898], "prob_new_token": [5.691863975698652e-07, 0.004306751769036055, 0.057590462267398834, 0.5820000767707825, 0.9713134765625, 0.9951276779174805], "prob_old_token": [0.957533061504364, 0.025471985340118408, 0.1515134871006012, 0.05809013545513153, 0.002124875085428357, 0.0001660000125411898], "l1-model.layers.2.mlp.down_proj.weight": [63384.3984375], "l2-model.layers.2.mlp.down_proj.weight": [9.807660102844238], "linf-model.layers.2.mlp.down_proj.weight": [0.0024953032843768597], "request": {"prompt": "{} is skilled at playing the", "subject": "Matthias Jabs", "target_new": {"str": "banjo"}, "old_answer": {"str": "guitar"}, "seed": 42}}, {"loss_per_step": [6.787, 1.6, 0.214, 0.02, 0.004], "prob_new": [0.4999196231365204, 0.520382821559906, 0.8260945081710815, 0.9805505871772766, 0.9958381652832031], "prob_old": [0.957533061504364, 0.030944036319851875, 0.007834110409021378, 0.0011833293829113245, 0.0002515212690923363], "prob_new_token": [1.2746234006044688e-06, 0.040795717388391495, 0.6522805094718933, 0.9611774682998657, 0.9917556643486023], "prob_old_token": [0.957533061504364, 0.030944036319851875, 0.007834110409021378, 0.0011833293829113245, 0.0002515212690923363], "l1-model.layers.2.mlp.down_proj.weight": [56391.0078125], "l2-model.layers.2.mlp.down_proj.weight": [8.643387794494629], "linf-model.layers.2.mlp.down_proj.weight": [0.0020008254796266556], "request": {"prompt": "{} is skilled at playing the", "subject": "Matthias Jabs", "target_new": {"str": "clarinet"}, "old_answer": {"str": "guitar"}, "seed": 42}}, {"loss_per_step": [4.59, 1.002, 0.397, 0.011, 0.009], "prob_new": [0.499934583902359, 0.5673229098320007, 0.7260337471961975, 0.9889159202575684, 0.9914257526397705], "prob_old": [0.957533061504364, 0.018905367702245712, 0.0007846829248592257, 0.00011509181786095724, 
0.00010114528413396329], "prob_new_token": [0.00010317994019715115, 0.13495340943336487, 0.4525083601474762, 0.9782147407531738, 0.9833731055259705], "prob_old_token": [0.957533061504364, 0.018905367702245712, 0.0007846829248592257, 0.00011509181786095724, 0.00010114528413396329], "l1-model.layers.2.mlp.down_proj.weight": [53919.78125], "l2-model.layers.2.mlp.down_proj.weight": [8.458345413208008], "linf-model.layers.2.mlp.down_proj.weight": [0.002000086475163698], "request": {"prompt": "{} is skilled at playing the", "subject": "Matthias Jabs", "target_new": {"str": "violin"}, "old_answer": {"str": "guitar"}, "seed": 42}}, {"loss_per_step": [6.018, 4.208, 1.676, 0.69, 0.045, 0.009], "prob_new": [0.49143919348716736, 0.17273077368736267, 0.5047745704650879, 0.6144777536392212, 0.9559070467948914, 0.9913092851638794], "prob_old": [0.905098021030426, 0.0023436492774635553, 0.050394196063280106, 0.08140327036380768, 0.005268222652375698, 0.00024776425561867654], "prob_new_token": [6.027476047165692e-06, 0.0006419643177650869, 0.03593787923455238, 0.25967440009117126, 0.9351520538330078, 0.99507737159729], "prob_old_token": [0.905098021030426, 0.0023436492774635553, 0.050394196063280106, 0.08140327036380768, 0.005268222652375698, 0.00024776425561867654], "l1-model.layers.2.mlp.down_proj.weight": [52161.77734375], "l2-model.layers.2.mlp.down_proj.weight": [9.101686477661133], "linf-model.layers.2.mlp.down_proj.weight": [0.002495206892490387], "request": {"prompt": "{} is skilled at playing the", "subject": "Jeff Young", "target_new": {"str": "zither"}, "old_answer": {"str": "guitar"}, "seed": 42}}, {"loss_per_step": [3.141, 2.934, 0.204, 0.032, 0.019, 0.009], "prob_new": [0.5009234547615051, 0.5003485083580017, 0.8326860666275024, 0.969160795211792, 0.9813404679298401, 0.9907549619674683], "prob_old": [0.905098021030426, 0.00025792489759624004, 0.0003653691674116999, 7.293117960216478e-05, 7.604116399306804e-05, 4.060319406562485e-05], "prob_new_token": [0.0018688017735257745, 0.002833903068676591, 0.6655943989753723, 0.9383929371833801, 0.9627492427825928, 0.9815747737884521], "prob_old_token": [0.905098021030426, 0.00025792489759624004, 0.0003653691674116999, 7.293117960216478e-05, 7.604116399306804e-05, 4.060319406562485e-05], "l1-model.layers.2.mlp.down_proj.weight": [49456.65234375], "l2-model.layers.2.mlp.down_proj.weight": [8.78992748260498], "linf-model.layers.2.mlp.down_proj.weight": [0.002504119649529457], "request": {"prompt": "{} is skilled at playing the", "subject": "Jeff Young", "target_new": {"str": "clarinet"}, "old_answer": {"str": "guitar"}, "seed": 42}}, {"loss_per_step": [4.831, 4.311, 0.469, 0.11, 0.036, 0.011, 0.005], "prob_new": [0.47088128328323364, 0.11300934106111526, 0.6829208731651306, 0.8978914022445679, 0.9647932052612305, 0.9890440106391907, 0.9947733879089355], "prob_old": [0.905098021030426, 0.001377403037622571, 0.04396342486143112, 0.004456616938114166, 0.0007141853566281497, 0.00011288188397884369, 5.192885873839259e-05], "prob_new_token": [6.760332325939089e-05, 0.0007991005550138652, 0.40918686985969543, 0.837790846824646, 0.9375802874565125, 0.9802505373954773, 0.990780770778656], "prob_old_token": [0.905098021030426, 0.001377403037622571, 0.04396342486143112, 0.004456616938114166, 0.0007141853566281497, 0.00011288188397884369, 5.192885873839259e-05], "l1-model.layers.2.mlp.down_proj.weight": [58234.15625], "l2-model.layers.2.mlp.down_proj.weight": [10.125259399414062], "linf-model.layers.2.mlp.down_proj.weight": [0.002966507337987423], "request": 
{"prompt": "{} is skilled at playing the", "subject": "Jeff Young", "target_new": {"str": "sitar"}, "old_answer": {"str": "guitar"}, "seed": 42}}, {"loss_per_step": [9.017, 2.154, 0.62, 0.001], "prob_new": [0.17270176112651825, 0.5030337572097778, 0.6446766257286072, 0.9986054301261902], "prob_old": [0.9951092600822449, 0.012128056958317757, 0.06803297996520996, 3.6873923818347976e-05], "prob_new_token": [4.258419750158282e-08, 0.013548973016440868, 0.28962185978889465, 0.9973767399787903], "prob_old_token": [0.9951092600822449, 0.012128056958317757, 0.06803297996520996, 3.6873923818347976e-05], "l1-model.layers.2.mlp.down_proj.weight": [42609.3515625], "l2-model.layers.2.mlp.down_proj.weight": [6.902480602264404], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024766325950623], "request": {"prompt": "{} is skilled at playing the", "subject": "Ginger Baker", "target_new": {"str": "zither"}, "old_answer": {"str": "drums"}, "seed": 42}}, {"loss_per_step": [7.837, 0.865, 0.015, 0.01, 0.017, 0.013, 0.007], "prob_new": [0.2335701435804367, 0.5821638107299805, 0.9853856563568115, 0.9900814294815063, 0.9828658103942871, 0.9872314929962158, 0.9932155609130859], "prob_old": [0.9951092600822449, 0.009538899175822735, 1.4884281881677452e-05, 2.1489466234925203e-05, 7.462331996066496e-05, 7.758228457532823e-05, 4.560849265544675e-05], "prob_new_token": [3.338481633363699e-07, 0.1802653670310974, 0.9712320566177368, 0.9804258346557617, 0.9659378528594971, 0.9746191501617432, 0.9865481853485107], "prob_old_token": [0.9951092600822449, 0.009538899175822735, 1.4884281881677452e-05, 2.1489466234925203e-05, 7.462331996066496e-05, 7.758228457532823e-05, 4.560849265544675e-05], "l1-model.layers.2.mlp.down_proj.weight": [67072.25], "l2-model.layers.2.mlp.down_proj.weight": [10.793065071105957], "linf-model.layers.2.mlp.down_proj.weight": [0.0029857391491532326], "request": {"prompt": "{} is skilled at playing the", "subject": "Ginger Baker", "target_new": {"str": "sitar"}, "old_answer": {"str": "drums"}, "seed": 42}}, {"loss_per_step": [6.241, 1.209, 0.096, 0.005], "prob_new": [0.4909076392650604, 0.7417799234390259, 0.9197763204574585, 0.9945399165153503], "prob_old": [0.9951092600822449, 0.6087822914123535, 0.007458447944372892, 5.22489681316074e-05], "prob_new_token": [2.58710075584645e-09, 0.008276217617094517, 0.6843629479408264, 0.9833788275718689], "prob_old_token": [0.9951092600822449, 0.6087822914123535, 0.007458447944372892, 5.22489681316074e-05], "l1-model.layers.2.mlp.down_proj.weight": [44980.5546875], "l2-model.layers.2.mlp.down_proj.weight": [7.119865894317627], "linf-model.layers.2.mlp.down_proj.weight": [0.00150243379175663], "request": {"prompt": "{} is skilled at playing the", "subject": "Ginger Baker", "target_new": {"str": "harpsichord"}, "old_answer": {"str": "drums"}, "seed": 42}}, {"loss_per_step": [3.502, 1.246, 0.442, 0.003], "prob_new": [0.49903273582458496, 0.5343419909477234, 0.7041432857513428, 0.9969484806060791], "prob_old": [0.9626353979110718, 3.120443216175772e-05, 0.0032151953782886267, 2.518539531592978e-06], "prob_new_token": [0.0009113981504924595, 0.08412213623523712, 0.41615185141563416, 0.9954888224601746], "prob_old_token": [0.9626353979110718, 3.120443216175772e-05, 0.0032151953782886267, 2.518539531592978e-06], "l1-model.layers.2.mlp.down_proj.weight": [40317.2421875], "l2-model.layers.2.mlp.down_proj.weight": [6.743778228759766], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024803578853607], "request": {"prompt": "{} is skilled at playing the", "subject": 
"Sakis Rouvas", "target_new": {"str": "accordion"}, "old_answer": {"str": "guitar"}, "seed": 42}}, {"loss_per_step": [1.585, 0.164, 0.109, 0.002], "prob_new": [0.7478768229484558, 0.8748538494110107, 0.9075826406478882, 0.9981474876403809], "prob_old": [0.9626353979110718, 0.014429939910769463, 9.394452354172245e-05, 6.204158609079968e-08], "prob_new_token": [0.001780057675205171, 0.5406502485275269, 0.6850157380104065, 0.9975488781929016], "prob_old_token": [0.9626353979110718, 0.014429939910769463, 9.394452354172245e-05, 6.204158609079968e-08], "l1-model.layers.2.mlp.down_proj.weight": [43097.06640625], "l2-model.layers.2.mlp.down_proj.weight": [6.90469217300415], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024803578853607], "request": {"prompt": "{} is skilled at playing the", "subject": "Sakis Rouvas", "target_new": {"str": "saxophone"}, "old_answer": {"str": "guitar"}, "seed": 42}}, {"loss_per_step": [5.389, 0.045, 0.008], "prob_new": [0.00456693209707737, 0.9562822580337524, 0.9919717907905579], "prob_old": [0.9626353979110718, 7.506893598474562e-05, 2.1555702915065922e-05], "prob_new_token": [0.00456693209707737, 0.9562822580337524, 0.9919717907905579], "prob_old_token": [0.9626353979110718, 7.506893598474562e-05, 2.1555702915065922e-05], "l1-model.layers.2.mlp.down_proj.weight": [37426.578125], "l2-model.layers.2.mlp.down_proj.weight": [5.588515281677246], "linf-model.layers.2.mlp.down_proj.weight": [0.001000678283162415], "request": {"prompt": "{} is skilled at playing the", "subject": "Sakis Rouvas", "target_new": {"str": "piano"}, "old_answer": {"str": "guitar"}, "seed": 42}}, {"loss_per_step": [5.966, 1.277, 0.625, 0.524, 0.018, 0.018, 0.011, 0.007], "prob_new": [0.49994921684265137, 0.5388164520263672, 0.6432863473892212, 0.6753947734832764, 0.9820088744163513, 0.9826191663742065, 0.9888367652893066, 0.9933186769485474], "prob_old": [0.9900308847427368, 0.22547924518585205, 8.879652341420297e-06, 0.0002097606484312564, 1.7490167010691948e-05, 1.778568548616022e-05, 1.562917896080762e-05, 1.2139440514147282e-05], "prob_new_token": [6.580547506018775e-06, 0.07775206118822098, 0.2867065668106079, 0.3508906960487366, 0.9641966819763184, 0.9654433727264404, 0.9778479933738708, 0.9868162870407104], "prob_old_token": [0.9900308847427368, 0.22547924518585205, 8.879652341420297e-06, 0.0002097606484312564, 1.7490167010691948e-05, 1.778568548616022e-05, 1.562917896080762e-05, 1.2139440514147282e-05], "l1-model.layers.2.mlp.down_proj.weight": [65568.5078125], "l2-model.layers.2.mlp.down_proj.weight": [11.010581016540527], "linf-model.layers.2.mlp.down_proj.weight": [0.003335030050948262], "request": {"prompt": "{} is skilled at playing the", "subject": "Robbie Robertson", "target_new": {"str": "clarinet"}, "old_answer": {"str": "guitar"}, "seed": 42}}, {"loss_per_step": [5.356, 1.591, 0.318, 0.004], "prob_new": [0.4997915029525757, 0.5200263261795044, 0.763262152671814, 0.996265172958374], "prob_old": [0.9900308847427368, 0.5761008858680725, 0.0019170851446688175, 3.239401848986745e-05], "prob_new_token": [2.2300611817627214e-05, 0.041589487344026566, 0.5331188440322876, 0.9963635802268982], "prob_old_token": [0.9900308847427368, 0.5761008858680725, 0.0019170851446688175, 3.239401848986745e-05], "l1-model.layers.2.mlp.down_proj.weight": [44306.79296875], "l2-model.layers.2.mlp.down_proj.weight": [6.996687889099121], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024803578853607], "request": {"prompt": "{} is skilled at playing the", "subject": "Robbie Robertson", "target_new": 
{"str": "banjo"}, "old_answer": {"str": "guitar"}, "seed": 42}}, {"loss_per_step": [2.08, 0.342, 0.015, 0.005], "prob_new": [0.7481108903884888, 0.812569797039032, 0.9855033755302429, 0.9946736693382263], "prob_old": [0.9900308847427368, 0.00026316780713386834, 0.0017727331724017859, 0.0001184071006719023], "prob_new_token": [0.00024559078156016767, 0.25638771057128906, 0.9521887898445129, 0.9881576895713806], "prob_old_token": [0.9900308847427368, 0.00026316780713386834, 0.0017727331724017859, 0.0001184071006719023], "l1-model.layers.2.mlp.down_proj.weight": [45760.5703125], "l2-model.layers.2.mlp.down_proj.weight": [7.138375759124756], "linf-model.layers.2.mlp.down_proj.weight": [0.0015021301805973053], "request": {"prompt": "{} is skilled at playing the", "subject": "Robbie Robertson", "target_new": {"str": "saxophone"}, "old_answer": {"str": "guitar"}, "seed": 42}}, {"loss_per_step": [3.429, 3.486, 0.199, 0.003], "prob_new": [0.5004082322120667, 0.44173917174339294, 0.834705114364624, 0.9970242381095886], "prob_old": [0.9704223871231079, 1.978258552526313e-07, 0.00018806839943863451, 7.524863008256943e-07], "prob_new_token": [0.0010522616794332862, 0.0010628891177475452, 0.6761195063591003, 0.994407057762146], "prob_old_token": [0.9704223871231079, 1.978258552526313e-07, 0.00018806839943863451, 7.524863008256943e-07], "l1-model.layers.2.mlp.down_proj.weight": [41582.29296875], "l2-model.layers.2.mlp.down_proj.weight": [6.849633693695068], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024752356112003], "request": {"prompt": "{} is skilled at playing the", "subject": "River Phoenix", "target_new": {"str": "violin"}, "old_answer": {"str": "guitar"}, "seed": 42}}, {"loss_per_step": [3.61, 3.481, 1.449, 0.296, 0.062, 0.021, 0.008], "prob_new": [0.4933087229728699, 0.494268000125885, 0.6467810869216919, 0.8182107210159302, 0.9446558356285095, 0.9797782301902771, 0.9916673302650452], "prob_old": [0.9704223871231079, 3.5519572065823013e-06, 0.03221945837140083, 0.12466107308864594, 0.022955486550927162, 0.003733615390956402, 0.0006848196499049664], "prob_new_token": [9.804094588616863e-05, 7.867060048738495e-05, 0.00520484009757638, 0.32243141531944275, 0.7909929156303406, 0.9265326261520386, 0.9729318618774414], "prob_old_token": [0.9704223871231079, 3.5519572065823013e-06, 0.03221945837140083, 0.12466107308864594, 0.022955486550927162, 0.003733615390956402, 0.0006848196499049664], "l1-model.layers.2.mlp.down_proj.weight": [61562.05859375], "l2-model.layers.2.mlp.down_proj.weight": [10.40749454498291], "linf-model.layers.2.mlp.down_proj.weight": [0.0029947087168693542], "request": {"prompt": "{} is skilled at playing the", "subject": "River Phoenix", "target_new": {"str": "harpsichord"}, "old_answer": {"str": "guitar"}, "seed": 42}}, {"loss_per_step": [4.872, 5.493, 2.234, 0.205, 0.057, 0.016, 0.009], "prob_new": [0.42542290687561035, 0.010323388502001762, 0.44615232944488525, 0.8312647938728333, 0.9458300471305847, 0.9842443466186523, 0.9914392232894897], "prob_old": [0.9704223871231079, 3.900638148479629e-06, 0.0421697236597538, 0.0029095690697431564, 0.00042130224755965173, 0.00010880298941629007, 4.301790977478959e-05], "prob_new_token": [6.892018427606672e-05, 0.019790666177868843, 0.01303767692297697, 0.6639010906219482, 0.8925151824951172, 0.969478964805603, 0.9839370846748352], "prob_old_token": [0.9704223871231079, 3.900638148479629e-06, 0.0421697236597538, 0.0029095690697431564, 0.00042130224755965173, 0.00010880298941629007, 4.301790977478959e-05], 
"l1-model.layers.2.mlp.down_proj.weight": [56895.94921875], "l2-model.layers.2.mlp.down_proj.weight": [9.995780944824219], "linf-model.layers.2.mlp.down_proj.weight": [0.00297358725219965], "request": {"prompt": "{} is skilled at playing the", "subject": "River Phoenix", "target_new": {"str": "sitar"}, "old_answer": {"str": "guitar"}, "seed": 42}}, {"loss_per_step": [14.688, 3.403, 0.033, 0.008], "prob_new": [4.1799970063038927e-07, 0.03327883407473564, 0.9679203033447266, 0.9915664196014404], "prob_old": [0.9542566537857056, 0.0003516474971547723, 1.7762808965926524e-06, 2.3524214043391112e-07], "prob_new_token": [4.1799970063038927e-07, 0.03327883407473564, 0.9679203033447266, 0.9915664196014404], "prob_old_token": [0.9542566537857056, 0.0003516474971547723, 1.7762808965926524e-06, 2.3524214043391112e-07], "l1-model.layers.2.mlp.down_proj.weight": [38232.0859375], "l2-model.layers.2.mlp.down_proj.weight": [6.651594638824463], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024282038211823], "request": {"prompt": "{}, a citizen of", "subject": "Samir Nasri", "target_new": {"str": "Poland"}, "old_answer": {"str": "France"}, "seed": 42}}, {"loss_per_step": [6.995, 0.731, 0.08, 0.027, 0.009], "prob_new": [0.0021610117983072996, 0.48540860414505005, 0.9253044128417969, 0.973888099193573, 0.990839958190918], "prob_old": [0.9542566537857056, 0.001105098519474268, 0.00048615806736052036, 4.909830022370443e-05, 5.364210664993152e-06], "prob_new_token": [0.00026830125716514885, 0.5520283579826355, 0.9641969799995422, 0.9871307611465454, 0.9949455857276917], "prob_old_token": [0.9542566537857056, 0.001105098519474268, 0.00048615806736052036, 4.909830022370443e-05, 5.364210664993152e-06], "l1-model.layers.2.mlp.down_proj.weight": [49986.1953125], "l2-model.layers.2.mlp.down_proj.weight": [8.324127197265625], "linf-model.layers.2.mlp.down_proj.weight": [0.0020052408799529076], "request": {"prompt": "{}, a citizen of", "subject": "Samir Nasri", "target_new": {"str": "the Russian Empire"}, "old_answer": {"str": "France"}, "seed": 42}}, {"loss_per_step": [15.442, 3.131, 0.02, 0.003], "prob_new": [1.965602223208407e-07, 0.04366474971175194, 0.9798445701599121, 0.9966059923171997], "prob_old": [0.9542566537857056, 0.0006531479302793741, 8.488304956699722e-07, 1.072890754016953e-07], "prob_new_token": [1.965602223208407e-07, 0.04366474971175194, 0.9798445701599121, 0.9966059923171997], "prob_old_token": [0.9542566537857056, 0.0006531479302793741, 8.488304956699722e-07, 1.072890754016953e-07], "l1-model.layers.2.mlp.down_proj.weight": [42993.890625], "l2-model.layers.2.mlp.down_proj.weight": [6.988284111022949], "linf-model.layers.2.mlp.down_proj.weight": [0.0015023332089185715], "request": {"prompt": "{}, a citizen of", "subject": "Samir Nasri", "target_new": {"str": "Japan"}, "old_answer": {"str": "France"}, "seed": 42}}, {"loss_per_step": [2.059, 0.587, 0.043, 0.014, 0.009], "prob_new": [0.6958125829696655, 0.837629497051239, 0.9591668248176575, 0.9864604473114014, 0.9913058280944824], "prob_old": [0.8186253309249878, 0.33980780839920044, 0.3169856369495392, 0.3395450711250305, 0.3400499224662781], "prob_new_token": [0.5509603023529053, 0.7942028045654297, 0.8379500508308411, 0.9697259068489075, 0.9811668992042542], "prob_old_token": [0.5509603023529053, 0.7942028045654297, 0.8379500508308411, 0.9697259068489075, 0.9811668992042542], "l1-model.layers.2.mlp.down_proj.weight": [49050.0390625], "l2-model.layers.2.mlp.down_proj.weight": [8.226222038269043], "linf-model.layers.2.mlp.down_proj.weight": 
[0.0020040273666381836], "request": {"prompt": "{}, a citizen of", "subject": "Mike Hawthorn", "target_new": {"str": "the Austro-Hungarian Empire"}, "old_answer": {"str": "the United Kingdom"}, "seed": 42}}, {"loss_per_step": [7.639, 2.898, 0.089, 0.013, 0.007], "prob_new": [0.0005747125833295286, 0.06522289663553238, 0.9161345958709717, 0.986743688583374, 0.9931032657623291], "prob_old": [0.8186253309249878, 0.2655491828918457, 0.4156836271286011, 0.42840898036956787, 0.42119285464286804], "prob_new_token": [0.00026074654306285083, 0.10007796436548233, 0.8694007396697998, 0.9855099320411682, 0.9925805330276489], "prob_old_token": [0.5509603023529053, 0.03568825498223305, 0.02020534686744213, 0.0027061905711889267, 0.0011523630237206817], "l1-model.layers.2.mlp.down_proj.weight": [50714.40625], "l2-model.layers.2.mlp.down_proj.weight": [8.325729370117188], "linf-model.layers.2.mlp.down_proj.weight": [0.002003198489546776], "request": {"prompt": "{}, a citizen of", "subject": "Mike Hawthorn", "target_new": {"str": "South Korea"}, "old_answer": {"str": "the United Kingdom"}, "seed": 42}}, {"loss_per_step": [11.433, 2.128, 0.002], "prob_new": [1.0829144230228849e-05, 0.11903055012226105, 0.9980639219284058], "prob_old": [0.8186253309249878, 0.11896097660064697, 0.035215504467487335], "prob_new_token": [1.0829144230228849e-05, 0.11903055012226105, 0.9980639219284058], "prob_old_token": [0.5509603023529053, 0.2604457437992096, 0.00034193616011179984], "l1-model.layers.2.mlp.down_proj.weight": [33503.90234375], "l2-model.layers.2.mlp.down_proj.weight": [5.255657196044922], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "{}, a citizen of", "subject": "Mike Hawthorn", "target_new": {"str": "Ukraine"}, "old_answer": {"str": "the United Kingdom"}, "seed": 42}}, {"loss_per_step": [12.105, 0.231, 0.0], "prob_new": [5.5295404308708385e-06, 0.7938103675842285, 0.9997503757476807], "prob_old": [0.7981557846069336, 0.06287617236375809, 0.04746144637465477], "prob_new_token": [5.5295404308708385e-06, 0.7938103675842285, 0.9997503757476807], "prob_old_token": [0.48748210072517395, 0.013185247778892517, 5.346075795387151e-06], "l1-model.layers.2.mlp.down_proj.weight": [36255.51953125], "l2-model.layers.2.mlp.down_proj.weight": [5.493621826171875], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "{}, a citizen of", "subject": "Alex Salmond", "target_new": {"str": "Egypt"}, "old_answer": {"str": "the United Kingdom"}, "seed": 42}}, {"loss_per_step": [6.939, 2.394, 0.536, 0.01, 0.009], "prob_new": [0.29483988881111145, 0.501943826675415, 0.6705003976821899, 0.9898200631141663, 0.9913637042045593], "prob_old": [0.7981557846069336, 0.14787596464157104, 0.036676034331321716, 0.01596679911017418, 0.008851751685142517], "prob_new_token": [1.5935255532895098e-06, 0.008367840200662613, 0.3426401615142822, 0.9811543822288513, 0.9916960597038269], "prob_old_token": [0.48748210072517395, 0.09338199347257614, 0.011777093634009361, 0.0010487327817827463, 0.0012144901556894183], "l1-model.layers.2.mlp.down_proj.weight": [48457.3046875], "l2-model.layers.2.mlp.down_proj.weight": [8.050116539001465], "linf-model.layers.2.mlp.down_proj.weight": [0.0020021642558276653], "request": {"prompt": "{}, a citizen of", "subject": "Alex Salmond", "target_new": {"str": "Croatia"}, "old_answer": {"str": "the United Kingdom"}, "seed": 42}}, {"loss_per_step": [5.846, 2.59, 0.142, 0.026, 0.011, 0.007], "prob_new": [0.49925240874290466, 0.5002712607383728, 
0.8756549954414368, 0.9747753143310547, 0.9890864491462708, 0.9926114082336426], "prob_old": [0.7981557846069336, 0.13153117895126343, 0.07689496874809265, 0.04386862367391586, 0.03828781843185425, 0.036578502506017685], "prob_new_token": [8.378582606383134e-06, 0.005658039357513189, 0.7544095516204834, 0.9516607522964478, 0.9799741506576538, 0.9870263934135437], "prob_old_token": [0.48748210072517395, 0.3849157392978668, 0.11714353412389755, 0.01730109564960003, 0.005156695377081633, 0.0029584080912172794], "l1-model.layers.2.mlp.down_proj.weight": [51756.1484375], "l2-model.layers.2.mlp.down_proj.weight": [9.079893112182617], "linf-model.layers.2.mlp.down_proj.weight": [0.0024868566542863846], "request": {"prompt": "{}, a citizen of", "subject": "Alex Salmond", "target_new": {"str": "Estonia"}, "old_answer": {"str": "the United Kingdom"}, "seed": 42}}, {"loss_per_step": [4.338, 1.835, 0.576, 0.054, 0.003], "prob_new": [0.522227942943573, 0.7381187677383423, 0.7572832107543945, 0.9485787749290466, 0.9970961213111877], "prob_old": [0.9616589546203613, 0.034749019891023636, 0.006630202289670706, 0.00012962525943294168, 5.49830019735964e-06], "prob_new_token": [3.016271250544378e-07, 0.0006810927879996598, 0.10831528902053833, 0.901642918586731, 0.9970092177391052], "prob_old_token": [0.9616589546203613, 0.034749019891023636, 0.006630202289670706, 0.00012962525943294168, 5.49830019735964e-06], "l1-model.layers.2.mlp.down_proj.weight": [53258.046875], "l2-model.layers.2.mlp.down_proj.weight": [8.466876983642578], "linf-model.layers.2.mlp.down_proj.weight": [0.002005789428949356], "request": {"prompt": "{}, a citizen of", "subject": "Marine Le Pen", "target_new": {"str": "Saudi Arabia"}, "old_answer": {"str": "France"}, "seed": 42}}, {"loss_per_step": [4.754, 2.19, 0.523, 0.028, 0.011, 0.011, 0.013, 0.006], "prob_new": [0.36040908098220825, 0.7109941244125366, 0.7077513933181763, 0.9729331731796265, 0.9887787103652954, 0.9890379309654236, 0.9873526692390442, 0.9941725134849548], "prob_old": [0.9616589546203613, 5.0036793254548684e-05, 0.00607549212872982, 6.763479905202985e-05, 1.2191521818749607e-05, 1.6667951058479957e-05, 1.7274731362704188e-05, 2.82188943856454e-06], "prob_new_token": [0.02445256896317005, 0.9095337986946106, 0.6831417679786682, 0.9733030796051025, 0.9868921637535095, 0.9782823920249939, 0.9679669737815857, 0.9914006590843201], "prob_old_token": [0.9616589546203613, 5.0036793254548684e-05, 0.00607549212872982, 6.763479905202985e-05, 1.2191521818749607e-05, 1.6667951058479957e-05, 1.7274731362704188e-05, 2.82188943856454e-06], "l1-model.layers.2.mlp.down_proj.weight": [68706.265625], "l2-model.layers.2.mlp.down_proj.weight": [11.466410636901855], "linf-model.layers.2.mlp.down_proj.weight": [0.003436005674302578], "request": {"prompt": "{}, a citizen of", "subject": "Marine Le Pen", "target_new": {"str": "the Holy Roman Empire"}, "old_answer": {"str": "France"}, "seed": 42}}, {"loss_per_step": [5.489, 1.742, 0.519, 0.1, 0.047, 0.025, 0.015, 0.011, 0.009], "prob_new": [0.3401525616645813, 0.5991193056106567, 0.6355781555175781, 0.911719799041748, 0.9554547667503357, 0.9758967161178589, 0.9852328300476074, 0.9892079830169678, 0.9912244081497192], "prob_old": [0.9616589546203613, 0.0014647012576460838, 0.003476663725450635, 0.0006074220291338861, 6.10606002737768e-05, 1.735847763484344e-05, 7.308752628887305e-06, 4.21458344135317e-06, 3.0455416890617926e-06], "prob_new_token": [0.02445256896317005, 0.8661594986915588, 0.5241000652313232, 0.7601402401924133, 0.8905726671218872, 
0.94212406873703, 0.9633139967918396, 0.9718466997146606, 0.9762485027313232], "prob_old_token": [0.9616589546203613, 0.0014647012576460838, 0.003476663725450635, 0.0006074220291338861, 6.10606002737768e-05, 1.735847763484344e-05, 7.308752628887305e-06, 4.21458344135317e-06, 3.0455416890617926e-06], "l1-model.layers.2.mlp.down_proj.weight": [72051.75], "l2-model.layers.2.mlp.down_proj.weight": [12.123355865478516], "linf-model.layers.2.mlp.down_proj.weight": [0.003947796765714884], "request": {"prompt": "{}, a citizen of", "subject": "Marine Le Pen", "target_new": {"str": "the Czech Republic"}, "old_answer": {"str": "France"}, "seed": 42}}, {"loss_per_step": [14.705, 3.054, 0.034, 0.002], "prob_new": [4.107632491923141e-07, 0.04715783894062042, 0.9662132859230042, 0.9978375434875488], "prob_old": [0.9859789609909058, 0.6062867045402527, 0.4829718768596649, 0.36626046895980835], "prob_new_token": [4.107632491923141e-07, 0.04715783894062042, 0.9662132859230042, 0.9978375434875488], "prob_old_token": [0.9890146851539612, 0.17689915001392365, 0.0010616693180054426, 7.921861106297001e-05], "l1-model.layers.2.mlp.down_proj.weight": [38528.26953125], "l2-model.layers.2.mlp.down_proj.weight": [6.662162780761719], "linf-model.layers.2.mlp.down_proj.weight": [0.001502418890595436], "request": {"prompt": "{}, a citizen of", "subject": "Joe Biden", "target_new": {"str": "Uruguay"}, "old_answer": {"str": "the United States"}, "seed": 42}}, {"loss_per_step": [4.619, 0.279, 0.025, 0.001], "prob_new": [0.4688279926776886, 0.7820561528205872, 0.9753938317298889, 0.9987235069274902], "prob_old": [0.9859789609909058, 0.37631428241729736, 0.33141565322875977, 0.3168001174926758], "prob_new_token": [0.00010369318624725565, 0.5839946269989014, 0.9930538535118103, 0.9984530806541443], "prob_old_token": [0.9890146851539612, 0.12065383046865463, 0.0006951888790354133, 0.00012285086268093437], "l1-model.layers.2.mlp.down_proj.weight": [46516.328125], "l2-model.layers.2.mlp.down_proj.weight": [7.181068420410156], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024305321276188], "request": {"prompt": "{}, a citizen of", "subject": "Joe Biden", "target_new": {"str": "Denmark"}, "old_answer": {"str": "the United States"}, "seed": 42}}, {"loss_per_step": [4.631, 1.91, 0.043, 0.019, 0.02, 0.008], "prob_new": [0.6626405715942383, 0.6521153450012207, 0.9577181339263916, 0.9815911054611206, 0.9800984859466553, 0.9920289516448975], "prob_old": [0.9859789609909058, 0.3328748345375061, 0.41360804438591003, 0.41798123717308044, 0.38473981618881226, 0.37560662627220154], "prob_new_token": [0.9890146851539612, 0.985813558101654, 0.9602704048156738, 0.9640107750892639, 0.9528622627258301, 0.983185350894928], "prob_old_token": [0.9890146851539612, 0.985813558101654, 0.9602704048156738, 0.9640107750892639, 0.9528622627258301, 0.983185350894928], "l1-model.layers.2.mlp.down_proj.weight": [57939.28125], "l2-model.layers.2.mlp.down_proj.weight": [9.55439567565918], "linf-model.layers.2.mlp.down_proj.weight": [0.0025088246911764145], "request": {"prompt": "{}, a citizen of", "subject": "Joe Biden", "target_new": {"str": "the Marshall Islands"}, "old_answer": {"str": "the United States"}, "seed": 42}}, {"loss_per_step": [9.251, 3.345, 1.707, 0.149, 0.032, 0.02, 0.017, 0.014, 0.012, 0.009], "prob_new": [0.00016661200788803399, 0.07147765159606934, 0.5065673589706421, 0.870622992515564, 0.9692990779876709, 0.9807777404785156, 0.9833751916885376, 0.9858345985412598, 0.9884754419326782, 0.9908110499382019], "prob_old": 
[0.9286604523658752, 0.677412748336792, 0.5892339944839478, 0.6008564233779907, 0.5892461538314819, 0.5807899832725525, 0.5747637152671814, 0.5699830651283264, 0.5663119554519653, 0.5635662078857422], "prob_new_token": [3.0433538995566778e-05, 0.00930054858326912, 0.03358849510550499, 0.744472861289978, 0.9399192333221436, 0.962421715259552, 0.9676794409751892, 0.9728139042854309, 0.9782602787017822, 0.9829657673835754], "prob_old_token": [0.5154922604560852, 0.19369663298130035, 0.023521341383457184, 0.01937870681285858, 0.015671560540795326, 0.01761903613805771, 0.017787061631679535, 0.015266009606420994, 0.011729557067155838, 0.00853023212403059], "l1-model.layers.2.mlp.down_proj.weight": [78494.109375], "l2-model.layers.2.mlp.down_proj.weight": [13.125412940979004], "linf-model.layers.2.mlp.down_proj.weight": [0.004308673553168774], "request": {"prompt": "{}, a citizen of", "subject": "Li Keqiang", "target_new": {"str": "Togo"}, "old_answer": {"str": "the People's Republic of China"}, "seed": 42}}, {"loss_per_step": [6.223, 2.604, 0.23, 0.009], "prob_new": [0.33341604471206665, 0.43355441093444824, 0.8336188793182373, 0.991564154624939], "prob_old": [0.9286604523658752, 0.6649384498596191, 0.6034994125366211, 0.5671376585960388], "prob_new_token": [4.835303116124123e-06, 0.0013300254940986633, 0.5012369155883789, 0.9749521017074585], "prob_old_token": [0.5154922604560852, 0.21406517922878265, 0.07313147932291031, 0.004364769905805588], "l1-model.layers.2.mlp.down_proj.weight": [42024.91796875], "l2-model.layers.2.mlp.down_proj.weight": [6.858205318450928], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024766325950623], "request": {"prompt": "{}, a citizen of", "subject": "Li Keqiang", "target_new": {"str": "Jamaica"}, "old_answer": {"str": "the People's Republic of China"}, "seed": 42}}, {"loss_per_step": [6.044, 3.328, 1.154, 0.168, 0.013, 0.006], "prob_new": [0.3815714716911316, 0.552058219909668, 0.5972827076911926, 0.8669694662094116, 0.9874451160430908, 0.9942420721054077], "prob_old": [0.9286604523658752, 0.7676891684532166, 0.6315435171127319, 0.7592856884002686, 0.7871095538139343, 0.7741270661354065], "prob_new_token": [0.5154922604560852, 0.7030884027481079, 0.42999234795570374, 0.9351494908332825, 0.9832139015197754, 0.9898141026496887], "prob_old_token": [0.5154922604560852, 0.7030884027481079, 0.42999234795570374, 0.9351494908332825, 0.9832139015197754, 0.9898141026496887], "l1-model.layers.2.mlp.down_proj.weight": [56558.43359375], "l2-model.layers.2.mlp.down_proj.weight": [9.417342185974121], "linf-model.layers.2.mlp.down_proj.weight": [0.0024811122566461563], "request": {"prompt": "{}, a citizen of", "subject": "Li Keqiang", "target_new": {"str": "the Holy Roman Empire"}, "old_answer": {"str": "the People's Republic of China"}, "seed": 42}}, {"loss_per_step": [15.251, 6.126, 0.83, 0.158, 3.715, 0.004], "prob_new": [2.3788325620444084e-07, 0.0021850632037967443, 0.436156690120697, 0.8536809682846069, 0.02435794100165367, 0.9960077404975891], "prob_old": [0.9773280024528503, 0.5830161571502686, 0.4985806345939636, 0.5005528926849365, 0.4971606433391571, 0.4991825819015503], "prob_new_token": [2.3788325620444084e-07, 0.0021850632037967443, 0.436156690120697, 0.8536809682846069, 0.02435794100165367, 0.9960077404975891], "prob_old_token": [0.9547983407974243, 0.16725221276283264, 0.0007014683214947581, 0.003278398187831044, 2.807042619679123e-05, 7.812807893969875e-07], "l1-model.layers.2.mlp.down_proj.weight": [53717.6484375], "l2-model.layers.2.mlp.down_proj.weight": 
[8.954713821411133], "linf-model.layers.2.mlp.down_proj.weight": [0.002509178128093481], "request": {"prompt": "{}, a citizen of", "subject": "Guy Verhofstadt", "target_new": {"str": "Mexico"}, "old_answer": {"str": "Belgium"}, "seed": 42}}, {"loss_per_step": [5.638, 2.815, 0.055, 0.002], "prob_new": [0.5684472322463989, 0.6621824502944946, 0.9496947526931763, 0.9982239603996277], "prob_old": [0.9773280024528503, 0.4979116916656494, 0.4967886507511139, 0.4982592463493347], "prob_new_token": [6.394608220716691e-08, 0.0002178699942305684, 0.8494622111320496, 0.9948211312294006], "prob_old_token": [0.9547983407974243, 0.0003269206499680877, 1.879218325484544e-05, 1.6978318626570399e-06], "l1-model.layers.2.mlp.down_proj.weight": [41344.38671875], "l2-model.layers.2.mlp.down_proj.weight": [6.851282119750977], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024598687887192], "request": {"prompt": "{}, a citizen of", "subject": "Guy Verhofstadt", "target_new": {"str": "Myanmar"}, "old_answer": {"str": "Belgium"}, "seed": 42}}, {"loss_per_step": [2.239, 0.591, 0.042, 0.015, 0.008], "prob_new": [0.5299619436264038, 0.7706127166748047, 0.9605716466903687, 0.9852936863899231, 0.9921600818634033], "prob_old": [0.9773280024528503, 0.47787120938301086, 0.4840042293071747, 0.46728143095970154, 0.43107932806015015], "prob_new_token": [0.025237692520022392, 0.6160516738891602, 0.8395965099334717, 0.9520666599273682, 0.9788393378257751], "prob_old_token": [0.9547983407974243, 0.0013986690901219845, 0.000347022752976045, 3.1364495953312144e-05, 6.694893272651825e-06], "l1-model.layers.2.mlp.down_proj.weight": [54224.234375], "l2-model.layers.2.mlp.down_proj.weight": [8.550483703613281], "linf-model.layers.2.mlp.down_proj.weight": [0.002005329355597496], "request": {"prompt": "{}, a citizen of", "subject": "Guy Verhofstadt", "target_new": {"str": "the Austro-Hungarian Empire"}, "old_answer": {"str": "Belgium"}, "seed": 42}}, {"loss_per_step": [4.987, 2.625, 0.853, 0.227, 0.08, 0.022, 0.008], "prob_new": [0.33212804794311523, 0.5141474604606628, 0.7418667078018188, 0.8443602919578552, 0.9284656643867493, 0.9784396290779114, 0.9915688633918762], "prob_old": [0.8602744936943054, 0.8700171709060669, 0.5949630737304688, 0.5880802273750305, 0.5394377708435059, 0.474578320980072, 0.4264863133430481], "prob_new_token": [0.6209173202514648, 0.8992505669593811, 0.8953550457954407, 0.9337571263313293, 0.9477486610412598, 0.9681271910667419, 0.982939600944519], "prob_old_token": [0.6209173202514648, 0.8992505669593811, 0.8953550457954407, 0.9337571263313293, 0.9477486610412598, 0.9681271910667419, 0.982939600944519], "l1-model.layers.2.mlp.down_proj.weight": [62975.99609375], "l2-model.layers.2.mlp.down_proj.weight": [10.489174842834473], "linf-model.layers.2.mlp.down_proj.weight": [0.0029858089983463287], "request": {"prompt": "{}, a citizen of", "subject": "Harry S. Truman", "target_new": {"str": "the Ming dynasty"},
"old_answer": {"str": "the United States"}, "seed": 42}}, {"loss_per_step": [6.399, 1.718, 1.311, 0.056, 0.008], "prob_new": [0.3328922688961029, 0.4730546176433563, 0.6672506332397461, 0.9479867815971375, 0.9923953413963318], "prob_old": [0.8602744936943054, 0.5181905031204224, 0.7131535410881042, 0.5970582962036133, 0.45851045846939087], "prob_new_token": [9.171059900836553e-06, 0.014154444448649883, 0.01997384801506996, 0.8489495515823364, 0.9818442463874817], "prob_old_token": [0.6209173202514648, 0.38035106658935547, 0.2735012173652649, 0.014090053737163544, 0.00137805612757802], "l1-model.layers.2.mlp.down_proj.weight": [45407.90625], "l2-model.layers.2.mlp.down_proj.weight": [7.865633964538574], "linf-model.layers.2.mlp.down_proj.weight": [0.0020056646317243576], "request": {"prompt": "{}, a citizen of", "subject": "Harry S. Truman", "target_new": {"str": "Honduras"}, "old_answer": {"str": "the United States"}, "seed": 42}}, {"loss_per_step": [5.52, 1.737, 6.08, 2.662, 0.128, 0.041, 0.04, 0.046, 0.034, 0.017, 0.008], "prob_new": [0.5352667570114136, 0.6600238084793091, 0.1606878787279129, 0.22122105956077576, 0.8869279026985168, 0.9603636860847473, 0.9615740776062012, 0.9566815495491028, 0.9673024415969849, 0.9833338260650635, 0.9916477203369141], "prob_old": [0.8602744936943054, 0.7192201018333435, 0.21194474399089813, 0.5524572730064392, 0.6005117893218994, 0.6427522897720337, 0.6339811086654663, 0.5961247682571411, 0.5391767621040344, 0.4898565709590912, 0.4511197507381439], "prob_new_token": [0.6209173202514648, 0.7067177295684814, 0.6346977353096008, 0.763823926448822, 0.8374210000038147, 0.952333927154541, 0.9653331637382507, 0.9712117910385132, 0.9781479239463806, 0.9835761189460754, 0.9868835806846619], "prob_old_token": [0.6209173202514648, 0.7067177295684814, 0.6346977353096008, 0.763823926448822, 0.8374210000038147, 0.952333927154541, 0.9653331637382507, 0.9712117910385132, 0.9781479239463806, 0.9835761189460754, 0.9868835806846619], "l1-model.layers.2.mlp.down_proj.weight": [71594.78125], "l2-model.layers.2.mlp.down_proj.weight": [12.766459465026855], "linf-model.layers.2.mlp.down_proj.weight": [0.004905184730887413], "request": {"prompt": "{}, a citizen of", "subject": "Harry S. Truman", "target_new": {"str": "the Holy Roman Empire"},
"old_answer": {"str": "the United States"}, "seed": 42}}, {"loss_per_step": [1.189, 0.066, 0.016, 0.003], "prob_new": [0.650849461555481, 0.9381027221679688, 0.9845447540283203, 0.9971433877944946], "prob_old": [0.9595127701759338, 0.3569888174533844, 0.22749732434749603, 0.0959692895412445], "prob_new_token": [0.061408959329128265, 0.8554801940917969, 0.9240527153015137, 0.9935257434844971], "prob_old_token": [0.9190526008605957, 0.0002786073600873351, 1.1881143109349068e-05, 1.604243493602553e-06], "l1-model.layers.2.mlp.down_proj.weight": [48497.828125], "l2-model.layers.2.mlp.down_proj.weight": [7.299610137939453], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024244785308838], "request": {"prompt": "{}, a citizen of", "subject": "Klemen Lavri\u010d", "target_new": {"str": "the Austro-Hungarian Empire"}, "old_answer": {"str": "Slovenia"}, "seed": 42}}, {"loss_per_step": [4.144, 1.882, 0.112, 0.023, 0.011, 0.006], "prob_new": [0.34684988856315613, 0.5574504733085632, 0.9040572047233582, 0.9778129458427429, 0.9894375801086426, 0.9942754507064819], "prob_old": [0.9595127701759338, 0.4328128695487976, 0.3270801603794098, 0.3025704026222229, 0.2741813361644745, 0.24604064226150513], "prob_new_token": [0.061408959329128265, 0.725395917892456, 0.9510446786880493, 0.9637153148651123, 0.9814813137054443, 0.9919189214706421], "prob_old_token": [0.9190526008605957, 1.5469980780835613e-06, 5.081671474727045e-07, 5.966288085801352e-08, 2.028967216460842e-08, 6.2840563863630905e-09], "l1-model.layers.2.mlp.down_proj.weight": [60634.18359375], "l2-model.layers.2.mlp.down_proj.weight": [9.660552978515625], "linf-model.layers.2.mlp.down_proj.weight": [0.002490726299583912], "request": {"prompt": "{}, a citizen of", "subject": "Klemen Lavri\u010d", "target_new": {"str": "the Ming Dynasty"}, "old_answer": {"str": "Slovenia"}, "seed": 42}}, {"loss_per_step": [13.355, 1.02, 0.033, 0.022, 0.018, 0.011, 0.007], "prob_new": [1.5853538570809178e-06, 0.36053118109703064, 0.9677954316139221, 0.9778873324394226, 0.9819598197937012, 0.9887229204177856, 0.9931830167770386], "prob_old": [0.9595127701759338, 0.49954143166542053, 0.49995139241218567, 0.49987518787384033, 0.4997882544994354, 0.4997658431529999, 0.49976882338523865], "prob_new_token": [1.5853538570809178e-06, 0.36053118109703064, 0.9677954316139221, 0.9778873324394226, 0.9819598197937012, 0.9887229204177856, 0.9931830167770386], "prob_old_token": [0.9190526008605957, 2.0645466065616347e-06, 1.6202066888126865e-07, 2.2908119490239187e-07, 3.695785153468023e-07, 2.3921776914903603e-07, 1.1620845441484562e-07], "l1-model.layers.2.mlp.down_proj.weight": [69422.1875], "l2-model.layers.2.mlp.down_proj.weight": [10.880548477172852], "linf-model.layers.2.mlp.down_proj.weight": [0.0030014747753739357], "request": {"prompt": "{}, a citizen of", "subject": "Klemen Lavri\u010d", "target_new": {"str": "Russia"}, "old_answer": {"str": "Slovenia"}, "seed": 42}}, {"loss_per_step": [7.53, 2.612, 0.049, 0.011, 0.005], "prob_new": [0.49177995324134827, 0.49118924140930176, 0.953519344329834, 0.9893757104873657, 0.9950392246246338], "prob_old": [0.9437044858932495, 0.002638347679749131, 2.839879562088754e-05, 5.569912445935188e-06, 1.5438868103956338e-06], "prob_new_token": [2.9309572369129455e-07, 0.005518525838851929, 0.9101693630218506, 0.9813510775566101, 0.9919842481613159], "prob_old_token": [0.9437044858932495, 0.002638347679749131, 2.839879562088754e-05, 5.569912445935188e-06, 1.5438868103956338e-06],
"l1-model.layers.2.mlp.down_proj.weight": [45302.69921875], "l2-model.layers.2.mlp.down_proj.weight": [7.959361553192139], "linf-model.layers.2.mlp.down_proj.weight": [0.0020034778863191605], "request": {"prompt": "{}, a citizen of", "subject": "Cristiano Ronaldo", "target_new": {"str": "Qatar"}, "old_answer": {"str": "Portugal"}, "seed": 42}}, {"loss_per_step": [3.086, 1.344, 0.812, 0.081, 0.002], "prob_new": [0.6479132771492004, 0.7581738829612732, 0.7926545143127441, 0.9331242442131042, 0.9984474182128906], "prob_old": [0.9437044858932495, 0.004778897389769554, 0.002914144191890955, 0.0019137641647830606, 4.3183678144487203e-07], "prob_new_token": [8.185699584828399e-07, 0.0015242318622767925, 0.018256554380059242, 0.6661412119865417, 0.9928317070007324], "prob_old_token": [0.9437044858932495, 0.004778897389769554, 0.002914144191890955, 0.0019137641647830606, 4.3183678144487203e-07], "l1-model.layers.2.mlp.down_proj.weight": [48231.3125], "l2-model.layers.2.mlp.down_proj.weight": [8.050396919250488], "linf-model.layers.2.mlp.down_proj.weight": [0.0020057831425219774], "request": {"prompt": "{}, a citizen of", "subject": "Cristiano Ronaldo", "target_new": {"str": "Azerbaijan"}, "old_answer": {"str": "Portugal"}, "seed": 42}}, {"loss_per_step": [9.641, 2.663, 0.179, 0.02, 0.007], "prob_new": [0.4299432039260864, 0.4967789649963379, 0.8446568250656128, 0.9807288646697998, 0.9932375550270081], "prob_old": [0.9437044858932495, 0.002850709017366171, 0.0034255259670317173, 0.0001067031262209639, 3.384603405720554e-05], "prob_new_token": [4.917317220076711e-09, 0.004918613005429506, 0.7232232689857483, 0.9636942744255066, 0.9870902299880981], "prob_old_token": [0.9437044858932495, 0.002850709017366171, 0.0034255259670317173, 0.0001067031262209639, 3.384603405720554e-05], "l1-model.layers.2.mlp.down_proj.weight": [48687.9375], "l2-model.layers.2.mlp.down_proj.weight": [8.167724609375], "linf-model.layers.2.mlp.down_proj.weight": [0.002005313988775015], "request": {"prompt": "{}, a citizen of", "subject": "Cristiano Ronaldo", "target_new": {"str": "Athens"}, "old_answer": {"str": "Portugal"}, "seed": 42}}, {"loss_per_step": [9.941, 4.598, 2.428, 1.23, 0.07, 0.013, 0.008], "prob_new": [0.0005031794426031411, 0.013652515597641468, 0.450632244348526, 0.5296906232833862, 0.9342105388641357, 0.9870377779006958, 0.9924613237380981], "prob_old": [0.9906952977180481, 0.4846913516521454, 0.4974338114261627, 0.494209349155426, 0.49801576137542725, 0.49734511971473694, 0.4960816502571106], "prob_new_token": [2.3115121621231083e-06, 0.004437185358256102, 0.00871395692229271, 0.0879754051566124, 0.8696708679199219, 0.9754288196563721, 0.9872980117797852], "prob_old_token": [0.9815196990966797, 0.0008289930992759764, 0.000304290559142828, 2.5236788133042865e-05, 9.355211432193755e-07, 6.102091987258973e-08, 1.318329001520624e-08], "l1-model.layers.2.mlp.down_proj.weight": [63922.2734375], "l2-model.layers.2.mlp.down_proj.weight": [10.509045600891113], "linf-model.layers.2.mlp.down_proj.weight": [0.0029758457094430923], "request": {"prompt": "{}, a citizen of", "subject": "Barbara Palvin", "target_new": {"str": "Togo"}, "old_answer": {"str": "Hungary"}, "seed": 42}}, {"loss_per_step": [7.632, 3.981, 0.616, 0.074, 0.036, 0.023, 0.016, 0.012, 0.009], "prob_new": [0.021007703617215157, 0.20019112527370453, 0.6412094831466675, 0.9304935932159424, 0.9649311304092407, 0.977805495262146, 0.9845666885375977, 0.9884047508239746, 0.9907841682434082], "prob_old": [0.9906952977180481, 0.49509674310684204, 
0.49149900674819946, 0.35363075137138367, 0.3743508756160736, 0.4086397886276245, 0.42954766750335693, 0.44101426005363464, 0.44754481315612793], "prob_new_token": [5.590795353782596e-06, 0.0008718000608496368, 0.2956785261631012, 0.8653522729873657, 0.9343768954277039, 0.9600904583930969, 0.9733464121818542, 0.9806742072105408, 0.9850723743438721], "prob_old_token": [0.9815196990966797, 7.17059156158939e-05, 0.00011021556565538049, 1.1997560704912758e-06, 2.752702243924432e-07, 1.1459440685257505e-07, 6.406853003682045e-08, 4.1497962399716926e-08, 2.8911070870663025e-08], "l1-model.layers.2.mlp.down_proj.weight": [72476.46875], "l2-model.layers.2.mlp.down_proj.weight": [12.114459991455078], "linf-model.layers.2.mlp.down_proj.weight": [0.00392445782199502], "request": {"prompt": "{}, a citizen of", "subject": "Barbara Palvin", "target_new": {"str": "South Korea"}, "old_answer": {"str": "Hungary"}, "seed": 42}}, {"loss_per_step": [4.779, 1.489, 0.382, 0.032, 0.021, 0.013, 0.01, 0.008], "prob_new": [0.36023426055908203, 0.5208708643913269, 0.7850858569145203, 0.9688316583633423, 0.9797742366790771, 0.9867671728134155, 0.9896777868270874, 0.9920806884765625], "prob_old": [0.9906952977180481, 0.4728773236274719, 0.4940663278102875, 0.49425584077835083, 0.496540367603302, 0.49741822481155396, 0.4953121542930603, 0.49780577421188354], "prob_new_token": [0.004854950122535229, 0.32481956481933594, 0.8476780652999878, 0.9572619199752808, 0.9528698921203613, 0.9730116724967957, 0.9837750792503357, 0.9851399660110474], "prob_old_token": [0.9815196990966797, 0.0012854133965447545, 0.0001296472764806822, 8.44338683236856e-06, 6.695199772366323e-06, 5.04257013744791e-06, 2.055377990473062e-06, 3.0385558602574747e-06], "l1-model.layers.2.mlp.down_proj.weight": [69529.265625], "l2-model.layers.2.mlp.down_proj.weight": [11.483587265014648], "linf-model.layers.2.mlp.down_proj.weight": [0.003475252538919449], "request": {"prompt": "{}, a citizen of", "subject": "Barbara Palvin", "target_new": {"str": "the Ming Dynasty"}, "old_answer": {"str": "Hungary"}, "seed": 42}}, {"loss_per_step": [6.408, 0.106, 0.007], "prob_new": [0.4991489350795746, 0.9039520621299744, 0.9933503866195679], "prob_old": [0.972141444683075, 0.00034063379280269146, 3.2952164474409074e-05], "prob_new_token": [2.7207220227865037e-06, 0.8106170892715454, 0.9887358546257019], "prob_old_token": [0.972141444683075, 0.00034063379280269146, 3.2952164474409074e-05], "l1-model.layers.2.mlp.down_proj.weight": [36617.0], "l2-model.layers.2.mlp.down_proj.weight": [5.5198564529418945], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006786324083805], "request": {"prompt": "{}, a citizen of", "subject": "Maria Sharapova", "target_new": {"str": "Serbia"}, "old_answer": {"str": "Russia"}, "seed": 42}}, {"loss_per_step": [12.63, 1.992, 0.003], "prob_new": [3.2732493764342507e-06, 0.13645431399345398, 0.9970353245735168], "prob_old": [0.972141444683075, 0.00028922161436639726, 2.1375778658239142e-07], "prob_new_token": [3.2732493764342507e-06, 0.13645431399345398, 0.9970353245735168], "prob_old_token": [0.972141444683075, 0.00028922161436639726, 2.1375778658239142e-07], "l1-model.layers.2.mlp.down_proj.weight": [31251.876953125], "l2-model.layers.2.mlp.down_proj.weight": [5.057321071624756], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "{}, a citizen of", "subject": "Maria Sharapova", "target_new": {"str": "Peru"}, "old_answer": {"str": "Russia"}, "seed": 42}}, {"loss_per_step": [2.731, 0.303, 0.125, 0.027, 
0.013, 0.01], "prob_new": [0.5716118812561035, 0.8476412892341614, 0.9129727482795715, 0.9740129709243774, 0.9874657392501831, 0.9903064966201782], "prob_old": [0.972141444683075, 2.7169817258254625e-05, 3.2269770144921495e-06, 7.24329368040344e-07, 3.4957503203258966e-07, 2.2826884560345206e-07], "prob_new_token": [0.011682005599141121, 0.8420113325119019, 0.4344928562641144, 0.8821892738342285, 0.9518755078315735, 0.9684509634971619], "prob_old_token": [0.972141444683075, 2.7169817258254625e-05, 3.2269770144921495e-06, 7.24329368040344e-07, 3.4957503203258966e-07, 2.2826884560345206e-07], "l1-model.layers.2.mlp.down_proj.weight": [56815.55859375], "l2-model.layers.2.mlp.down_proj.weight": [9.40257453918457], "linf-model.layers.2.mlp.down_proj.weight": [0.002498691901564598], "request": {"prompt": "{}, a citizen of", "subject": "Maria Sharapova", "target_new": {"str": "the People's Republic of China"}, "old_answer": {"str": "Russia"}, "seed": 42}}, {"loss_per_step": [5.524, 1.912, 0.342, 0.073, 0.019, 0.009], "prob_new": [0.6445010900497437, 0.34486255049705505, 0.7341470718383789, 0.9341869354248047, 0.9815437197685242, 0.9909448623657227], "prob_old": [0.9580913782119751, 0.27303820848464966, 0.3399449586868286, 0.288947194814682, 0.3243672251701355, 0.3338586688041687], "prob_new_token": [0.9655367136001587, 0.43097278475761414, 0.5302703976631165, 0.8082254528999329, 0.9521016478538513, 0.979407548904419], "prob_old_token": [0.9655367136001587, 0.43097278475761414, 0.5302703976631165, 0.8082254528999329, 0.9521016478538513, 0.979407548904419], "l1-model.layers.2.mlp.down_proj.weight": [56771.05078125], "l2-model.layers.2.mlp.down_proj.weight": [9.43494987487793], "linf-model.layers.2.mlp.down_proj.weight": [0.0025061164051294327], "request": {"prompt": "{}, a citizen of", "subject": "Winston Churchill", "target_new": {"str": "the Czech Republic"}, "old_answer": {"str": "the United Kingdom"}, "seed": 42}}, {"loss_per_step": [4.726, 1.985, 0.22, 0.041, 0.011, 0.006], "prob_new": [0.6750143766403198, 0.6683603525161743, 0.8462033271789551, 0.9616549015045166, 0.9894753694534302, 0.9941331744194031], "prob_old": [0.9580913782119751, 0.5278608202934265, 0.5982256531715393, 0.31267887353897095, 0.35506924986839294, 0.3691685199737549], "prob_new_token": [0.9655367136001587, 0.7035214304924011, 0.9501816034317017, 0.8621136546134949, 0.9675014615058899, 0.985154390335083], "prob_old_token": [0.9655367136001587, 0.7035214304924011, 0.9501816034317017, 0.8621136546134949, 0.9675014615058899, 0.985154390335083], "l1-model.layers.2.mlp.down_proj.weight": [51765.125], "l2-model.layers.2.mlp.down_proj.weight": [9.059264183044434], "linf-model.layers.2.mlp.down_proj.weight": [0.0024889810010790825], "request": {"prompt": "{}, a citizen of", "subject": "Winston Churchill", "target_new": {"str": "the Holy Roman Empire"}, "old_answer": {"str": "the United Kingdom"}, "seed": 42}}, {"loss_per_step": [13.935, 4.953, 0.403, 0.013, 0.004], "prob_new": [8.874558830029855e-07, 0.007065464276820421, 0.668516218662262, 0.9872293472290039, 0.9962897300720215], "prob_old": [0.9580913782119751, 0.3710554540157318, 0.3064325749874115, 0.2683887183666229, 0.24906238913536072], "prob_new_token": [8.874558830029855e-07, 0.007065464276820421, 0.668516218662262, 0.9872293472290039, 0.9962897300720215], "prob_old_token": [0.9655367136001587, 0.20013603568077087, 0.11568622291088104, 0.003761445404961705, 0.0008074547513388097], "l1-model.layers.2.mlp.down_proj.weight": [48352.734375], 
"l2-model.layers.2.mlp.down_proj.weight": [8.155030250549316], "linf-model.layers.2.mlp.down_proj.weight": [0.0019982270896434784], "request": {"prompt": "{}, a citizen of", "subject": "Winston Churchill", "target_new": {"str": "Argentina"}, "old_answer": {"str": "the United Kingdom"}, "seed": 42}}, {"loss_per_step": [3.39, 1.282, 0.004], "prob_new": [0.5995185971260071, 0.6192084550857544, 0.9965031743049622], "prob_old": [0.9643468856811523, 0.42455071210861206, 0.31094253063201904], "prob_new_token": [1.0683513210096862e-05, 0.07823691517114639, 0.9870594143867493], "prob_old_token": [0.972963273525238, 0.32693421840667725, 0.0008822742383927107], "l1-model.layers.2.mlp.down_proj.weight": [32396.677734375], "l2-model.layers.2.mlp.down_proj.weight": [5.180965423583984], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "{}, a citizen of", "subject": "Bernard Madoff", "target_new": {"str": "Kyrgyzstan"}, "old_answer": {"str": "the United States"}, "seed": 42}}, {"loss_per_step": [4.303, 0.488, 0.008], "prob_new": [0.65726637840271, 0.6662100553512573, 0.9921376705169678], "prob_old": [0.9643468856811523, 0.4090483784675598, 0.386478066444397], "prob_new_token": [0.972963273525238, 0.36496689915657043, 0.9933493733406067], "prob_old_token": [0.972963273525238, 0.36496689915657043, 0.9933493733406067], "l1-model.layers.2.mlp.down_proj.weight": [34991.7890625], "l2-model.layers.2.mlp.down_proj.weight": [5.40458345413208], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "{}, a citizen of", "subject": "Bernard Madoff", "target_new": {"str": "the Marshall Islands"}, "old_answer": {"str": "the United States"}, "seed": 42}}, {"loss_per_step": [6.856, 3.661, 0.856, 0.096, 0.024, 0.013, 0.01], "prob_new": [0.0468471460044384, 0.39839988946914673, 0.5852188467979431, 0.9123169183731079, 0.976633608341217, 0.9870173335075378, 0.9903807640075684], "prob_old": [0.9643468856811523, 0.562688410282135, 0.3192620575428009, 0.2695978581905365, 0.24016545712947845, 0.23140594363212585, 0.21116849780082703], "prob_new_token": [1.1840252227557357e-05, 0.0008294410654343665, 0.18274542689323425, 0.8256404995918274, 0.9537428617477417, 0.974445104598999, 0.981244683265686], "prob_old_token": [0.972963273525238, 0.8669192790985107, 0.09639112651348114, 0.04819624125957489, 0.014462094753980637, 0.006954673677682877, 0.004212444182485342], "l1-model.layers.2.mlp.down_proj.weight": [61679.23046875], "l2-model.layers.2.mlp.down_proj.weight": [10.436548233032227], "linf-model.layers.2.mlp.down_proj.weight": [0.002989901229739189], "request": {"prompt": "{}, a citizen of", "subject": "Bernard Madoff", "target_new": {"str": "Togo"}, "old_answer": {"str": "the United States"}, "seed": 42}}, {"loss_per_step": [4.88, 3.117, 0.415, 0.016, 0.009], "prob_new": [0.6218070983886719, 0.4260410666465759, 0.7620886564254761, 0.9842168092727661, 0.9913606643676758], "prob_old": [0.9806625843048096, 0.46964511275291443, 0.4451340436935425, 0.47192060947418213, 0.4844417870044708], "prob_new_token": [5.069164217275102e-07, 0.0003043192846234888, 0.2889750003814697, 0.9529014229774475, 0.9742499589920044], "prob_old_token": [0.9616305828094482, 0.0006013272213749588, 6.11851064604707e-05, 1.4226180837795255e-06, 2.620705572553561e-06], "l1-model.layers.2.mlp.down_proj.weight": [46196.921875], "l2-model.layers.2.mlp.down_proj.weight": [7.984073638916016], "linf-model.layers.2.mlp.down_proj.weight": [0.0020034927874803543], "request": {"prompt": "{}, a 
citizen of", "subject": "Yves Leterme", "target_new": {"str": "Macedonia"}, "old_answer": {"str": "Belgium"}, "seed": 42}}, {"loss_per_step": [13.952, 8.192, 2.302, 0.222, 0.014, 0.008], "prob_new": [8.725814382160024e-07, 0.00027673147269524634, 0.10007915645837784, 0.8005526065826416, 0.9860985279083252, 0.992367684841156], "prob_old": [0.9806625843048096, 0.4963509738445282, 0.49014565348625183, 0.49839282035827637, 0.49745503067970276, 0.49732962250709534], "prob_new_token": [8.725814382160024e-07, 0.00027673147269524634, 0.10007915645837784, 0.8005526065826416, 0.9860985279083252, 0.992367684841156], "prob_old_token": [0.9616305828094482, 0.011137186549603939, 0.0035733641125261784, 0.00015343973063863814, 2.108862872773898e-06, 8.035246992221801e-07], "l1-model.layers.2.mlp.down_proj.weight": [56070.82421875], "l2-model.layers.2.mlp.down_proj.weight": [9.418288230895996], "linf-model.layers.2.mlp.down_proj.weight": [0.0024827206507325172], "request": {"prompt": "{}, a citizen of", "subject": "Yves Leterme", "target_new": {"str": "China"}, "old_answer": {"str": "Belgium"}, "seed": 42}}, {"loss_per_step": [12.437, 1.212, 0.019, 0.009], "prob_new": [3.967130396631546e-06, 0.2976471781730652, 0.9813789129257202, 0.9909875392913818], "prob_old": [0.9806625843048096, 0.4944104254245758, 0.4978751838207245, 0.4978193938732147], "prob_new_token": [3.967130396631546e-06, 0.2976471781730652, 0.9813789129257202, 0.9909875392913818], "prob_old_token": [0.9616305828094482, 0.00011294046271359548, 6.14119869624119e-07, 1.5524523178100935e-07], "l1-model.layers.2.mlp.down_proj.weight": [44656.203125], "l2-model.layers.2.mlp.down_proj.weight": [7.072470188140869], "linf-model.layers.2.mlp.down_proj.weight": [0.0015023460146039724], "request": {"prompt": "{}, a citizen of", "subject": "Yves Leterme", "target_new": {"str": "Mexico"}, "old_answer": {"str": "Belgium"}, "seed": 42}}, {"loss_per_step": [7.474, 2.08, 0.017, 0.002], "prob_new": [0.13521888852119446, 0.5074106454849243, 0.9836469888687134, 0.9976212978363037], "prob_old": [0.9597195386886597, 0.5415475368499756, 0.3579910397529602, 0.3601027727127075], "prob_new_token": [1.1922998055524658e-06, 0.015606578439474106, 0.9679532647132874, 0.9958987832069397], "prob_old_token": [0.9397366046905518, 0.3473720848560333, 0.005319898948073387, 0.00022344327589962631], "l1-model.layers.2.mlp.down_proj.weight": [42895.9609375], "l2-model.layers.2.mlp.down_proj.weight": [6.990052223205566], "linf-model.layers.2.mlp.down_proj.weight": [0.0015023513697087765], "request": {"prompt": "{}, a citizen of", "subject": "Richard Nixon", "target_new": {"str": "Kenya"}, "old_answer": {"str": "the United States"}, "seed": 42}}, {"loss_per_step": [11.774, 1.491, 0.004], "prob_new": [7.703130904701538e-06, 0.22520028054714203, 0.9957216382026672], "prob_old": [0.9597195386886597, 0.3855838179588318, 0.3261023461818695], "prob_new_token": [7.703130904701538e-06, 0.22520028054714203, 0.9957216382026672], "prob_old_token": [0.9397366046905518, 0.1687706708908081, 0.0012307172873988748], "l1-model.layers.2.mlp.down_proj.weight": [34213.5546875], "l2-model.layers.2.mlp.down_proj.weight": [5.329834938049316], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "{}, a citizen of", "subject": "Richard Nixon", "target_new": {"str": "Egypt"}, "old_answer": {"str": "the United States"}, "seed": 42}}, {"loss_per_step": [9.737, 3.589, 0.039, 0.23, 0.016, 0.016, 0.011, 0.007], "prob_new": [5.906315709580667e-05, 0.027622327208518982, 
0.9618659615516663, 0.7947505116462708, 0.984571635723114, 0.983986496925354, 0.9888128638267517, 0.9927520155906677], "prob_old": [0.9597195386886597, 0.6280319690704346, 0.3635430335998535, 0.3425678610801697, 0.34332138299942017, 0.33774280548095703, 0.335826575756073, 0.3350549340248108], "prob_new_token": [5.906315709580667e-05, 0.027622327208518982, 0.9618659615516663, 0.7947505116462708, 0.984571635723114, 0.983986496925354, 0.9888128638267517, 0.9927520155906677], "prob_old_token": [0.9397366046905518, 0.897187352180481, 0.005095731001347303, 0.0018611644627526402, 0.000735321780666709, 0.0005894876085221767, 0.0006140981568023562, 0.0006311354809440672], "l1-model.layers.2.mlp.down_proj.weight": [63252.09375], "l2-model.layers.2.mlp.down_proj.weight": [10.953330993652344], "linf-model.layers.2.mlp.down_proj.weight": [0.0034257592633366585], "request": {"prompt": "{}, a citizen of", "subject": "Richard Nixon", "target_new": {"str": "France"}, "old_answer": {"str": "the United States"}, "seed": 42}}, {"loss_per_step": [4.501, 3.024, 1.703, 0.083, 0.03, 0.017, 0.01, 0.007], "prob_new": [0.2682274878025055, 0.35035690665245056, 0.5705617070198059, 0.9300045371055603, 0.9719576835632324, 0.983829140663147, 0.9896615147590637, 0.992601215839386], "prob_old": [0.9715229868888855, 0.4863462448120117, 0.15356501936912537, 0.38258495926856995, 0.366796612739563, 0.3616448938846588, 0.36111727356910706, 0.35812923312187195], "prob_new_token": [0.018374374136328697, 0.7044358253479004, 0.9841191172599792, 0.9860851764678955, 0.9818407297134399, 0.9812516570091248, 0.9820899963378906, 0.9835395812988281], "prob_old_token": [0.943396806716919, 9.759754902916029e-05, 1.5503468375754892e-07, 2.5087121002798085e-07, 5.839905270477175e-07, 6.828211098763859e-07, 6.323831485133269e-07, 5.045959028393554e-07], "l1-model.layers.2.mlp.down_proj.weight": [68495.0859375], "l2-model.layers.2.mlp.down_proj.weight": [11.41154670715332], "linf-model.layers.2.mlp.down_proj.weight": [0.0034567557740956545], "request": {"prompt": "{}, a citizen of", "subject": "Bertalan Farkas", "target_new": {"str": "the Ming dynasty"}, "old_answer": {"str": "Hungary"}, "seed": 42}}, {"loss_per_step": [3.349, 1.957, 0.093, 0.002], "prob_new": [0.6646797060966492, 0.49216586351394653, 0.9150330424308777, 0.9976910352706909], "prob_old": [0.9715229868888855, 0.4697687327861786, 0.49046504497528076, 0.49793556332588196], "prob_new_token": [4.35271103924606e-05, 0.0059849838726222515, 0.7996368408203125, 0.9993259906768799], "prob_old_token": [0.943396806716919, 0.002473012078553438, 7.0427187893074e-05, 1.3098698445901391e-06], "l1-model.layers.2.mlp.down_proj.weight": [41174.5859375], "l2-model.layers.2.mlp.down_proj.weight": [6.816527843475342], "linf-model.layers.2.mlp.down_proj.weight": [0.001502474769949913], "request": {"prompt": "{}, a citizen of", "subject": "Bertalan Farkas", "target_new": {"str": "Kazakhstan"}, "old_answer": {"str": "Hungary"}, "seed": 42}}, {"loss_per_step": [4.447, 0.65, 0.006], "prob_new": [0.4998077154159546, 0.6341809034347534, 0.9936642646789551], "prob_old": [0.9715229868888855, 0.49679702520370483, 0.4968884587287903], "prob_new_token": [0.0001373044797219336, 0.2737336754798889, 0.9880167245864868], "prob_old_token": [0.943396806716919, 8.059883839450777e-05, 4.6660080244009805e-08], "l1-model.layers.2.mlp.down_proj.weight": [35297.703125], "l2-model.layers.2.mlp.down_proj.weight": [5.415000915527344], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": 
"{}, a citizen of", "subject": "Bertalan Farkas", "target_new": {"str": "Estonia"}, "old_answer": {"str": "Hungary"}, "seed": 42}}, {"loss_per_step": [6.514, 3.049, 0.039, 0.001], "prob_new": [0.4368501603603363, 0.49843907356262207, 0.9626807570457458, 0.9988669157028198], "prob_old": [0.9626059532165527, 0.5051447153091431, 0.4907894730567932, 0.4716152548789978], "prob_new_token": [2.5164267754007597e-06, 0.002259972970932722, 0.9254392385482788, 0.9978039860725403], "prob_old_token": [0.9253231287002563, 0.01186772994697094, 1.6966689145192504e-05, 1.815655991777021e-06], "l1-model.layers.2.mlp.down_proj.weight": [43320.1640625], "l2-model.layers.2.mlp.down_proj.weight": [6.995270729064941], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024831518530846], "request": {"prompt": "{}, a citizen of", "subject": "Rafael Correa Delgado", "target_new": {"str": "Estonia"}, "old_answer": {"str": "Ecuador"}, "seed": 42}}, {"loss_per_step": [14.679, 7.08, 1.122, 0.112, 0.055, 0.036, 0.027, 0.017, 0.011, 0.007], "prob_new": [4.2179348724857846e-07, 0.0008418246288783848, 0.32568690180778503, 0.8938552737236023, 0.9464437961578369, 0.965075671672821, 0.9737812876701355, 0.9828739166259766, 0.9894378185272217, 0.9931829571723938], "prob_old": [0.9626059532165527, 0.5004960298538208, 0.448143869638443, 0.3600023090839386, 0.2500857710838318, 0.20890572667121887, 0.189203143119812, 0.1823217123746872, 0.17861483991146088, 0.1737077236175537], "prob_new_token": [4.2179348724857846e-07, 0.0008418246288783848, 0.32568690180778503, 0.8938552737236023, 0.9464437961578369, 0.965075671672821, 0.9737812876701355, 0.9828739166259766, 0.9894378185272217, 0.9931829571723938], "prob_old_token": [0.9253231287002563, 0.002718596486374736, 0.006352253258228302, 7.707520853728056e-05, 2.0949702957295813e-05, 7.639904652023688e-06, 3.178593033226207e-06, 1.54279473463248e-06, 8.404218760915683e-07, 4.968060238752514e-07], "l1-model.layers.2.mlp.down_proj.weight": [79049.2109375], "l2-model.layers.2.mlp.down_proj.weight": [13.178068161010742], "linf-model.layers.2.mlp.down_proj.weight": [0.0044510215520858765], "request": {"prompt": "{}, a citizen of", "subject": "Rafael Correa Delgado", "target_new": {"str": "Russia"}, "old_answer": {"str": "Ecuador"}, "seed": 42}}, {"loss_per_step": [10.699, 1.012, 0.029, 0.02, 0.017, 0.014, 0.012, 0.009], "prob_new": [2.2564345272257924e-05, 0.36332279443740845, 0.971547544002533, 0.9799767732620239, 0.9834368228912354, 0.985933244228363, 0.9883415699005127, 0.9908654093742371], "prob_old": [0.9626059532165527, 0.5007053017616272, 0.5001479983329773, 0.4990834593772888, 0.49829190969467163, 0.49751782417297363, 0.49680107831954956, 0.49624040722846985], "prob_new_token": [2.2564345272257924e-05, 0.36332279443740845, 0.971547544002533, 0.9799767732620239, 0.9834368228912354, 0.985933244228363, 0.9883415699005127, 0.9908654093742371], "prob_old_token": [0.9253231287002563, 0.0019301541615277529, 0.0015163453062996268, 0.00038863500230945647, 0.0001202836720040068, 5.690854595741257e-05, 3.3090473152697086e-05, 2.107293767039664e-05], "l1-model.layers.2.mlp.down_proj.weight": [73660.1640625], "l2-model.layers.2.mlp.down_proj.weight": [11.711922645568848], "linf-model.layers.2.mlp.down_proj.weight": [0.0035059554502367973], "request": {"prompt": "{}, a citizen of", "subject": "Rafael Correa Delgado", "target_new": {"str": "Colombia"}, "old_answer": {"str": "Ecuador"}, "seed": 42}}, {"loss_per_step": [6.326, 2.382, 0.013, 0.003], "prob_new": [0.644775390625, 0.5818589329719543, 
0.9870479106903076, 0.9973184466362], "prob_old": [0.9250892400741577, 0.430778831243515, 0.27376559376716614, 0.26453033089637756], "prob_new_token": [6.1297003028926156e-09, 0.0010361138265579939, 0.971977949142456, 0.9949746131896973], "prob_old_token": [0.797865629196167, 0.3336201012134552, 0.00153025658801198, 0.0002950810012407601], "l1-model.layers.2.mlp.down_proj.weight": [42557.0390625], "l2-model.layers.2.mlp.down_proj.weight": [6.942649841308594], "linf-model.layers.2.mlp.down_proj.weight": [0.0015020966529846191], "request": {"prompt": "{}, a citizen of", "subject": "Maxim Gorky", "target_new": {"str": "Tunisia"}, "old_answer": {"str": "the Russian Empire"}, "seed": 42}}, {"loss_per_step": [6.644, 1.696, 0.03, 0.012, 0.008], "prob_new": [0.4300364851951599, 0.4311244785785675, 0.9706088304519653, 0.987898588180542, 0.9922966957092285], "prob_old": [0.9250892400741577, 0.4215666949748993, 0.3081289529800415, 0.2875812351703644, 0.26302218437194824], "prob_new_token": [1.970678340512677e-06, 0.04098127782344818, 0.9474161863327026, 0.9802490472793579, 0.9885606169700623], "prob_old_token": [0.797865629196167, 0.4064904749393463, 0.020433921366930008, 0.006784155033528805, 0.0032356351148337126], "l1-model.layers.2.mlp.down_proj.weight": [48272.921875], "l2-model.layers.2.mlp.down_proj.weight": [8.186192512512207], "linf-model.layers.2.mlp.down_proj.weight": [0.0020038876682519913], "request": {"prompt": "{}, a citizen of", "subject": "Maxim Gorky", "target_new": {"str": "South Africa"}, "old_answer": {"str": "the Russian Empire"}, "seed": 42}}, {"loss_per_step": [3.19, 0.75, 0.026, 0.011, 0.007], "prob_new": [0.5904044508934021, 0.6637038588523865, 0.9742473363876343, 0.9890120029449463, 0.9926550388336182], "prob_old": [0.9250892400741577, 0.355486124753952, 0.3252638876438141, 0.32945388555526733, 0.32985734939575195], "prob_new_token": [0.797865629196167, 0.8845931887626648, 0.9671704173088074, 0.986481249332428, 0.988456130027771], "prob_old_token": [0.797865629196167, 0.8845931887626648, 0.9671704173088074, 0.986481249332428, 0.988456130027771], "l1-model.layers.2.mlp.down_proj.weight": [50194.5390625], "l2-model.layers.2.mlp.down_proj.weight": [8.303067207336426], "linf-model.layers.2.mlp.down_proj.weight": [0.0020033689215779305], "request": {"prompt": "{}, a citizen of", "subject": "Maxim Gorky", "target_new": {"str": "the United States"}, "old_answer": {"str": "the Russian Empire"}, "seed": 42}}, {"loss_per_step": [3.832, 0.098, 0.003], "prob_new": [0.6663663387298584, 0.9142452478408813, 0.9969474673271179], "prob_old": [0.9368165135383606, 0.0005565484752878547, 4.67353493149858e-06], "prob_new_token": [1.0184348866459914e-05, 0.7502627372741699, 0.9958202242851257], "prob_old_token": [0.9368165135383606, 0.0005565484752878547, 4.67353493149858e-06], "l1-model.layers.2.mlp.down_proj.weight": [36778.6640625], "l2-model.layers.2.mlp.down_proj.weight": [5.537420272827148], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "{}, a citizen of", "subject": "Adil Rami", "target_new": {"str": "Kazakhstan"}, "old_answer": {"str": "France"}, "seed": 42}}, {"loss_per_step": [7.49, 1.299, 0.046, 0.016, 0.008], "prob_new": [0.4981677830219269, 0.5356529951095581, 0.956010103225708, 0.9842069149017334, 0.991736650466919], "prob_old": [0.9368165135383606, 0.00032780010951682925, 3.85058083338663e-05, 1.612780943105463e-05, 9.059641342901159e-06], "prob_new_token": [3.130867014533578e-07, 0.0747474879026413, 0.9125833511352539, 
0.9689099192619324, 0.9840107560157776], "prob_old_token": [0.9368165135383606, 0.00032780010951682925, 3.85058083338663e-05, 1.612780943105463e-05, 9.059641342901159e-06], "l1-model.layers.2.mlp.down_proj.weight": [50879.484375], "l2-model.layers.2.mlp.down_proj.weight": [8.3587007522583], "linf-model.layers.2.mlp.down_proj.weight": [0.001997925341129303], "request": {"prompt": "{}, a citizen of", "subject": "Adil Rami", "target_new": {"str": "Hungary"}, "old_answer": {"str": "France"}, "seed": 42}}, {"loss_per_step": [3.98, 1.267, 0.207, 0.021, 0.002], "prob_new": [0.6466570496559143, 0.672829270362854, 0.8459760546684265, 0.9796420931816101, 0.9975956678390503], "prob_old": [0.9368165135383606, 0.00025615430786274374, 0.0016491763526573777, 3.86787905881647e-05, 1.3155020042177057e-06], "prob_new_token": [6.9381185312522575e-06, 0.022441519424319267, 0.5383267998695374, 0.9390404224395752, 0.99289470911026], "prob_old_token": [0.9368165135383606, 0.00025615430786274374, 0.0016491763526573777, 3.86787905881647e-05, 1.3155020042177057e-06], "l1-model.layers.2.mlp.down_proj.weight": [48892.15625], "l2-model.layers.2.mlp.down_proj.weight": [8.216991424560547], "linf-model.layers.2.mlp.down_proj.weight": [0.002004563808441162], "request": {"prompt": "{}, a citizen of", "subject": "Adil Rami", "target_new": {"str": "Macedonia"}, "old_answer": {"str": "France"}, "seed": 42}}, {"loss_per_step": [10.716, 0.591, 0.001], "prob_new": [2.2181133317644708e-05, 0.5540189146995544, 0.9990185499191284], "prob_old": [0.9892486333847046, 0.00029201837605796754, 3.512749117362546e-06], "prob_new_token": [2.2181133317644708e-05, 0.5540189146995544, 0.9990185499191284], "prob_old_token": [0.9892486333847046, 0.00029201837605796754, 3.512749117362546e-06], "l1-model.layers.2.mlp.down_proj.weight": [35009.21484375], "l2-model.layers.2.mlp.down_proj.weight": [5.38676643371582], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "{}, a citizen of", "subject": "Sebasti\u00e1n Pi\u00f1era", "target_new": {"str": "Venezuela"}, "old_answer": {"str": "Chile"}, "seed": 42}}, {"loss_per_step": [7.057, 0.858, 0.004], "prob_new": [0.499226450920105, 0.5844882726669312, 0.9956189393997192], "prob_old": [0.9892486333847046, 0.0007041063508950174, 1.0159785546193234e-07], "prob_new_token": [7.427796617776039e-07, 0.18211980164051056, 0.9956451058387756], "prob_old_token": [0.9892486333847046, 0.0007041063508950174, 1.0159785546193234e-07], "l1-model.layers.2.mlp.down_proj.weight": [33780.078125], "l2-model.layers.2.mlp.down_proj.weight": [5.295445442199707], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "{}, a citizen of", "subject": "Sebasti\u00e1n Pi\u00f1era", "target_new": {"str": "Hong Kong"}, "old_answer": {"str": "Chile"}, "seed": 42}}, {"loss_per_step": [9.949, 2.186, 0.067, 0.017, 0.016, 0.009], "prob_new": [0.4148971736431122, 0.5016321539878845, 0.9370886087417603, 0.9833170771598816, 0.9839900135993958, 0.9908669590950012], "prob_old": [0.9892486333847046, 0.00021462094446178526, 4.190743766230298e-06, 2.1253491411243886e-07, 1.4810127879627544e-07, 4.25234851775258e-08], "prob_new_token": [2.7513558098490876e-09, 0.012745287269353867, 0.87645423412323, 0.9698739051818848, 0.9719637036323547, 0.9854649901390076], "prob_old_token": [0.9892486333847046, 0.00021462094446178526, 4.190743766230298e-06, 2.1253491411243886e-07, 1.4810127879627544e-07, 4.25234851775258e-08], "l1-model.layers.2.mlp.down_proj.weight": [59510.390625], 
"l2-model.layers.2.mlp.down_proj.weight": [9.642156600952148], "linf-model.layers.2.mlp.down_proj.weight": [0.002495977096259594], "request": {"prompt": "{}, a citizen of", "subject": "Sebasti\u00e1n Pi\u00f1era", "target_new": {"str": "Athens"}, "old_answer": {"str": "Chile"}, "seed": 42}}, {"loss_per_step": [7.545, 1.892, 0.504, 0.265, 0.042, 0.033, 0.023, 0.016, 0.012, 0.01], "prob_new": [0.05250902473926544, 0.49592170119285583, 0.6352562308311462, 0.7804464101791382, 0.9595603942871094, 0.9674420356750488, 0.9768802523612976, 0.9840672016143799, 0.988144040107727, 0.9905281066894531], "prob_old": [0.9980630874633789, 0.28677836060523987, 0.21072641015052795, 0.3227940499782562, 0.1842460185289383, 0.11670149862766266, 0.09417077898979187, 0.080434650182724, 0.06750369817018509, 0.055845677852630615], "prob_new_token": [0.0006482867174781859, 0.5378967523574829, 0.5333001017570496, 0.7506163120269775, 0.9320331811904907, 0.9477370977401733, 0.960243284702301, 0.9693386554718018, 0.9751294851303101, 0.9788914322853088], "prob_old_token": [0.9967233538627625, 5.052149390394334e-06, 2.8968217520741746e-05, 9.951038009603508e-06, 1.5285297195077874e-05, 8.11910558695672e-06, 4.547587195702363e-06, 2.61720128946763e-06, 1.5571728226859705e-06, 1.0029395980382105e-06], "l1-model.layers.2.mlp.down_proj.weight": [82167.1171875], "l2-model.layers.2.mlp.down_proj.weight": [13.176114082336426], "linf-model.layers.2.mlp.down_proj.weight": [0.004415513016283512], "request": {"prompt": "{}, a citizen of", "subject": "Caster Semenya", "target_new": {"str": "the German Empire"}, "old_answer": {"str": "South Africa"}, "seed": 42}}, {"loss_per_step": [8.179, 0.509, 0.005], "prob_new": [0.4981527626514435, 0.680267333984375, 0.9949566125869751], "prob_old": [0.9980630874633789, 0.15226396918296814, 0.24184679985046387], "prob_new_token": [7.893034137396171e-08, 0.36215731501579285, 0.9913633465766907], "prob_old_token": [0.9967233538627625, 5.462579429149628e-05, 9.99938310997095e-06], "l1-model.layers.2.mlp.down_proj.weight": [34545.25], "l2-model.layers.2.mlp.down_proj.weight": [5.349137306213379], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "{}, a citizen of", "subject": "Caster Semenya", "target_new": {"str": "Kenya"}, "old_answer": {"str": "South Africa"}, "seed": 42}}, {"loss_per_step": [4.216, 1.811, 0.703, 0.121, 0.019, 0.007], "prob_new": [0.5963186621665955, 0.6016067266464233, 0.79526287317276, 0.9088606238365173, 0.9817982912063599, 0.9935294985771179], "prob_old": [0.9980630874633789, 0.1467924565076828, 0.11456329375505447, 0.06218436732888222, 0.013927841559052467, 0.0061491187661886215], "prob_new_token": [8.795165058472776e-07, 0.007068095728754997, 0.03141836076974869, 0.5509206056594849, 0.9134877324104309, 0.9711077213287354], "prob_old_token": [0.9967233538627625, 4.880298365605995e-05, 0.0014253898989409208, 0.00019604065164458007, 5.08135199197568e-05, 1.4316940905700903e-05], "l1-model.layers.2.mlp.down_proj.weight": [56018.38671875], "l2-model.layers.2.mlp.down_proj.weight": [9.396678924560547], "linf-model.layers.2.mlp.down_proj.weight": [0.002490268787369132], "request": {"prompt": "{}, a citizen of", "subject": "Caster Semenya", "target_new": {"str": "Kyrgyzstan"}, "old_answer": {"str": "South Africa"}, "seed": 42}}, {"loss_per_step": [5.625, 3.158, 0.222, 0.011, 0.006], "prob_new": [0.33576640486717224, 0.5661604404449463, 0.8167667388916016, 0.9887856245040894, 0.9944156408309937], "prob_old": [0.90196293592453, 
0.003955623134970665, 0.0009085079655051231, 0.0005100853159092367, 0.00031501930789090693], "prob_new_token": [0.01349326130002737, 0.7242677211761475, 0.6072755455970764, 0.9784200191497803, 0.991175651550293], "prob_old_token": [0.90196293592453, 0.003955623134970665, 0.0009085079655051231, 0.0005100853159092367, 0.00031501930789090693], "l1-model.layers.2.mlp.down_proj.weight": [53143.078125], "l2-model.layers.2.mlp.down_proj.weight": [8.48879623413086], "linf-model.layers.2.mlp.down_proj.weight": [0.001993909478187561], "request": {"prompt": "{}, a citizen of", "subject": "Carlo Azeglio Ciampi", "target_new": {"str": "the Marshall Islands"}, "old_answer": {"str": "Italy"}, "seed": 42}}, {"loss_per_step": [12.37, 7.165, 0.256, 0.01, 0.004], "prob_new": [4.243367584422231e-06, 0.0007734367391094565, 0.7741277813911438, 0.9896390438079834, 0.9962533712387085], "prob_old": [0.90196293592453, 0.006718344520777464, 0.00011197620187886059, 7.955100045364816e-06, 2.1545024537772406e-06], "prob_new_token": [4.243367584422231e-06, 0.0007734367391094565, 0.7741277813911438, 0.9896390438079834, 0.9962533712387085], "prob_old_token": [0.90196293592453, 0.006718344520777464, 0.00011197620187886059, 7.955100045364816e-06, 2.1545024537772406e-06], "l1-model.layers.2.mlp.down_proj.weight": [51094.75], "l2-model.layers.2.mlp.down_proj.weight": [8.376301765441895], "linf-model.layers.2.mlp.down_proj.weight": [0.001983175054192543], "request": {"prompt": "{}, a citizen of", "subject": "Carlo Azeglio Ciampi", "target_new": {"str": "Norway"}, "old_answer": {"str": "Italy"}, "seed": 42}}, {"loss_per_step": [4.273, 1.586, 0.025, 0.002], "prob_new": [0.5603891611099243, 0.7385787963867188, 0.9759702682495117, 0.9978781342506409], "prob_old": [0.90196293592453, 0.018176674842834473, 0.000875666388310492, 2.3983477149158716e-05], "prob_new_token": [1.4977833018292586e-07, 0.0018418835243210196, 0.912491500377655, 0.9956502914428711], "prob_old_token": [0.90196293592453, 0.018176674842834473, 0.000875666388310492, 2.3983477149158716e-05], "l1-model.layers.2.mlp.down_proj.weight": [43029.5625], "l2-model.layers.2.mlp.down_proj.weight": [6.989563465118408], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024282038211823], "request": {"prompt": "{}, a citizen of", "subject": "Carlo Azeglio Ciampi", "target_new": {"str": "Saudi Arabia"}, "old_answer": {"str": "Italy"}, "seed": 42}}, {"loss_per_step": [5.22, 1.741, 0.157, 0.006], "prob_new": [0.49267372488975525, 0.48317381739616394, 0.8625445365905762, 0.9937064051628113], "prob_old": [0.903365969657898, 0.3612523078918457, 0.3324347138404846, 0.30515503883361816], "prob_new_token": [2.9673936296603642e-05, 0.032942596822977066, 0.7468706369400024, 0.9875145554542542], "prob_old_token": [0.7809603810310364, 0.0887005552649498, 0.03817780315876007, 0.004518687259405851], "l1-model.layers.2.mlp.down_proj.weight": [42310.0], "l2-model.layers.2.mlp.down_proj.weight": [6.899415016174316], "linf-model.layers.2.mlp.down_proj.weight": [0.001502474769949913], "request": {"prompt": "{}, a citizen of", "subject": "Gerald Ford", "target_new": {"str": "Romania"}, "old_answer": {"str": "the United States"}, "seed": 42}}, {"loss_per_step": [9.727, 1.105, 0.037, 0.001], "prob_new": [5.96539830439724e-05, 0.3312709331512451, 0.9637478590011597, 0.9985754489898682], "prob_old": [0.903365969657898, 0.38666701316833496, 0.5199124813079834, 0.5487784147262573], "prob_new_token": [5.96539830439724e-05, 0.3312709331512451, 0.9637478590011597, 0.9985754489898682], "prob_old_token": 
[0.7809603810310364, 0.1927114874124527, 0.012526093050837517, 0.0002493453212082386], "l1-model.layers.2.mlp.down_proj.weight": [44983.2578125], "l2-model.layers.2.mlp.down_proj.weight": [7.084186553955078], "linf-model.layers.2.mlp.down_proj.weight": [0.00150243379175663], "request": {"prompt": "{}, a citizen of", "subject": "Gerald Ford", "target_new": {"str": "Canada"}, "old_answer": {"str": "the United States"}, "seed": 42}}, {"loss_per_step": [5.152, 1.043, 0.775, 0.108, 0.04, 0.017, 0.01], "prob_new": [0.5807243585586548, 0.5357887744903564, 0.6894458532333374, 0.9068140983581543, 0.9624066352844238, 0.9835537672042847, 0.9903862476348877], "prob_old": [0.903365969657898, 0.47489091753959656, 0.44892215728759766, 0.5731414556503296, 0.6290497779846191, 0.6498449444770813, 0.6558935642242432], "prob_new_token": [0.7809603810310364, 0.5465136766433716, 0.9700141549110413, 0.7272955775260925, 0.8943718075752258, 0.9576171040534973, 0.9774410128593445], "prob_old_token": [0.7809603810310364, 0.5465136766433716, 0.9700141549110413, 0.7272955775260925, 0.8943718075752258, 0.9576171040534973, 0.9774410128593445], "l1-model.layers.2.mlp.down_proj.weight": [58068.29296875], "l2-model.layers.2.mlp.down_proj.weight": [10.096199989318848], "linf-model.layers.2.mlp.down_proj.weight": [0.0029834192246198654], "request": {"prompt": "{}, a citizen of", "subject": "Gerald Ford", "target_new": {"str": "the Czech Republic"}, "old_answer": {"str": "the United States"}, "seed": 42}}, {"loss_per_step": [10.69, 3.123, 0.002], "prob_new": [2.2763386368751526e-05, 0.04404575377702713, 0.9981666207313538], "prob_old": [0.850161075592041, 0.30719876289367676, 0.32522502541542053], "prob_new_token": [2.2763386368751526e-05, 0.04404575377702713, 0.9981666207313538], "prob_old_token": [0.6044589281082153, 0.17124204337596893, 0.00034510178375057876], "l1-model.layers.2.mlp.down_proj.weight": [32488.20703125], "l2-model.layers.2.mlp.down_proj.weight": [5.164551258087158], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "{}, a citizen of", "subject": "Ned Beatty", "target_new": {"str": "Turkey"}, "old_answer": {"str": "the United States"}, "seed": 42}}, {"loss_per_step": [7.47, 2.958, 0.956, 0.008], "prob_new": [0.12654374539852142, 0.4891194701194763, 0.5734193325042725, 0.9924440979957581], "prob_old": [0.850161075592041, 0.37020787596702576, 0.42958760261535645, 0.323330819606781], "prob_new_token": [1.2821835753129562e-06, 0.0027658860199153423, 0.14790567755699158, 0.9850741624832153], "prob_old_token": [0.6044589281082153, 0.1530800759792328, 0.3202785849571228, 0.0020800894126296043], "l1-model.layers.2.mlp.down_proj.weight": [45268.1328125], "l2-model.layers.2.mlp.down_proj.weight": [7.059803009033203], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024812892079353], "request": {"prompt": "{}, a citizen of", "subject": "Ned Beatty", "target_new": {"str": "Venice"}, "old_answer": {"str": "the United States"}, "seed": 42}}, {"loss_per_step": [5.905, 2.866, 2.426, 0.125, 0.034, 0.013, 0.007], "prob_new": [0.33807191252708435, 0.4985496997833252, 0.4260794222354889, 0.8895459175109863, 0.966977596282959, 0.9869353175163269, 0.9932304620742798], "prob_old": [0.850161075592041, 0.3950202167034149, 0.2777894139289856, 0.3772079348564148, 0.34360015392303467, 0.32032352685928345, 0.3017630875110626], "prob_new_token": [1.0994413969456218e-05, 0.003263272810727358, 0.8428817391395569, 0.7802181243896484, 0.9347801804542542, 0.9744178652763367, 0.9867969751358032], 
"prob_old_token": [0.6044589281082153, 0.16347521543502808, 0.04785161465406418, 0.05831952393054962, 0.008674961514770985, 0.0021088500507175922, 0.0007730083307251334], "l1-model.layers.2.mlp.down_proj.weight": [59444.5390625], "l2-model.layers.2.mlp.down_proj.weight": [10.1049165725708], "linf-model.layers.2.mlp.down_proj.weight": [0.002963319653645158], "request": {"prompt": "{}, a citizen of", "subject": "Ned Beatty", "target_new": {"str": "Qatar"}, "old_answer": {"str": "the United States"}, "seed": 42}}, {"loss_per_step": [12.025, 11.975, 4.501, 1.582, 0.168, 0.05, 0.027, 0.02, 0.017, 0.016, 0.014, 0.012, 0.01], "prob_new": [5.990005320199998e-06, 6.301061148406006e-06, 0.011100069619715214, 0.20556549727916718, 0.8452566266059875, 0.95078444480896, 0.9731603860855103, 0.9805108904838562, 0.9829210042953491, 0.9842456579208374, 0.9860825538635254, 0.9882470965385437, 0.9903363585472107], "prob_old": [0.9677128195762634, 0.4146912693977356, 0.33868852257728577, 0.4694068729877472, 0.46334531903266907, 0.47086936235427856, 0.4821964204311371, 0.48859089612960815, 0.49209216237068176, 0.4942777752876282, 0.49575600028038025, 0.496724396944046, 0.4973490536212921], "prob_new_token": [5.990005320199998e-06, 6.301061148406006e-06, 0.011100069619715214, 0.20556549727916718, 0.8452566266059875, 0.95078444480896, 0.9731603860855103, 0.9805108904838562, 0.9829210042953491, 0.9842456579208374, 0.9860825538635254, 0.9882470965385437, 0.9903363585472107], "prob_old_token": [0.9356738924980164, 3.973872298956849e-05, 0.001910353428684175, 0.011389653198421001, 0.003188928822055459, 0.0004369307716842741, 0.00011224144691368565, 4.2935978854075074e-05, 2.265650618937798e-05, 1.5657016774639487e-05, 1.296835489483783e-05, 1.1959924449911341e-05, 1.1623462341958657e-05], "l1-model.layers.2.mlp.down_proj.weight": [82731.953125], "l2-model.layers.2.mlp.down_proj.weight": [14.495597839355469], "linf-model.layers.2.mlp.down_proj.weight": [0.0054616848938167095], "request": {"prompt": "{} has originated in the country named", "subject": "Belgian Shepherd", "target_new": {"str": "Ireland"}, "old_answer": {"str": "Belgium"}, "seed": 42}}, {"loss_per_step": [11.775, 6.064, 1.192, 0.007], "prob_new": [7.692177860008087e-06, 0.0023247934877872467, 0.3037325441837311, 0.9926562309265137], "prob_old": [0.9677128195762634, 0.4863603413105011, 0.5043914318084717, 0.497402548789978], "prob_new_token": [7.692177860008087e-06, 0.0023247934877872467, 0.3037325441837311, 0.9926562309265137], "prob_old_token": [0.9356738924980164, 0.005738952197134495, 0.039626870304346085, 0.0001435440790373832], "l1-model.layers.2.mlp.down_proj.weight": [39307.09375], "l2-model.layers.2.mlp.down_proj.weight": [6.677119255065918], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024840831756592], "request": {"prompt": "{} has originated in the country named", "subject": "Belgian Shepherd", "target_new": {"str": "India"}, "old_answer": {"str": "Belgium"}, "seed": 42}}, {"loss_per_step": [5.996, 3.193, 0.022, 0.007], "prob_new": [0.4963729977607727, 0.43166786432266235, 0.9785498380661011, 0.9934236407279968], "prob_old": [0.9677128195762634, 0.4059203267097473, 0.4936050772666931, 0.49351444840431213], "prob_new_token": [6.244267751753796e-06, 0.0019553513266146183, 0.9589853286743164, 0.9875123500823975], "prob_old_token": [0.9356738924980164, 0.003998397849500179, 1.1498972526169382e-05, 1.4582569747290108e-06], "l1-model.layers.2.mlp.down_proj.weight": [38937.1640625], "l2-model.layers.2.mlp.down_proj.weight": [6.710274696350098], 
"linf-model.layers.2.mlp.down_proj.weight": [0.001502377912402153], "request": {"prompt": "{} has originated in the country named", "subject": "Belgian Shepherd", "target_new": {"str": "Hungary"}, "old_answer": {"str": "Belgium"}, "seed": 42}}, {"loss_per_step": [7.19, 5.276, 0.416, 0.003], "prob_new": [0.4979293644428253, 0.46097543835639954, 0.7170982956886292, 0.9969565272331238], "prob_old": [0.9129787683486938, 8.547295874450356e-05, 0.0008923363639041781, 1.9844999769702554e-05], "prob_new_token": [5.713351356462226e-07, 2.833875987562351e-05, 0.43609943985939026, 0.9950876235961914], "prob_old_token": [0.9129787683486938, 8.547295874450356e-05, 0.0008923363639041781, 1.9844999769702554e-05], "l1-model.layers.2.mlp.down_proj.weight": [39412.80078125], "l2-model.layers.2.mlp.down_proj.weight": [6.663397789001465], "linf-model.layers.2.mlp.down_proj.weight": [0.00150248222053051], "request": {"prompt": "{} has originated in the country named", "subject": "Cardcaptor Sakura", "target_new": {"str": "Belgium"}, "old_answer": {"str": "Japan"}, "seed": 42}}, {"loss_per_step": [5.438, 6.709, 2.15, 2.094, 0.024, 0.011, 0.006], "prob_new": [0.49805375933647156, 0.49763962626457214, 0.44519516825675964, 0.12954798340797424, 0.9767665863037109, 0.9893629550933838, 0.9937943816184998], "prob_old": [0.9129787683486938, 2.3555530503927002e-07, 1.3583114196080714e-05, 1.1681074738589814e-06, 7.1295930865744594e-06, 3.6228354929335183e-06, 1.6150842157003353e-06], "prob_new_token": [1.8966900825034827e-05, 1.4952736364648445e-06, 0.01551094837486744, 0.08964185416698456, 0.9588708877563477, 0.9823135137557983, 0.9902083873748779], "prob_old_token": [0.9129787683486938, 2.3555530503927002e-07, 1.3583114196080714e-05, 1.1681074738589814e-06, 7.1295930865744594e-06, 3.6228354929335183e-06, 1.6150842157003353e-06], "l1-model.layers.2.mlp.down_proj.weight": [56736.9609375], "l2-model.layers.2.mlp.down_proj.weight": [9.88753890991211], "linf-model.layers.2.mlp.down_proj.weight": [0.002950012683868408], "request": {"prompt": "{} has originated in the country named", "subject": "Cardcaptor Sakura", "target_new": {"str": "Hungary"}, "old_answer": {"str": "Japan"}, "seed": 42}}, {"loss_per_step": [11.633, 15.4, 4.514, 0.013, 0.004], "prob_new": [8.868557415553369e-06, 2.050245626605829e-07, 0.010949154384434223, 0.9873653054237366, 0.9958640933036804], "prob_old": [0.9129787683486938, 9.340679980596178e-07, 1.6100992070278153e-05, 2.0282139303162694e-05, 8.54473273648182e-06], "prob_new_token": [8.868557415553369e-06, 2.050245626605829e-07, 0.010949154384434223, 0.9873653054237366, 0.9958640933036804], "prob_old_token": [0.9129787683486938, 9.340679980596178e-07, 1.6100992070278153e-05, 2.0282139303162694e-05, 8.54473273648182e-06], "l1-model.layers.2.mlp.down_proj.weight": [45866.4609375], "l2-model.layers.2.mlp.down_proj.weight": [7.856409549713135], "linf-model.layers.2.mlp.down_proj.weight": [0.0019979821518063545], "request": {"prompt": "{} has originated in the country named", "subject": "Cardcaptor Sakura", "target_new": {"str": "Mexico"}, "old_answer": {"str": "Japan"}, "seed": 42}}, {"loss_per_step": [10.862, 11.012, 5.329, 0.463, 0.026, 0.027, 0.026, 0.023, 0.017, 0.012, 0.007], "prob_new": [1.918196903716307e-05, 1.6498590412084013e-05, 0.004849961958825588, 0.6292195320129395, 0.9743322134017944, 0.9729213118553162, 0.9738971590995789, 0.9774633049964905, 0.9828192591667175, 0.9883173704147339, 0.9925646781921387], "prob_old": [0.9577308893203735, 0.44986823201179504, 0.4177010953426361, 
0.39443719387054443, 0.44921499490737915, 0.4564533829689026, 0.44936704635620117, 0.44201168417930603, 0.4355100095272064, 0.43018612265586853, 0.42620566487312317], "prob_new_token": [1.918196903716307e-05, 1.6498590412084013e-05, 0.004849961958825588, 0.6292195320129395, 0.9743322134017944, 0.9729213118553162, 0.9738971590995789, 0.9774633049964905, 0.9828192591667175, 0.9883173704147339, 0.9925646781921387], "prob_old_token": [0.9156515598297119, 1.8359112345933681e-06, 7.542616367572919e-05, 0.0018260281067341566, 9.119789319811389e-05, 3.4905107895610854e-05, 2.0820003555854782e-05, 1.1490674296510406e-05, 5.447479907161323e-06, 2.3616139515070245e-06, 1.0129538168257568e-06], "l1-model.layers.2.mlp.down_proj.weight": [79358.0234375], "l2-model.layers.2.mlp.down_proj.weight": [13.553061485290527], "linf-model.layers.2.mlp.down_proj.weight": [0.004635695368051529], "request": {"prompt": "{} has originated in the country named", "subject": "Belgian Malinois", "target_new": {"str": "China"}, "old_answer": {"str": "Belgium"}, "seed": 42}}, {"loss_per_step": [5.521, 6.075, 2.01, 0.402, 0.02, 0.007], "prob_new": [0.4929773509502411, 0.48504021763801575, 0.41526368260383606, 0.7125068306922913, 0.980125904083252, 0.9927657842636108], "prob_old": [0.9577308893203735, 0.30343788862228394, 0.4266655445098877, 0.38691309094429016, 0.38634350895881653, 0.3511625826358795], "prob_new_token": [1.6252704881480895e-05, 5.446378509077476e-06, 0.022231144830584526, 0.46660634875297546, 0.9749118089675903, 0.9929158687591553], "prob_old_token": [0.9156515598297119, 4.5566125663754065e-06, 4.477469701669179e-05, 1.7985832528211176e-05, 1.1169414392497856e-05, 6.448130989156198e-06], "l1-model.layers.2.mlp.down_proj.weight": [52560.43359375], "l2-model.layers.2.mlp.down_proj.weight": [9.095659255981445], "linf-model.layers.2.mlp.down_proj.weight": [0.0024760253727436066], "request": {"prompt": "{} has originated in the country named", "subject": "Belgian Malinois", "target_new": {"str": "Hungary"}, "old_answer": {"str": "Belgium"}, "seed": 42}}, {"loss_per_step": [11.822, 6.11, 1.043, 0.011, 0.003], "prob_new": [7.343259767367272e-06, 0.0022206627763807774, 0.3522298038005829, 0.9889440536499023, 0.9972469806671143], "prob_old": [0.9577308893203735, 0.4963470697402954, 0.49440279603004456, 0.49731379747390747, 0.4983997642993927], "prob_new_token": [7.343259767367272e-06, 0.0022206627763807774, 0.3522298038005829, 0.9889440536499023, 0.9972469806671143], "prob_old_token": [0.9156515598297119, 0.0014975343365222216, 0.002624508924782276, 4.764528421219438e-05, 1.3220129631008604e-06], "l1-model.layers.2.mlp.down_proj.weight": [47567.7109375], "l2-model.layers.2.mlp.down_proj.weight": [8.071836471557617], "linf-model.layers.2.mlp.down_proj.weight": [0.0019609443843364716], "request": {"prompt": "{} has originated in the country named", "subject": "Belgian Malinois", "target_new": {"str": "India"}, "old_answer": {"str": "Belgium"}, "seed": 42}}, {"loss_per_step": [6.118, 7.409, 2.954, 0.15, 0.035, 0.026, 0.018, 0.012, 0.008], "prob_new": [0.038755644112825394, 0.20528408885002136, 0.496692955493927, 0.8678258657455444, 0.9658875465393066, 0.9746158719062805, 0.9818381071090698, 0.9876888990402222, 0.9915725588798523], "prob_old": [0.9541879892349243, 0.48392704129219055, 0.4956965744495392, 0.48136067390441895, 0.35220205783843994, 0.28534770011901855, 0.1997801512479782, 0.14730232954025269, 0.1239074245095253], "prob_new_token": [6.265346019063145e-05, 8.940785960476205e-07, 0.0027433622162789106, 
0.7566558718681335, 0.9587091207504272, 0.9714908599853516, 0.9795233011245728, 0.9862976670265198, 0.9908943176269531], "prob_old_token": [0.910027801990509, 1.7802049612214432e-08, 1.947784539879649e-06, 6.5144718064402696e-06, 2.2530164187628543e-06, 1.8975594002768048e-06, 1.4244737940316554e-06, 1.3390731510298792e-06, 1.5392690784210572e-06], "l1-model.layers.2.mlp.down_proj.weight": [73893.796875], "l2-model.layers.2.mlp.down_proj.weight": [12.348896980285645], "linf-model.layers.2.mlp.down_proj.weight": [0.003992781043052673], "request": {"prompt": "{} has originated in the country named", "subject": "Komondor", "target_new": {"str": "Thailand"}, "old_answer": {"str": "Hungary"}, "seed": 42}}, {"loss_per_step": [11.449, 12.186, 4.486, 0.64, 0.006], "prob_new": [1.066358254320221e-05, 5.100449016026687e-06, 0.011263263411819935, 0.527143657207489, 0.9942261576652527], "prob_old": [0.9541879892349243, 0.4211507737636566, 0.17038749158382416, 0.15971902012825012, 0.12252604961395264], "prob_new_token": [1.066358254320221e-05, 5.100449016026687e-06, 0.011263263411819935, 0.527143657207489, 0.9942261576652527], "prob_old_token": [0.910027801990509, 3.383276547808123e-09, 7.606487315570121e-07, 8.462084792881797e-07, 4.074453663349686e-09], "l1-model.layers.2.mlp.down_proj.weight": [47359.85546875], "l2-model.layers.2.mlp.down_proj.weight": [8.070183753967285], "linf-model.layers.2.mlp.down_proj.weight": [0.0020058476366102695], "request": {"prompt": "{} has originated in the country named", "subject": "Komondor", "target_new": {"str": "Japan"}, "old_answer": {"str": "Hungary"}, "seed": 42}}, {"loss_per_step": [10.052, 13.883, 5.548, 1.107, 0.034, 0.007], "prob_new": [4.3104024371132255e-05, 9.348495382255351e-07, 0.003895093686878681, 0.3306420147418976, 0.9668357372283936, 0.99299156665802], "prob_old": [0.9541879892349243, 0.49133414030075073, 0.4722651541233063, 0.48587292432785034, 0.48699137568473816, 0.488903284072876], "prob_new_token": [4.3104024371132255e-05, 9.348495382255351e-07, 0.003895093686878681, 0.3306420147418976, 0.9668357372283936, 0.99299156665802], "prob_old_token": [0.910027801990509, 1.422383766325197e-09, 1.1988834103249246e-06, 1.3938420124759432e-05, 2.4393300463998457e-06, 2.3567775997435092e-07], "l1-model.layers.2.mlp.down_proj.weight": [55694.91015625], "l2-model.layers.2.mlp.down_proj.weight": [9.371199607849121], "linf-model.layers.2.mlp.down_proj.weight": [0.0024829902686178684], "request": {"prompt": "{} has originated in the country named", "subject": "Komondor", "target_new": {"str": "Germany"}, "old_answer": {"str": "Hungary"}, "seed": 42}}, {"loss_per_step": [4.323, 5.385, 2.549, 0.198, 0.014, 0.008], "prob_new": [0.4986044466495514, 0.49785614013671875, 0.2923584580421448, 0.835479736328125, 0.9858433604240417, 0.9916362762451172], "prob_old": [0.922561526298523, 4.860362992076261e-07, 7.104882797648315e-07, 4.6216489863581955e-05, 2.0508521629380994e-05, 1.2062847417837474e-05], "prob_new_token": [0.00017634380492381752, 2.1127369109308347e-05, 0.010636911727488041, 0.6774411797523499, 0.9750160574913025, 0.9864100813865662], "prob_old_token": [0.922561526298523, 4.860362992076261e-07, 7.104882797648315e-07, 4.6216489863581955e-05, 2.0508521629380994e-05, 1.2062847417837474e-05], "l1-model.layers.2.mlp.down_proj.weight": [53719.91796875], "l2-model.layers.2.mlp.down_proj.weight": [9.143217086791992], "linf-model.layers.2.mlp.down_proj.weight": [0.0024672490544617176], "request": {"prompt": "{} has originated in the country named", "subject": 
"Irish Setter", "target_new": {"str": "Hungary"}, "old_answer": {"str": "Ireland"}, "seed": 42}}, {"loss_per_step": [7.928, 12.01, 3.616, 0.162, 0.092, 0.032, 0.015, 0.009], "prob_new": [0.0003604252415243536, 6.085889708629111e-06, 0.02688419632613659, 0.8501378893852234, 0.9117423295974731, 0.9684942364692688, 0.9853798151016235, 0.9907981753349304], "prob_old": [0.922561526298523, 7.223351303764503e-07, 9.580654295859858e-05, 8.275968866655603e-05, 2.7388632588554174e-05, 1.5431838619406335e-05, 1.1532445569173433e-05, 9.962859621737152e-06], "prob_new_token": [0.0003604252415243536, 6.085889708629111e-06, 0.02688419632613659, 0.8501378893852234, 0.9117423295974731, 0.9684942364692688, 0.9853798151016235, 0.9907981753349304], "prob_old_token": [0.922561526298523, 7.223351303764503e-07, 9.580654295859858e-05, 8.275968866655603e-05, 2.7388632588554174e-05, 1.5431838619406335e-05, 1.1532445569173433e-05, 9.962859621737152e-06], "l1-model.layers.2.mlp.down_proj.weight": [68338.8046875], "l2-model.layers.2.mlp.down_proj.weight": [11.464038848876953], "linf-model.layers.2.mlp.down_proj.weight": [0.003418130800127983], "request": {"prompt": "{} has originated in the country named", "subject": "Irish Setter", "target_new": {"str": "Scotland"}, "old_answer": {"str": "Ireland"}, "seed": 42}}, {"loss_per_step": [9.325, 14.166, 4.376, 0.158, 0.008], "prob_new": [8.919520769268274e-05, 7.040448508632835e-07, 0.012578386813402176, 0.8542290329933167, 0.9915620684623718], "prob_old": [0.922561526298523, 3.1201187766782823e-07, 5.583211168413982e-05, 4.559631270240061e-05, 2.7237620088271797e-06], "prob_new_token": [8.919520769268274e-05, 7.040448508632835e-07, 0.012578386813402176, 0.8542290329933167, 0.9915620684623718], "prob_old_token": [0.922561526298523, 3.1201187766782823e-07, 5.583211168413982e-05, 4.559631270240061e-05, 2.7237620088271797e-06], "l1-model.layers.2.mlp.down_proj.weight": [50309.51953125], "l2-model.layers.2.mlp.down_proj.weight": [8.258260726928711], "linf-model.layers.2.mlp.down_proj.weight": [0.0020055603235960007], "request": {"prompt": "{} has originated in the country named", "subject": "Irish Setter", "target_new": {"str": "Germany"}, "old_answer": {"str": "Ireland"}, "seed": 42}}, {"loss_per_step": [14.658, 14.48, 4.076, 0.955, 0.066, 0.01], "prob_new": [4.3077926648038556e-07, 5.145133741280006e-07, 0.016979023814201355, 0.38486191630363464, 0.9360771775245667, 0.9903084635734558], "prob_old": [0.9348454475402832, 1.1668041821621955e-07, 2.6786650778376497e-05, 1.9756436813622713e-05, 5.654573214997072e-06, 1.5257704717441811e-06], "prob_new_token": [4.3077926648038556e-07, 5.145133741280006e-07, 0.016979023814201355, 0.38486191630363464, 0.9360771775245667, 0.9903084635734558], "prob_old_token": [0.9348454475402832, 1.1668041821621955e-07, 2.6786650778376497e-05, 1.9756436813622713e-05, 5.654573214997072e-06, 1.5257704717441811e-06], "l1-model.layers.2.mlp.down_proj.weight": [55357.32421875], "l2-model.layers.2.mlp.down_proj.weight": [9.324434280395508], "linf-model.layers.2.mlp.down_proj.weight": [0.0024865265004336834], "request": {"prompt": "{} has originated in the country named", "subject": "Taoism", "target_new": {"str": "Germany"}, "old_answer": {"str": "China"}, "seed": 42}}, {"loss_per_step": [15.198, 5.389, 0.259, 0.065, 0.02, 0.008], "prob_new": [2.509437138087378e-07, 0.004565146751701832, 0.7717125415802002, 0.9368724822998047, 0.9804583191871643, 0.991642951965332], "prob_old": [0.9348454475402832, 0.006065841298550367, 6.037120692781173e-05, 
9.999231224355754e-06, 2.5837268822215265e-06, 9.26124926081684e-07], "prob_new_token": [2.509437138087378e-07, 0.004565146751701832, 0.7717125415802002, 0.9368724822998047, 0.9804583191871643, 0.991642951965332], "prob_old_token": [0.9348454475402832, 0.006065841298550367, 6.037120692781173e-05, 9.999231224355754e-06, 2.5837268822215265e-06, 9.26124926081684e-07], "l1-model.layers.2.mlp.down_proj.weight": [53075.0390625], "l2-model.layers.2.mlp.down_proj.weight": [9.196612358093262], "linf-model.layers.2.mlp.down_proj.weight": [0.002478852402418852], "request": {"prompt": "{} has originated in the country named", "subject": "Taoism", "target_new": {"str": "Sweden"}, "old_answer": {"str": "China"}, "seed": 42}}, {"loss_per_step": [12.126, 13.346, 4.002, 0.325, 0.074, 0.028, 0.014, 0.008], "prob_new": [5.414755378296832e-06, 1.5995016156011843e-06, 0.01827584020793438, 0.7222398519515991, 0.9289898872375488, 0.9727000594139099, 0.9865663647651672, 0.9925138354301453], "prob_old": [0.9348454475402832, 1.1718457670895077e-07, 2.1868965632165782e-05, 0.0002587324706837535, 0.00010002507042372599, 3.468619252089411e-05, 1.1736699889297597e-05, 3.879225459968438e-06], "prob_new_token": [5.414755378296832e-06, 1.5995016156011843e-06, 0.01827584020793438, 0.7222398519515991, 0.9289898872375488, 0.9727000594139099, 0.9865663647651672, 0.9925138354301453], "prob_old_token": [0.9348454475402832, 1.1718457670895077e-07, 2.1868965632165782e-05, 0.0002587324706837535, 0.00010002507042372599, 3.468619252089411e-05, 1.1736699889297597e-05, 3.879225459968438e-06], "l1-model.layers.2.mlp.down_proj.weight": [68063.5625], "l2-model.layers.2.mlp.down_proj.weight": [11.463764190673828], "linf-model.layers.2.mlp.down_proj.weight": [0.0034055542200803757], "request": {"prompt": "{} has originated in the country named", "subject": "Taoism", "target_new": {"str": "Japan"}, "old_answer": {"str": "China"}, "seed": 42}}, {"loss_per_step": [6.428, 6.959, 3.702, 0.681, 0.021, 0.008], "prob_new": [0.4826441705226898, 0.42141082882881165, 0.490043580532074, 0.6267272233963013, 0.9797106981277466, 0.9919289350509644], "prob_old": [0.9140400886535645, 6.208474133018171e-06, 1.2555769899336156e-06, 3.8332559597620275e-06, 9.088137176149758e-08, 4.9559151449329875e-08], "prob_new_token": [2.706101440708153e-06, 1.0710123206081335e-06, 0.0006216623587533832, 0.25682610273361206, 0.9611918926239014, 0.9854177236557007], "prob_old_token": [0.9140400886535645, 6.208474133018171e-06, 1.2555769899336156e-06, 3.8332559597620275e-06, 9.088137176149758e-08, 4.9559151449329875e-08], "l1-model.layers.2.mlp.down_proj.weight": [54401.21875], "l2-model.layers.2.mlp.down_proj.weight": [9.16671085357666], "linf-model.layers.2.mlp.down_proj.weight": [0.0024753541219979525], "request": {"prompt": "{} has originated in the country named", "subject": "Die Welt", "target_new": {"str": "Tibet"}, "old_answer": {"str": "Germany"}, "seed": 42}}, {"loss_per_step": [4.91, 5.904, 2.609, 3.653, 0.213, 0.032, 0.005], "prob_new": [0.25928178429603577, 0.21752071380615234, 0.44421708583831787, 0.04405807703733444, 0.8256651163101196, 0.9686675667762756, 0.994969367980957], "prob_old": [0.9140400886535645, 1.1118242582597304e-05, 7.317711424548179e-05, 3.0699713533977047e-05, 9.358379315926868e-07, 1.80560970619581e-08, 5.819811299545563e-09], "prob_new_token": [0.00010485467646503821, 1.7103151549235918e-05, 0.006135426461696625, 0.008426511660218239, 0.6571166515350342, 0.9377289414405823, 0.9901740550994873], "prob_old_token": [0.9140400886535645, 
1.1118242582597304e-05, 7.317711424548179e-05, 3.0699713533977047e-05, 9.358379315926868e-07, 1.80560970619581e-08, 5.819811299545563e-09], "l1-model.layers.2.mlp.down_proj.weight": [56608.85546875], "l2-model.layers.2.mlp.down_proj.weight": [9.870542526245117], "linf-model.layers.2.mlp.down_proj.weight": [0.0030135754495859146], "request": {"prompt": "{} has originated in the country named", "subject": "Die Welt", "target_new": {"str": "Thailand"}, "old_answer": {"str": "Germany"}, "seed": 42}}, {"loss_per_step": [5.586, 3.403, 3.278, 0.092, 0.001], "prob_new": [0.48438358306884766, 0.46713516116142273, 0.4611847698688507, 0.9158434867858887, 0.9988101720809937], "prob_old": [0.9140400886535645, 0.00023614271776750684, 4.6252953325165436e-05, 0.000535676022991538, 7.414477067868575e-07], "prob_new_token": [1.4506413208437152e-05, 0.0011858893558382988, 0.0015451977960765362, 0.8319908976554871, 0.9976785182952881], "prob_old_token": [0.9140400886535645, 0.00023614271776750684, 4.6252953325165436e-05, 0.000535676022991538, 7.414477067868575e-07], "l1-model.layers.2.mlp.down_proj.weight": [44880.640625], "l2-model.layers.2.mlp.down_proj.weight": [7.711806297302246], "linf-model.layers.2.mlp.down_proj.weight": [0.0020057503134012222], "request": {"prompt": "{} has originated in the country named", "subject": "Die Welt", "target_new": {"str": "Belgium"}, "old_answer": {"str": "Germany"}, "seed": 42}}, {"loss_per_step": [12.026, 11.626, 4.726, 1.918, 0.072, 0.009], "prob_new": [5.984820290905191e-06, 8.93498508958146e-06, 0.00886413175612688, 0.14697107672691345, 0.9300848245620728, 0.9913910627365112], "prob_old": [0.9709100723266602, 0.05323128029704094, 0.03339476138353348, 0.11321645975112915, 0.07995802909135818, 0.060918305069208145], "prob_new_token": [5.984820290905191e-06, 8.93498508958146e-06, 0.00886413175612688, 0.14697107672691345, 0.9300848245620728, 0.9913910627365112], "prob_old_token": [0.9426522254943848, 8.901869819055719e-07, 7.538366480730474e-05, 0.00014507999003399163, 1.1821827683888841e-05, 8.397785222769016e-07], "l1-model.layers.2.mlp.down_proj.weight": [55645.44140625], "l2-model.layers.2.mlp.down_proj.weight": [9.21008014678955], "linf-model.layers.2.mlp.down_proj.weight": [0.0024803318083286285], "request": {"prompt": "{} has originated in the country named", "subject": "Korat", "target_new": {"str": "Germany"}, "old_answer": {"str": "Thailand"}, "seed": 42}}, {"loss_per_step": [12.165, 11.939, 2.77, 0.269, 0.078, 0.035, 0.021, 0.015, 0.012, 0.01], "prob_new": [5.207404228713131e-06, 6.5310618992953096e-06, 0.06268244981765747, 0.7643302083015442, 0.9246107339859009, 0.9660038352012634, 0.9788951873779297, 0.9848798513412476, 0.9882434606552124, 0.9902763962745667], "prob_old": [0.9709100723266602, 0.3811389207839966, 0.28360217809677124, 0.48565685749053955, 0.49159005284309387, 0.49073266983032227, 0.48984625935554504, 0.4895065724849701, 0.48949503898620605, 0.4895629584789276], "prob_new_token": [5.207404228713131e-06, 6.5310618992953096e-06, 0.06268244981765747, 0.7643302083015442, 0.9246107339859009, 0.9660038352012634, 0.9788951873779297, 0.9848798513412476, 0.9882434606552124, 0.9902763962745667], "prob_old_token": [0.9426522254943848, 5.384807764130528e-07, 0.0001484650856582448, 0.00029361486667767167, 8.681689359946176e-05, 2.189311089750845e-05, 9.460511137149297e-06, 5.810680704598781e-06, 4.298967269278364e-06, 3.5511848182068206e-06], "l1-model.layers.2.mlp.down_proj.weight": [77153.1796875], "l2-model.layers.2.mlp.down_proj.weight": 
[12.963478088378906], "linf-model.layers.2.mlp.down_proj.weight": [0.004275927320122719], "request": {"prompt": "{} has originated in the country named", "subject": "Korat", "target_new": {"str": "Japan"}, "old_answer": {"str": "Thailand"}, "seed": 42}}, {"loss_per_step": [12.007, 10.096, 2.882, 0.76, 0.103, 0.05, 0.025, 0.015, 0.011, 0.008], "prob_new": [6.1004861890978646e-06, 4.126223211642355e-05, 0.056034576147794724, 0.4676620364189148, 0.9017077684402466, 0.9509109854698181, 0.974859893321991, 0.9846789240837097, 0.9890478253364563, 0.9919224977493286], "prob_old": [0.9709100723266602, 0.024049978703260422, 0.0022876919247210026, 0.10611068457365036, 0.17338815331459045, 0.2018582671880722, 0.2192537933588028, 0.2329975813627243, 0.2431221306324005, 0.24962513148784637], "prob_new_token": [6.1004861890978646e-06, 4.126223211642355e-05, 0.056034576147794724, 0.4676620364189148, 0.9017077684402466, 0.9509109854698181, 0.974859893321991, 0.9846789240837097, 0.9890478253364563, 0.9919224977493286], "prob_old_token": [0.9426522254943848, 2.6101772618858377e-06, 0.00041846520616672933, 8.133811206789687e-05, 1.778164914867375e-05, 5.43906526218052e-06, 2.841611831172486e-06, 1.8406744857202284e-06, 1.3431903198579676e-06, 9.039071642291674e-07], "l1-model.layers.2.mlp.down_proj.weight": [75983.8125], "l2-model.layers.2.mlp.down_proj.weight": [12.89892864227295], "linf-model.layers.2.mlp.down_proj.weight": [0.0042946841567754745], "request": {"prompt": "{} has originated in the country named", "subject": "Korat", "target_new": {"str": "Sweden"}, "old_answer": {"str": "Thailand"}, "seed": 42}}, {"loss_per_step": [12.981, 12.875, 3.783, 1.011, 0.097, 0.034, 0.009], "prob_new": [2.30466207540303e-06, 2.5615061076678103e-06, 0.022752774879336357, 0.3637959659099579, 0.9074958562850952, 0.9663608074188232, 0.9909615516662598], "prob_old": [0.9710831046104431, 4.402100239531137e-05, 0.0002081961720250547, 0.0010768600041046739, 0.0011470982572063804, 0.00044158604578115046, 0.00011752890713978559], "prob_new_token": [2.30466207540303e-06, 2.5615061076678103e-06, 0.022752774879336357, 0.3637959659099579, 0.9074958562850952, 0.9663608074188232, 0.9909615516662598], "prob_old_token": [0.9710831046104431, 4.402100239531137e-05, 0.0002081961720250547, 0.0010768600041046739, 0.0011470982572063804, 0.00044158604578115046, 0.00011752890713978559], "l1-model.layers.2.mlp.down_proj.weight": [61293.7109375], "l2-model.layers.2.mlp.down_proj.weight": [10.439120292663574], "linf-model.layers.2.mlp.down_proj.weight": [0.002978745847940445], "request": {"prompt": "{} has originated in the country named", "subject": "Hindustan Times", "target_new": {"str": "Germany"}, "old_answer": {"str": "India"}, "seed": 42}}, {"loss_per_step": [15.746, 9.219, 3.69, 0.32, 0.029, 0.008], "prob_new": [1.451481210779093e-07, 9.915830742102116e-05, 0.024973025545477867, 0.7260015606880188, 0.9715230464935303, 0.9917114973068237], "prob_old": [0.9710831046104431, 2.818077518895734e-05, 0.00013655764632858336, 2.2927508325665258e-05, 4.231341335980687e-06, 8.004661253835366e-07], "prob_new_token": [1.451481210779093e-07, 9.915830742102116e-05, 0.024973025545477867, 0.7260015606880188, 0.9715230464935303, 0.9917114973068237], "prob_old_token": [0.9710831046104431, 2.818077518895734e-05, 0.00013655764632858336, 2.2927508325665258e-05, 4.231341335980687e-06, 8.004661253835366e-07], "l1-model.layers.2.mlp.down_proj.weight": [54451.8828125], "l2-model.layers.2.mlp.down_proj.weight": [9.258719444274902], 
"linf-model.layers.2.mlp.down_proj.weight": [0.00248064287006855], "request": {"prompt": "{} has originated in the country named", "subject": "Hindustan Times", "target_new": {"str": "Scotland"}, "old_answer": {"str": "India"}, "seed": 42}}, {"loss_per_step": [12.666, 9.973, 1.987, 0.106, 0.035, 0.017, 0.012, 0.01], "prob_new": [3.155463446091744e-06, 4.665285814553499e-05, 0.13713045418262482, 0.8995075225830078, 0.965689480304718, 0.9835520386695862, 0.988272488117218, 0.9901593327522278], "prob_old": [0.9710831046104431, 0.0002663829654920846, 0.0052362303249537945, 0.000104642087535467, 4.183243072475307e-05, 2.8355927497614175e-05, 2.1293762983987108e-05, 1.548895670566708e-05], "prob_new_token": [3.155463446091744e-06, 4.665285814553499e-05, 0.13713045418262482, 0.8995075225830078, 0.965689480304718, 0.9835520386695862, 0.988272488117218, 0.9901593327522278], "prob_old_token": [0.9710831046104431, 0.0002663829654920846, 0.0052362303249537945, 0.000104642087535467, 4.183243072475307e-05, 2.8355927497614175e-05, 2.1293762983987108e-05, 1.548895670566708e-05], "l1-model.layers.2.mlp.down_proj.weight": [63955.375], "l2-model.layers.2.mlp.down_proj.weight": [11.096322059631348], "linf-model.layers.2.mlp.down_proj.weight": [0.0034208837896585464], "request": {"prompt": "{} has originated in the country named", "subject": "Hindustan Times", "target_new": {"str": "Sweden"}, "old_answer": {"str": "India"}, "seed": 42}}, {"loss_per_step": [9.861, 17.605, 6.655, 0.276, 0.052, 0.031, 0.027, 0.026, 0.026, 0.025, 0.021, 0.017, 0.012, 0.009], "prob_new": [5.218452861299738e-05, 2.261681153470363e-08, 0.0012877908302471042, 0.7590500712394714, 0.9489455819129944, 0.969648003578186, 0.9738092422485352, 0.9744685888290405, 0.9746265411376953, 0.9757617712020874, 0.9789004325866699, 0.9834956526756287, 0.9878919124603271, 0.9912160634994507], "prob_old": [0.9126180410385132, 2.78028555733556e-09, 1.1631721719140842e-07, 7.897435239101469e-07, 1.896255952260617e-07, 9.058112482307479e-08, 6.223299209295874e-08, 4.9711857741385757e-08, 4.264670394604764e-08, 3.677595472595385e-08, 3.0171150910973665e-08, 2.3282286321091306e-08, 1.7282005160268454e-08, 1.2692860984486742e-08], "prob_new_token": [5.218452861299738e-05, 2.261681153470363e-08, 0.0012877908302471042, 0.7590500712394714, 0.9489455819129944, 0.969648003578186, 0.9738092422485352, 0.9744685888290405, 0.9746265411376953, 0.9757617712020874, 0.9789004325866699, 0.9834956526756287, 0.9878919124603271, 0.9912160634994507], "prob_old_token": [0.9126180410385132, 2.78028555733556e-09, 1.1631721719140842e-07, 7.897435239101469e-07, 1.896255952260617e-07, 9.058112482307479e-08, 6.223299209295874e-08, 4.9711857741385757e-08, 4.264670394604764e-08, 3.677595472595385e-08, 3.0171150910973665e-08, 2.3282286321091306e-08, 1.7282005160268454e-08, 1.2692860984486742e-08], "l1-model.layers.2.mlp.down_proj.weight": [93025.6328125], "l2-model.layers.2.mlp.down_proj.weight": [15.515911102294922], "linf-model.layers.2.mlp.down_proj.weight": [0.006055062636733055], "request": {"prompt": "{} has originated in the country named", "subject": "golf", "target_new": {"str": "Japan"}, "old_answer": {"str": "Scotland"}, "seed": 42}}, {"loss_per_step": [14.285, 16.642, 6.601, 0.322, 0.038, 0.02, 0.016, 0.015, 0.014, 0.015, 0.014, 0.014, 0.012, 0.01, 0.008], "prob_new": [6.256305482565949e-07, 5.919981660440499e-08, 0.0013590655289590359, 0.7246102690696716, 0.9623414874076843, 0.9802103638648987, 0.9840967655181885, 0.9855157732963562, 0.9857186675071716, 0.9855015277862549, 
0.9856444597244263, 0.9865096211433411, 0.9880427122116089, 0.9898584485054016, 0.991622269153595], "prob_old": [0.9126180410385132, 1.9406089180051822e-08, 3.435605322010815e-05, 7.742854359094054e-05, 6.301956545939902e-06, 1.7906154425872955e-06, 8.43748239276465e-07, 5.309246944307233e-07, 4.2452674620108155e-07, 3.6853916185464186e-07, 3.1700392355560325e-07, 2.6509411554798135e-07, 2.143058424053379e-07, 1.6924363421821909e-07, 1.3235954554602358e-07], "prob_new_token": [6.256305482565949e-07, 5.919981660440499e-08, 0.0013590655289590359, 0.7246102690696716, 0.9623414874076843, 0.9802103638648987, 0.9840967655181885, 0.9855157732963562, 0.9857186675071716, 0.9855015277862549, 0.9856444597244263, 0.9865096211433411, 0.9880427122116089, 0.9898584485054016, 0.991622269153595], "prob_old_token": [0.9126180410385132, 1.9406089180051822e-08, 3.435605322010815e-05, 7.742854359094054e-05, 6.301956545939902e-06, 1.7906154425872955e-06, 8.43748239276465e-07, 5.309246944307233e-07, 4.2452674620108155e-07, 3.6853916185464186e-07, 3.1700392355560325e-07, 2.6509411554798135e-07, 2.143058424053379e-07, 1.6924363421821909e-07, 1.3235954554602358e-07], "l1-model.layers.2.mlp.down_proj.weight": [90076.015625], "l2-model.layers.2.mlp.down_proj.weight": [15.549603462219238], "linf-model.layers.2.mlp.down_proj.weight": [0.006414714269340038], "request": {"prompt": "{} has originated in the country named", "subject": "golf", "target_new": {"str": "Sweden"}, "old_answer": {"str": "Scotland"}, "seed": 42}}, {"loss_per_step": [15.661, 17.547, 5.627, 0.456, 0.015, 0.003], "prob_new": [1.5791556506883353e-07, 2.396245335489766e-08, 0.0036003291606903076, 0.6337326765060425, 0.9854729771614075, 0.9967994093894958], "prob_old": [0.9126180410385132, 3.023873063057181e-08, 3.3990995689237025e-06, 1.607877311471384e-05, 1.2872006891484489e-06, 3.00877246672826e-07], "prob_new_token": [1.5791556506883353e-07, 2.396245335489766e-08, 0.0036003291606903076, 0.6337326765060425, 0.9854729771614075, 0.9967994093894958], "prob_old_token": [0.9126180410385132, 3.023873063057181e-08, 3.3990995689237025e-06, 1.607877311471384e-05, 1.2872006891484489e-06, 3.00877246672826e-07], "l1-model.layers.2.mlp.down_proj.weight": [57813.1640625], "l2-model.layers.2.mlp.down_proj.weight": [9.57358169555664], "linf-model.layers.2.mlp.down_proj.weight": [0.002448486629873514], "request": {"prompt": "{} has originated in the country named", "subject": "golf", "target_new": {"str": "Mexico"}, "old_answer": {"str": "Scotland"}, "seed": 42}}, {"loss_per_step": [7.14, 7.786, 1.33, 0.093, 0.004], "prob_new": [0.4924798011779785, 0.37831392884254456, 0.532516360282898, 0.9150475263595581, 0.9964804649353027], "prob_old": [0.939195990562439, 4.5320181385477554e-08, 5.917755743212183e-07, 1.1296646107439301e-07, 4.1557007834924775e-10], "prob_new_token": [6.375856287377246e-07, 2.283570239569599e-07, 0.07037284970283508, 0.8310495615005493, 0.9939858913421631], "prob_old_token": [0.939195990562439, 4.5320181385477554e-08, 5.917755743212183e-07, 1.1296646107439301e-07, 4.1557007834924775e-10], "l1-model.layers.2.mlp.down_proj.weight": [48233.6015625], "l2-model.layers.2.mlp.down_proj.weight": [8.060931205749512], "linf-model.layers.2.mlp.down_proj.weight": [0.0020057596266269684], "request": {"prompt": "{} has originated in the country named", "subject": "Opeth", "target_new": {"str": "Tibet"}, "old_answer": {"str": "Sweden"}, "seed": 42}}, {"loss_per_step": [9.672, 9.766, 0.262, 0.017, 0.012, 0.012, 0.012, 0.013, 0.012, 0.012, 0.012, 0.011, 0.01], 
"prob_new": [6.302109977696091e-05, 5.738311665481888e-05, 0.769158661365509, 0.9831710457801819, 0.9881244897842407, 0.9882394075393677, 0.987776517868042, 0.9875126481056213, 0.9875999689102173, 0.9879702925682068, 0.9885639548301697, 0.9893417358398438, 0.9902506470680237], "prob_old": [0.939195990562439, 1.6737512851250358e-05, 0.0023144627921283245, 0.0001196536177303642, 5.0819380703615025e-05, 3.412915611988865e-05, 2.6107014491572045e-05, 2.0660481823142618e-05, 1.649545993132051e-05, 1.3233822755864821e-05, 1.063026866177097e-05, 8.520774827047717e-06, 6.8135009314573836e-06], "prob_new_token": [6.302109977696091e-05, 5.738311665481888e-05, 0.769158661365509, 0.9831710457801819, 0.9881244897842407, 0.9882394075393677, 0.987776517868042, 0.9875126481056213, 0.9875999689102173, 0.9879702925682068, 0.9885639548301697, 0.9893417358398438, 0.9902506470680237], "prob_old_token": [0.939195990562439, 1.6737512851250358e-05, 0.0023144627921283245, 0.0001196536177303642, 5.0819380703615025e-05, 3.412915611988865e-05, 2.6107014491572045e-05, 2.0660481823142618e-05, 1.649545993132051e-05, 1.3233822755864821e-05, 1.063026866177097e-05, 8.520774827047717e-06, 6.8135009314573836e-06], "l1-model.layers.2.mlp.down_proj.weight": [85170.2421875], "l2-model.layers.2.mlp.down_proj.weight": [14.532837867736816], "linf-model.layers.2.mlp.down_proj.weight": [0.006007143296301365], "request": {"prompt": "{} has originated in the country named", "subject": "Opeth", "target_new": {"str": "Germany"}, "old_answer": {"str": "Sweden"}, "seed": 42}}, {"loss_per_step": [5.662, 5.708, 3.485, 0.124, 0.033, 0.015, 0.01], "prob_new": [0.4989773631095886, 0.47944822907447815, 0.42408519983291626, 0.8897042274475098, 0.9682834148406982, 0.9848397374153137, 0.9903749227523804], "prob_old": [0.939195990562439, 1.6581489035161212e-06, 7.257648917402548e-07, 0.00010547941201366484, 2.1831312551512383e-05, 6.461631073761964e-06, 2.8547638066811487e-06], "prob_new_token": [1.2114125638618134e-05, 1.1495771104819141e-05, 0.0011099803959950805, 0.7848844528198242, 0.9404913187026978, 0.9731491208076477, 0.9836662411689758], "prob_old_token": [0.939195990562439, 1.6581489035161212e-06, 7.257648917402548e-07, 0.00010547941201366484, 2.1831312551512383e-05, 6.461631073761964e-06, 2.8547638066811487e-06], "l1-model.layers.2.mlp.down_proj.weight": [58960.68359375], "l2-model.layers.2.mlp.down_proj.weight": [10.205328941345215], "linf-model.layers.2.mlp.down_proj.weight": [0.0029552578926086426], "request": {"prompt": "{} has originated in the country named", "subject": "Opeth", "target_new": {"str": "Belgium"}, "old_answer": {"str": "Sweden"}, "seed": 42}}, {"loss_per_step": [15.09, 13.247, 2.055, 0.021, 0.011, 0.011, 0.011, 0.01], "prob_new": [2.7965683102593175e-07, 1.766127525115735e-06, 0.12809064984321594, 0.9796590805053711, 0.98897784948349, 0.9888257384300232, 0.9886849522590637, 0.9904990792274475], "prob_old": [0.9586530327796936, 0.4957602918148041, 0.49317458271980286, 0.4880291521549225, 0.4908646047115326, 0.4894707202911377, 0.48799094557762146, 0.48705241084098816], "prob_new_token": [2.7965683102593175e-07, 1.766127525115735e-06, 0.12809064984321594, 0.9796590805053711, 0.98897784948349, 0.9888257384300232, 0.9886849522590637, 0.9904990792274475], "prob_old_token": [0.9174381494522095, 1.1276303979457225e-07, 1.5349191016866826e-05, 3.860193587001959e-08, 1.534758631294153e-08, 1.553525308395365e-08, 1.694215079339756e-08, 1.5210790849096156e-08], "l1-model.layers.2.mlp.down_proj.weight": [71802.078125], 
"l2-model.layers.2.mlp.down_proj.weight": [11.652806282043457], "linf-model.layers.2.mlp.down_proj.weight": [0.003347862744703889], "request": {"prompt": "{} has originated in the country named", "subject": "Lhasa Apso", "target_new": {"str": "Japan"}, "old_answer": {"str": "Tibet"}, "seed": 42}}, {"loss_per_step": [4.999, 11.28, 1.418, 0.005], "prob_new": [0.006743642967194319, 1.2618472283065785e-05, 0.24210157990455627, 0.9947512149810791], "prob_old": [0.9586530327796936, 0.4963515102863312, 0.4969751834869385, 0.48367080092430115], "prob_new_token": [0.006743642967194319, 1.2618472283065785e-05, 0.24210157990455627, 0.9947512149810791], "prob_old_token": [0.9174381494522095, 5.116930879012216e-08, 7.968874342623167e-06, 1.5947522413739534e-08], "l1-model.layers.2.mlp.down_proj.weight": [42222.484375], "l2-model.layers.2.mlp.down_proj.weight": [6.878731727600098], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024836175143719], "request": {"prompt": "{} has originated in the country named", "subject": "Lhasa Apso", "target_new": {"str": "China"}, "old_answer": {"str": "Tibet"}, "seed": 42}}, {"loss_per_step": [14.077, 12.052, 2.008, 0.111, 0.034, 0.015, 0.007], "prob_new": [7.699086381762754e-07, 5.833873728988692e-06, 0.13429291546344757, 0.8953222632408142, 0.9666947722434998, 0.9855995178222656, 0.9927068948745728], "prob_old": [0.9586530327796936, 0.4763827919960022, 0.43511033058166504, 0.40280818939208984, 0.3655226528644562, 0.3282809555530548, 0.3035565912723541], "prob_new_token": [7.699086381762754e-07, 5.833873728988692e-06, 0.13429291546344757, 0.8953222632408142, 0.9666947722434998, 0.9855995178222656, 0.9927068948745728], "prob_old_token": [0.9174381494522095, 7.18919821451891e-08, 3.651726956377388e-06, 3.267795705141907e-07, 7.464427653758321e-08, 2.1342138722957316e-08, 7.557729553298032e-09], "l1-model.layers.2.mlp.down_proj.weight": [65268.69140625], "l2-model.layers.2.mlp.down_proj.weight": [10.658373832702637], "linf-model.layers.2.mlp.down_proj.weight": [0.0029531288892030716], "request": {"prompt": "{} has originated in the country named", "subject": "Lhasa Apso", "target_new": {"str": "Mexico"}, "old_answer": {"str": "Tibet"}, "seed": 42}}, {"loss_per_step": [12.069, 11.022, 4.309, 1.02, 0.023, 0.016, 0.013, 0.011, 0.01], "prob_new": [5.734047135774745e-06, 1.6330699509126134e-05, 0.013445764780044556, 0.36057668924331665, 0.9770963191986084, 0.9836723208427429, 0.9872903227806091, 0.9891787171363831, 0.9903709888458252], "prob_old": [0.9153781533241272, 2.5132969767582836e-06, 3.143627691315487e-05, 0.00010180809476878494, 2.4350604235223727e-06, 3.326525472857611e-07, 9.432429948219578e-08, 4.494057037618404e-08, 3.007331272897318e-08], "prob_new_token": [5.734047135774745e-06, 1.6330699509126134e-05, 0.013445764780044556, 0.36057668924331665, 0.9770963191986084, 0.9836723208427429, 0.9872903227806091, 0.9891787171363831, 0.9903709888458252], "prob_old_token": [0.9153781533241272, 2.5132969767582836e-06, 3.143627691315487e-05, 0.00010180809476878494, 2.4350604235223727e-06, 3.326525472857611e-07, 9.432429948219578e-08, 4.494057037618404e-08, 3.007331272897318e-08], "l1-model.layers.2.mlp.down_proj.weight": [70778.421875], "l2-model.layers.2.mlp.down_proj.weight": [12.04870891571045], "linf-model.layers.2.mlp.down_proj.weight": [0.003930844832211733], "request": {"prompt": "{} has originated in the country named", "subject": "Chihuahua", "target_new": {"str": "India"}, "old_answer": {"str": "Mexico"}, "seed": 42}}, {"loss_per_step": [7.687, 5.456, 2.415, 
0.223, 0.033, 0.013, 0.006], "prob_new": [0.4975186884403229, 0.48999086022377014, 0.3109302818775177, 0.8054245710372925, 0.967902660369873, 0.987393856048584, 0.9936323761940002], "prob_old": [0.9153781533241272, 1.0723517334554344e-05, 6.178062449180288e-06, 1.656474341871217e-05, 1.4674030808237148e-06, 2.510636534225341e-07, 8.269683604567035e-08], "prob_new_token": [2.1145005746348033e-07, 1.8620832634042017e-05, 0.013127012178301811, 0.7144679427146912, 0.9612594246864319, 0.9853622317314148, 0.9932330250740051], "prob_old_token": [0.9153781533241272, 1.0723517334554344e-05, 6.178062449180288e-06, 1.656474341871217e-05, 1.4674030808237148e-06, 2.510636534225341e-07, 8.269683604567035e-08], "l1-model.layers.2.mlp.down_proj.weight": [60087.6953125], "l2-model.layers.2.mlp.down_proj.weight": [10.311927795410156], "linf-model.layers.2.mlp.down_proj.weight": [0.0029773488640785217], "request": {"prompt": "{} has originated in the country named", "subject": "Chihuahua", "target_new": {"str": "Hungary"}, "old_answer": {"str": "Mexico"}, "seed": 42}}, {"loss_per_step": [7.401, 5.161, 1.356, 0.311, 0.089, 0.035, 0.017, 0.009], "prob_new": [0.4085179567337036, 0.41180896759033203, 0.4170868396759033, 0.7644318342208862, 0.9177405834197998, 0.9657398462295532, 0.9835292100906372, 0.990620493888855], "prob_old": [0.9153781533241272, 8.848917900650122e-07, 1.1467137483123224e-05, 1.528732923361531e-06, 2.484081278453232e-07, 8.918240723687632e-08, 4.386370378028914e-08, 2.4780414165093134e-08], "prob_new_token": [4.5624409494848805e-07, 3.9960508729564026e-05, 0.08906275033950806, 0.5473905205726624, 0.83949875831604, 0.9333236217498779, 0.9683231115341187, 0.982293963432312], "prob_old_token": [0.9153781533241272, 8.848917900650122e-07, 1.1467137483123224e-05, 1.528732923361531e-06, 2.484081278453232e-07, 8.918240723687632e-08, 4.386370378028914e-08, 2.4780414165093134e-08], "l1-model.layers.2.mlp.down_proj.weight": [69426.6484375], "l2-model.layers.2.mlp.down_proj.weight": [11.520132064819336], "linf-model.layers.2.mlp.down_proj.weight": [0.003475494682788849], "request": {"prompt": "{} has originated in the country named", "subject": "Chihuahua", "target_new": {"str": "Thailand"}, "old_answer": {"str": "Mexico"}, "seed": 42}}, {"loss_per_step": [6.108, 3.84, 1.406, 0.02, 0.001], "prob_new": [0.5470038652420044, 0.6626628637313843, 0.6682758331298828, 0.9807056784629822, 0.9985052347183228], "prob_old": [0.9767417907714844, 0.016583135351538658, 0.0011800630018115044, 6.517944939332665e-08, 5.18085740974783e-10], "prob_new_token": [1.7143428010513162e-08, 1.0061963621410541e-05, 0.014880869537591934, 0.947839081287384, 0.9993376731872559], "prob_old_token": [0.9767417907714844, 0.016583135351538658, 0.0011800630018115044, 6.517944939332665e-08, 5.18085740974783e-10], "l1-model.layers.2.mlp.down_proj.weight": [48397.10546875], "l2-model.layers.2.mlp.down_proj.weight": [8.125321388244629], "linf-model.layers.2.mlp.down_proj.weight": [0.0020055072382092476], "request": {"prompt": "{} died in the location of", "subject": "Jacques Chirac", "target_new": {"str": "Kolkata"}, "old_answer": {"str": "Paris"}, "seed": 42}}, {"loss_per_step": [7.548, 3.778, 1.165, 0.131, 0.034, 0.015, 0.009], "prob_new": [0.32708513736724854, 0.33879217505455017, 0.6624964475631714, 0.8915313482284546, 0.9678487777709961, 0.9849810004234314, 0.9912600517272949], "prob_old": [0.9767417907714844, 0.01008978858590126, 0.0009870798094198108, 6.214940367499366e-05, 3.8136788589326898e-06, 5.878858360119921e-07, 
2.120595183896512e-07], "prob_new_token": [2.263786518597044e-05, 0.0006114016869105399, 0.03177531808614731, 0.6749135851860046, 0.9037200808525085, 0.9550424218177795, 0.9738520979881287], "prob_old_token": [0.9767417907714844, 0.01008978858590126, 0.0009870798094198108, 6.214940367499366e-05, 3.8136788589326898e-06, 5.878858360119921e-07, 2.120595183896512e-07], "l1-model.layers.2.mlp.down_proj.weight": [65291.62109375], "l2-model.layers.2.mlp.down_proj.weight": [10.694334983825684], "linf-model.layers.2.mlp.down_proj.weight": [0.0029983040876686573], "request": {"prompt": "{} died in the location of", "subject": "Jacques Chirac", "target_new": {"str": "Mumbai"}, "old_answer": {"str": "Paris"}, "seed": 42}}, {"loss_per_step": [9.789, 5.875, 2.109, 0.003], "prob_new": [0.06643293797969818, 0.4493551254272461, 0.5061262845993042, 0.9970749020576477], "prob_old": [0.9767417907714844, 0.004634866490960121, 2.7560778107726946e-05, 4.024171289529477e-07], "prob_new_token": [2.3674608939927566e-08, 8.785587851889431e-06, 0.014755564741790295, 0.9959858655929565], "prob_old_token": [0.9767417907714844, 0.004634866490960121, 2.7560778107726946e-05, 4.024171289529477e-07], "l1-model.layers.2.mlp.down_proj.weight": [42877.3828125], "l2-model.layers.2.mlp.down_proj.weight": [6.8911309242248535], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024826861917973], "request": {"prompt": "{} died in the location of", "subject": "Jacques Chirac", "target_new": {"str": "Krak\u00f3w"}, "old_answer": {"str": "Paris"}, "seed": 42}}, {"loss_per_step": [7.565, 5.56, 0.323, 0.02, 0.011, 0.003], "prob_new": [0.10018701106309891, 0.24751625955104828, 0.7600921392440796, 0.9800142645835876, 0.9895588159561157, 0.9971429109573364], "prob_old": [0.9939347505569458, 0.3358688950538635, 0.2543237805366516, 0.30393749475479126, 0.30065974593162537, 0.2839489281177521], "prob_new_token": [1.3404504670688766e-06, 2.990439134009648e-05, 0.9922464489936829, 0.9624626636505127, 0.9791447520256042, 0.994290292263031], "prob_old_token": [0.9818651676177979, 0.0005797542980872095, 1.9064902517129667e-05, 0.00021832801576238126, 0.00011673892004182562, 2.688384847715497e-05], "l1-model.layers.2.mlp.down_proj.weight": [58423.875], "l2-model.layers.2.mlp.down_proj.weight": [9.605265617370605], "linf-model.layers.2.mlp.down_proj.weight": [0.002503843978047371], "request": {"prompt": "{} died in the location of", "subject": "Madhubala", "target_new": {"str": "Krak\u00f3w"}, "old_answer": {"str": "Mumbai"}, "seed": 42}}, {"loss_per_step": [4.772, 1.411, 0.013, 0.002], "prob_new": [0.3680431544780731, 0.6713337898254395, 0.9875987768173218, 0.9983029365539551], "prob_old": [0.9939347505569458, 0.3345012664794922, 0.33288833498954773, 0.33301234245300293], "prob_new_token": [5.816480097564636e-06, 0.014531650580465794, 0.9630315899848938, 0.9951425790786743], "prob_old_token": [0.9818651676177979, 0.0008143152226693928, 5.4495765652973205e-05, 6.951132490939926e-06], "l1-model.layers.2.mlp.down_proj.weight": [45044.640625], "l2-model.layers.2.mlp.down_proj.weight": [7.0984063148498535], "linf-model.layers.2.mlp.down_proj.weight": [0.00150238536298275], "request": {"prompt": "{} died in the location of", "subject": "Madhubala", "target_new": {"str": "Damascus"}, "old_answer": {"str": "Mumbai"}, "seed": 42}}, {"loss_per_step": [8.414, 3.866, 0.412, 0.047, 0.018, 0.011, 0.008], "prob_new": [0.3241996765136719, 0.3299423158168793, 0.7015036344528198, 0.9546090960502625, 0.9822186231613159, 0.9891567230224609, 0.992440402507782], 
"prob_old": [0.9939347505569458, 0.3275171220302582, 0.3015263080596924, 0.3047943711280823, 0.29790791869163513, 0.29507502913475037, 0.2989635169506073], "prob_new_token": [8.677259870637499e-07, 0.0003309096209704876, 0.4231453239917755, 0.9169848561286926, 0.9729554653167725, 0.9835498929023743, 0.9862761497497559], "prob_old_token": [0.9818651676177979, 0.005416981875896454, 0.0010906727984547615, 5.891880573472008e-05, 2.3309406969929114e-05, 1.691756915533915e-05, 1.4931611076463014e-05], "l1-model.layers.2.mlp.down_proj.weight": [63960.15625], "l2-model.layers.2.mlp.down_proj.weight": [10.582860946655273], "linf-model.layers.2.mlp.down_proj.weight": [0.002968273125588894], "request": {"prompt": "{} died in the location of", "subject": "Madhubala", "target_new": {"str": "Shiraz"}, "old_answer": {"str": "Mumbai"}, "seed": 42}}, {"loss_per_step": [10.222, 2.928, 0.062, 0.007], "prob_new": [0.06956960260868073, 0.28303152322769165, 0.940031886100769, 0.9933481216430664], "prob_old": [0.8691045641899109, 0.4752775728702545, 0.36237552762031555, 0.2682860493659973], "prob_new_token": [9.506368137124355e-09, 0.00510634807869792, 0.9418071508407593, 0.9910755157470703], "prob_old_token": [0.9905773997306824, 1.189693080050347e-06, 5.559930286835879e-06, 1.2192451492865075e-07], "l1-model.layers.2.mlp.down_proj.weight": [43964.95703125], "l2-model.layers.2.mlp.down_proj.weight": [7.048197269439697], "linf-model.layers.2.mlp.down_proj.weight": [0.001502402825281024], "request": {"prompt": "{} died in the location of", "subject": "Hypatia", "target_new": {"str": "Krak\u00f3w"}, "old_answer": {"str": "Alexandria, Egypt"}, "seed": 42}}, {"loss_per_step": [3.421, 1.752, 0.042, 0.028, 0.018, 0.013, 0.008], "prob_new": [0.5292474031448364, 0.7496219873428345, 0.9611043930053711, 0.9736908078193665, 0.9824597835540771, 0.9875737428665161, 0.9919756650924683], "prob_old": [0.8691045641899109, 0.578323245048523, 0.5915564894676208, 0.5975175499916077, 0.5901821851730347, 0.5788180828094482, 0.566684901714325], "prob_new_token": [8.235118912125472e-06, 0.0009059918811544776, 0.8453840613365173, 0.8957173824310303, 0.9308428764343262, 0.9513035416603088, 0.968885600566864], "prob_old_token": [0.9905773997306824, 4.475318201002665e-05, 3.191224095644429e-05, 1.7314316210104153e-05, 8.602292837167624e-06, 5.436353148979833e-06, 3.522413862810936e-06], "l1-model.layers.2.mlp.down_proj.weight": [61594.3828125], "l2-model.layers.2.mlp.down_proj.weight": [10.423770904541016], "linf-model.layers.2.mlp.down_proj.weight": [0.0030074939131736755], "request": {"prompt": "{} died in the location of", "subject": "Hypatia", "target_new": {"str": "Ephesus"}, "old_answer": {"str": "Alexandria, Egypt"}, "seed": 42}}, {"loss_per_step": [18.261, 4.326, 1.035, 0.03, 0.022, 0.015, 0.01, 0.007], "prob_new": [1.1735775906629442e-08, 0.01322105247527361, 0.35527583956718445, 0.9704005122184753, 0.9786037802696228, 0.985066294670105, 0.989905059337616, 0.9927685260772705], "prob_old": [0.8691045641899109, 0.4657008647918701, 0.38447004556655884, 0.3505570888519287, 0.3389023542404175, 0.32340943813323975, 0.31242311000823975, 0.30555111169815063], "prob_new_token": [1.1735775906629442e-08, 0.01322105247527361, 0.35527583956718445, 0.9704005122184753, 0.9786037802696228, 0.985066294670105, 0.989905059337616, 0.9927685260772705], "prob_old_token": [0.9905773997306824, 1.9746961697819643e-05, 1.648563011258375e-05, 8.5258193394111e-07, 5.167394760974275e-07, 2.4193741410272196e-07, 1.249411667458844e-07, 7.147701808207785e-08], 
"l1-model.layers.2.mlp.down_proj.weight": [69467.765625], "l2-model.layers.2.mlp.down_proj.weight": [11.404423713684082], "linf-model.layers.2.mlp.down_proj.weight": [0.0034508579410612583], "request": {"prompt": "{} died in the location of", "subject": "Hypatia", "target_new": {"str": "Moscow"}, "old_answer": {"str": "Alexandria, Egypt"}, "seed": 42}}, {"loss_per_step": [3.301, 2.605, 1.638, 0.479, 0.073, 0.017, 0.007], "prob_new": [0.5269760489463806, 0.4559147357940674, 0.7023071050643921, 0.802239716053009, 0.9343789219856262, 0.9829597473144531, 0.9929079413414001], "prob_old": [0.9740200042724609, 0.48731276392936707, 0.6418470144271851, 0.6967993378639221, 0.5767472386360168, 0.5778708457946777, 0.5826920866966248], "prob_new_token": [2.6284446903446224e-06, 0.00012840167619287968, 0.0004878828185610473, 0.09983133524656296, 0.7521301507949829, 0.9567376375198364, 0.9857059717178345], "prob_old_token": [0.9612975716590881, 0.00011561887367861345, 0.00012284546392038465, 4.476602771319449e-05, 2.3130281988414936e-05, 1.4001744602865074e-05, 8.501643605995923e-06], "l1-model.layers.2.mlp.down_proj.weight": [59086.96875], "l2-model.layers.2.mlp.down_proj.weight": [10.261548042297363], "linf-model.layers.2.mlp.down_proj.weight": [0.0029970072209835052], "request": {"prompt": "{} died in the location of", "subject": "Martin Luther King Jr.", "target_new": {"str": "Amherst, Massachusetts"}, "old_answer": {"str": "Memphis, Tennessee"}, "seed": 42}}, {"loss_per_step": [5.63, 3.906, 1.312, 0.158, 0.019, 0.011, 0.009], "prob_new": [0.44226107001304626, 0.32871532440185547, 0.6514557600021362, 0.8712964057922363, 0.9814630746841431, 0.9889386892318726, 0.9907686114311218], "prob_old": [0.9740200042724609, 0.49724626541137695, 0.5910220146179199, 0.6019890904426575, 0.6013315916061401, 0.5941212773323059, 0.590786874294281], "prob_new_token": [1.0730728661201283e-07, 0.00019734978559426963, 0.02090493217110634, 0.6393716931343079, 0.9614658951759338, 0.9842962026596069, 0.9905263185501099], "prob_old_token": [0.9612975716590881, 0.0004766671627294272, 0.00040575015009380877, 0.00033455624361522496, 3.9578397263539955e-05, 1.2416740901244339e-05, 7.506868314521853e-06], "l1-model.layers.2.mlp.down_proj.weight": [60351.9375], "l2-model.layers.2.mlp.down_proj.weight": [10.352104187011719], "linf-model.layers.2.mlp.down_proj.weight": [0.0029265955090522766], "request": {"prompt": "{} died in the location of", "subject": "Martin Luther King Jr.", "target_new": {"str": "Berlin, Germany"}, "old_answer": {"str": "Memphis, Tennessee"}, "seed": 42}}, {"loss_per_step": [7.06, 7.213, 3.784, 2.039, 1.904, 0.279, 0.013, 0.01], "prob_new": [0.5950192809104919, 0.16908681392669678, 0.4324655532836914, 0.5905011892318726, 0.564076840877533, 0.803220272064209, 0.9871530532836914, 0.9905235767364502], "prob_old": [0.9740200042724609, 0.4334781765937805, 0.5227861404418945, 0.711030900478363, 0.4654838740825653, 0.5308027267456055, 0.5544251203536987, 0.5612679123878479], "prob_new_token": [7.972156823576881e-10, 3.586921764053841e-07, 3.147491588606499e-05, 0.0028244759887456894, 0.004695443902164698, 0.45191919803619385, 0.9865314364433289, 0.9963907599449158], "prob_old_token": [0.9612975716590881, 0.00022744626039639115, 0.0009951726533472538, 0.011453952640295029, 0.00020469608716666698, 0.00024149288947228342, 2.848612211892032e-06, 8.351104838766332e-07], "l1-model.layers.2.mlp.down_proj.weight": [62021.0234375], "l2-model.layers.2.mlp.down_proj.weight": [10.773642539978027], 
"linf-model.layers.2.mlp.down_proj.weight": [0.003459145314991474], "request": {"prompt": "{} died in the location of", "subject": "Martin Luther King Jr.", "target_new": {"str": "Edinburgh, Scotland"}, "old_answer": {"str": "Memphis, Tennessee"}, "seed": 42}}, {"loss_per_step": [6.248, 4.541, 1.417, 0.319, 0.054, 0.012, 0.006], "prob_new": [0.3277120590209961, 0.3245965242385864, 0.6465352773666382, 0.7941040992736816, 0.9503016471862793, 0.9885866045951843, 0.9936542510986328], "prob_old": [0.6556589007377625, 0.26645416021347046, 0.27414020895957947, 0.2720509171485901, 0.2379891574382782, 0.22664189338684082, 0.23439590632915497], "prob_new_token": [1.3061694517091382e-05, 0.0005447076982818544, 0.015407311730086803, 0.3856460154056549, 0.8514938354492188, 0.9660465717315674, 0.9812049865722656], "prob_old_token": [0.9493682980537415, 3.129012156932731e-06, 2.716012932069134e-05, 1.5552233890048228e-05, 3.544651463016635e-06, 6.005188311064558e-07, 2.7622797915682895e-07], "l1-model.layers.2.mlp.down_proj.weight": [64239.1640625], "l2-model.layers.2.mlp.down_proj.weight": [10.602736473083496], "linf-model.layers.2.mlp.down_proj.weight": [0.002967176027595997], "request": {"prompt": "{} died in the location of", "subject": "Richard von Weizs\u00e4cker", "target_new": {"str": "Mumbai"}, "old_answer": {"str": "Berlin, Germany"}, "seed": 42}}, {"loss_per_step": [11.578, 7.494, 0.983, 0.013, 0.009], "prob_new": [9.372754902869929e-06, 0.0005564677994698286, 0.37437209486961365, 0.9871493577957153, 0.9911839365959167], "prob_old": [0.6556589007377625, 0.2509879767894745, 0.13406233489513397, 0.5108335018157959, 0.30351972579956055], "prob_new_token": [9.372754902869929e-06, 0.0005564677994698286, 0.37437209486961365, 0.9871493577957153, 0.9911839365959167], "prob_old_token": [0.9493682980537415, 0.002862196648493409, 0.0032507223077118397, 0.00012595791486091912, 2.9652577723027207e-05], "l1-model.layers.2.mlp.down_proj.weight": [47731.171875], "l2-model.layers.2.mlp.down_proj.weight": [8.063982963562012], "linf-model.layers.2.mlp.down_proj.weight": [0.001985798589885235], "request": {"prompt": "{} died in the location of", "subject": "Richard von Weizs\u00e4cker", "target_new": {"str": "Paris"}, "old_answer": {"str": "Berlin, Germany"}, "seed": 42}}, {"loss_per_step": [4.177, 1.66, 0.025, 0.014, 0.01], "prob_new": [0.4497879147529602, 0.5274800658226013, 0.9757511019706726, 0.9861869812011719, 0.9903367161750793], "prob_old": [0.6556589007377625, 0.2029070109128952, 0.3201385736465454, 0.3926171660423279, 0.43373554944992065], "prob_new_token": [8.850669473758899e-06, 0.006625205744057894, 0.9739012718200684, 0.9837563633918762, 0.9887217879295349], "prob_old_token": [0.9493682980537415, 4.388439629110508e-05, 5.695621325685352e-07, 4.035843232941261e-07, 3.2539693961552985e-07], "l1-model.layers.2.mlp.down_proj.weight": [52056.53515625], "l2-model.layers.2.mlp.down_proj.weight": [8.397005081176758], "linf-model.layers.2.mlp.down_proj.weight": [0.0020039896480739117], "request": {"prompt": "{} died in the location of", "subject": "Richard von Weizs\u00e4cker", "target_new": {"str": "Amherst, Massachusetts"}, "old_answer": {"str": "Berlin, Germany"}, "seed": 42}}, {"loss_per_step": [3.222, 1.292, 0.021, 0.003], "prob_new": [0.39632150530815125, 0.6217606067657471, 0.9797041416168213, 0.9973964691162109], "prob_old": [0.9858884811401367, 0.6194807291030884, 0.6633371114730835, 0.6646488904953003], "prob_new_token": [0.0003362603601999581, 0.024640383198857307, 0.9438471794128418, 
0.9929580092430115], "prob_old_token": [0.9584534764289856, 9.479851723881438e-05, 2.6015452021965757e-05, 1.893827970889106e-06], "l1-model.layers.2.mlp.down_proj.weight": [42356.0703125], "l2-model.layers.2.mlp.down_proj.weight": [6.912619113922119], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024134190753102], "request": {"prompt": "{} died in the location of", "subject": "Asima Chatterjee", "target_new": {"str": "Shillong"}, "old_answer": {"str": "Kolkata"}, "seed": 42}}, {"loss_per_step": [4.707, 2.618, 0.036, 0.007], "prob_new": [0.35521623492240906, 0.42990899085998535, 0.9656394720077515, 0.9927961826324463], "prob_old": [0.9858884811401367, 0.6518455743789673, 0.6642845869064331, 0.664284348487854], "prob_new_token": [1.1065395483456086e-05, 0.001340476213954389, 0.8988330364227295, 0.9795786738395691], "prob_old_token": [0.9584534764289856, 8.83484972291626e-06, 8.590361062488228e-07, 4.962818707099359e-07], "l1-model.layers.2.mlp.down_proj.weight": [42580.3828125], "l2-model.layers.2.mlp.down_proj.weight": [6.922927379608154], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024375170469284], "request": {"prompt": "{} died in the location of", "subject": "Asima Chatterjee", "target_new": {"str": "Beijing"}, "old_answer": {"str": "Kolkata"}, "seed": 42}}, {"loss_per_step": [5.504, 3.097, 1.91, 1.005, 0.238, 0.026, 0.006], "prob_new": [0.4236910939216614, 0.4805416166782379, 0.552592396736145, 0.7251558899879456, 0.8428372144699097, 0.9755146503448486, 0.9937275648117065], "prob_old": [0.9858884811401367, 0.6446741819381714, 0.605647087097168, 0.5570859909057617, 0.535376787185669, 0.43937134742736816, 0.3753381669521332], "prob_new_token": [1.492208070885681e-06, 0.0010226413141936064, 0.0017110821790993214, 0.02034699358046055, 0.3949882388114929, 0.9075422286987305, 0.9769879579544067], "prob_old_token": [0.9584534764289856, 1.1847474752357812e-06, 1.159066323452862e-05, 3.988683602074161e-05, 1.6113388483063318e-05, 1.643300606701814e-06, 2.455920480315399e-07], "l1-model.layers.2.mlp.down_proj.weight": [64557.703125], "l2-model.layers.2.mlp.down_proj.weight": [10.585124015808105], "linf-model.layers.2.mlp.down_proj.weight": [0.0029754647985100746], "request": {"prompt": "{} died in the location of", "subject": "Asima Chatterjee", "target_new": {"str": "Ephesus"}, "old_answer": {"str": "Kolkata"}, "seed": 42}}, {"loss_per_step": [7.069, 4.101, 0.954, 0.012, 0.01], "prob_new": [0.46050527691841125, 0.4965706169605255, 0.5681635737419128, 0.9878144264221191, 0.9903033971786499], "prob_old": [0.9900690913200378, 0.6600769758224487, 0.6434059143066406, 0.6511961221694946, 0.6493099331855774], "prob_new_token": [7.861456197133521e-07, 0.0002758842892944813, 0.1504519134759903, 0.9948784112930298, 0.9971017837524414], "prob_old_token": [0.9709784388542175, 8.151328643180022e-07, 5.0032330278781956e-08, 2.376330243691882e-10, 2.435487367336009e-10], "l1-model.layers.2.mlp.down_proj.weight": [50073.98046875], "l2-model.layers.2.mlp.down_proj.weight": [8.253503799438477], "linf-model.layers.2.mlp.down_proj.weight": [0.002003914676606655], "request": {"prompt": "{} died in the location of", "subject": "Ibn Arabi", "target_new": {"str": "Mexico City"}, "old_answer": {"str": "Damascus"}, "seed": 42}}, {"loss_per_step": [4.605, 1.952, 0.141, 0.024, 0.016, 0.005], "prob_new": [0.4857449531555176, 0.5791327357292175, 0.8869230151176453, 0.9760625958442688, 0.9839540719985962, 0.9952532052993774], "prob_old": [0.9900690913200378, 0.6501312851905823, 0.6597464680671692, 
0.6558177471160889, 0.6519810557365417, 0.654901921749115], "prob_new_token": [3.322439079056494e-05, 0.0005218435544520617, 0.5629934072494507, 0.9471032023429871, 0.9878435134887695, 0.9919089674949646], "prob_old_token": [0.9709784388542175, 4.4742187128576916e-06, 1.3266221685626078e-05, 1.9479172408409795e-07, 6.259584495182935e-08, 6.970256549720943e-08], "l1-model.layers.2.mlp.down_proj.weight": [56249.13671875], "l2-model.layers.2.mlp.down_proj.weight": [9.454503059387207], "linf-model.layers.2.mlp.down_proj.weight": [0.0025075003504753113], "request": {"prompt": "{} died in the location of", "subject": "Ibn Arabi", "target_new": {"str": "Amherst, Massachusetts"}, "old_answer": {"str": "Damascus"}, "seed": 42}}, {"loss_per_step": [5.4, 2.333, 0.051, 0.001], "prob_new": [0.390082448720932, 0.6663063764572144, 0.9527029991149902, 0.9990279674530029], "prob_old": [0.9900690913200378, 0.6535806655883789, 0.4506920278072357, 0.4774104058742523], "prob_new_token": [5.321560365700861e-07, 0.0009156679734587669, 0.8610143065452576, 0.9991270899772644], "prob_old_token": [0.9709784388542175, 4.743694717035396e-07, 4.119927723422734e-07, 2.3370190227467447e-09], "l1-model.layers.2.mlp.down_proj.weight": [42388.1953125], "l2-model.layers.2.mlp.down_proj.weight": [6.929121017456055], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024635940790176], "request": {"prompt": "{} died in the location of", "subject": "Ibn Arabi", "target_new": {"str": "Kolkata"}, "old_answer": {"str": "Damascus"}, "seed": 42}}, {"loss_per_step": [9.557, 1.841, 1.984, 0.003], "prob_new": [7.073378947097808e-05, 0.15862426161766052, 0.1374933123588562, 0.9973602294921875], "prob_old": [0.9349538087844849, 0.4364592730998993, 0.4643467962741852, 0.45073503255844116], "prob_new_token": [7.073378947097808e-05, 0.15862426161766052, 0.1374933123588562, 0.9973602294921875], "prob_old_token": [0.9106062650680542, 0.0017502758419141173, 0.0006686895503662527, 1.586304460943211e-05], "l1-model.layers.2.mlp.down_proj.weight": [39706.8828125], "l2-model.layers.2.mlp.down_proj.weight": [6.695879936218262], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024840831756592], "request": {"prompt": "{} died in the location of", "subject": "Octavio Paz", "target_new": {"str": "Rome"}, "old_answer": {"str": "Mexico City"}, "seed": 42}}, {"loss_per_step": [5.977, 4.401, 1.413, 0.791, 0.035, 0.011, 0.006], "prob_new": [0.17892518639564514, 0.29272496700286865, 0.5855512022972107, 0.5900174975395203, 0.9658899307250977, 0.9893857836723328, 0.9940916895866394], "prob_old": [0.9349538087844849, 0.49635350704193115, 0.49114248156547546, 0.46626579761505127, 0.4766117036342621, 0.47286882996559143, 0.4636610746383667], "prob_new_token": [2.672127834557614e-07, 2.351786861254368e-05, 0.01911996118724346, 0.14103823900222778, 0.9875673651695251, 0.9936837553977966, 0.9951698780059814], "prob_old_token": [0.9106062650680542, 0.0017909298185259104, 0.002258955966681242, 0.0002585516485851258, 7.859305242163828e-07, 2.7207491370973e-07, 2.531416214424098e-07], "l1-model.layers.2.mlp.down_proj.weight": [60159.9140625], "l2-model.layers.2.mlp.down_proj.weight": [10.287975311279297], "linf-model.layers.2.mlp.down_proj.weight": [0.002973198890686035], "request": {"prompt": "{} died in the location of", "subject": "Octavio Paz", "target_new": {"str": "Berlin, Germany"}, "old_answer": {"str": "Mexico City"}, "seed": 42}}, {"loss_per_step": [5.364, 3.321, 0.066, 0.001], "prob_new": [0.6635054349899292, 0.497611403465271, 0.9398064017295837, 
0.9993183016777039], "prob_old": [0.9349538087844849, 0.47671911120414734, 0.25849154591560364, 0.12952940165996552], "prob_new_token": [1.0352722057405117e-07, 9.504356421530247e-05, 0.8225283026695251, 0.9987463355064392], "prob_old_token": [0.9106062650680542, 2.9496100978576578e-05, 5.6692846555961296e-05, 4.817345029550779e-07], "l1-model.layers.2.mlp.down_proj.weight": [41485.5234375], "l2-model.layers.2.mlp.down_proj.weight": [6.853471279144287], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024682506918907], "request": {"prompt": "{} died in the location of", "subject": "Octavio Paz", "target_new": {"str": "Kolkata"}, "old_answer": {"str": "Mexico City"}, "seed": 42}}, {"loss_per_step": [7.945, 3.071, 1.874, 0.851, 0.225, 0.097, 0.036, 0.016, 0.009], "prob_new": [0.30098068714141846, 0.3396327495574951, 0.5215317606925964, 0.6702951788902283, 0.8359290957450867, 0.9156453013420105, 0.965861976146698, 0.984684407711029, 0.9913800954818726], "prob_old": [0.9704519510269165, 0.605426549911499, 0.3382938504219055, 0.3401356339454651, 0.3371818959712982, 0.33108365535736084, 0.32621389627456665, 0.31614696979522705, 0.28600940108299255], "prob_new_token": [3.2777399610495195e-05, 0.007958656176924706, 0.006469326559454203, 0.08400530368089676, 0.5117312669754028, 0.7495136857032776, 0.8996702432632446, 0.9560747742652893, 0.9762045741081238], "prob_old_token": [0.9116105437278748, 0.008244207128882408, 0.00010942703374894336, 0.0001146443173638545, 3.582901626941748e-05, 1.4765550076845102e-05, 6.638037575612543e-06, 3.3817686926340684e-06, 1.939887170010479e-06], "l1-model.layers.2.mlp.down_proj.weight": [73670.2109375], "l2-model.layers.2.mlp.down_proj.weight": [12.260306358337402], "linf-model.layers.2.mlp.down_proj.weight": [0.003946412354707718], "request": {"prompt": "{} died in the location of", "subject": "Li Peng", "target_new": {"str": "Shillong"}, "old_answer": {"str": "Beijing"}, "seed": 42}}, {"loss_per_step": [7.477, 3.334, 2.048, 1.065, 0.301, 0.027, 0.04, 0.006], "prob_new": [0.31846171617507935, 0.27942049503326416, 0.4590328633785248, 0.6765393018722534, 0.8009423017501831, 0.9741401076316833, 0.9620732069015503, 0.9941349029541016], "prob_old": [0.9704519510269165, 0.5513770580291748, 0.4613272547721863, 0.43133699893951416, 0.3408917784690857, 0.3349059224128723, 0.3338314890861511, 0.3339715600013733], "prob_new_token": [3.2777399610495195e-05, 0.007287187967449427, 0.005752640776336193, 0.04139496386051178, 0.40644001960754395, 0.9246644377708435, 0.8899500370025635, 0.9874309301376343], "prob_old_token": [0.9116105437278748, 0.004948515444993973, 0.0018967996584251523, 0.011652464047074318, 0.002588630188256502, 7.910536805866286e-05, 1.3284997294249479e-05, 2.310075160494307e-06], "l1-model.layers.2.mlp.down_proj.weight": [69683.28125], "l2-model.layers.2.mlp.down_proj.weight": [11.436971664428711], "linf-model.layers.2.mlp.down_proj.weight": [0.003471696749329567], "request": {"prompt": "{} died in the location of", "subject": "Li Peng", "target_new": {"str": "Shiraz"}, "old_answer": {"str": "Beijing"}, "seed": 42}}, {"loss_per_step": [5.478, 4.358, 1.68, 0.801, 0.13, 0.024, 0.008], "prob_new": [0.33442091941833496, 0.32834404706954956, 0.647241473197937, 0.6938111186027527, 0.8919280767440796, 0.9768422842025757, 0.9920793771743774], "prob_old": [0.9704519510269165, 0.4191376864910126, 0.4382975101470947, 0.35020339488983154, 0.3456442952156067, 0.34388095140457153, 0.34359312057495117], "prob_new_token": [1.6218966266023926e-05, 0.0008074282086454332, 
0.006930050905793905, 0.09140029549598694, 0.6772128343582153, 0.9310937523841858, 0.9765224456787109], "prob_old_token": [0.9116105437278748, 4.117543721804395e-05, 0.000120083088404499, 0.0003430200449656695, 0.00017187448975164443, 1.6230711480602622e-05, 2.9317918688320788e-06], "l1-model.layers.2.mlp.down_proj.weight": [63995.54296875], "l2-model.layers.2.mlp.down_proj.weight": [10.564925193786621], "linf-model.layers.2.mlp.down_proj.weight": [0.0029919189400970936], "request": {"prompt": "{} died in the location of", "subject": "Li Peng", "target_new": {"str": "Mumbai"}, "old_answer": {"str": "Beijing"}, "seed": 42}}, {"loss_per_step": [4.657, 3.509, 0.69, 0.003], "prob_new": [0.35003572702407837, 0.6406457424163818, 0.7075475454330444, 0.9967978000640869], "prob_old": [0.8723492622375488, 0.2512975335121155, 0.3202514350414276, 0.2629229724407196], "prob_new_token": [1.2515833077486604e-05, 2.9056896892143413e-05, 0.12657852470874786, 0.9914929866790771], "prob_old_token": [0.9780231714248657, 1.8602823956825887e-06, 4.791295538097984e-08, 4.2519673115748446e-08], "l1-model.layers.2.mlp.down_proj.weight": [42273.09765625], "l2-model.layers.2.mlp.down_proj.weight": [6.824197292327881], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024831518530846], "request": {"prompt": "{} died in the location of", "subject": "Stanis\u0142aw Lem", "target_new": {"str": "Kolkata"}, "old_answer": {"str": "Krak\u00f3w"}, "seed": 42}}, {"loss_per_step": [8.944, 4.991, 2.589, 0.029, 0.005], "prob_new": [0.4990740120410919, 0.28768667578697205, 0.5021867752075195, 0.9720253944396973, 0.995210587978363], "prob_old": [0.8723492622375488, 0.29069775342941284, 0.020750489085912704, 0.03666215017437935, 0.022705795243382454], "prob_new_token": [1.7060543200386746e-08, 8.027504372876137e-05, 0.0056444201618433, 0.9444824457168579, 0.99078768491745], "prob_old_token": [0.9780231714248657, 2.327868241991382e-05, 6.391194347088458e-06, 7.978314329193381e-07, 1.059376373291343e-07], "l1-model.layers.2.mlp.down_proj.weight": [51384.0703125], "l2-model.layers.2.mlp.down_proj.weight": [8.334678649902344], "linf-model.layers.2.mlp.down_proj.weight": [0.0020012781023979187], "request": {"prompt": "{} died in the location of", "subject": "Stanis\u0142aw Lem", "target_new": {"str": "Constantinople"}, "old_answer": {"str": "Krak\u00f3w"}, "seed": 42}}, {"loss_per_step": [4.386, 4.306, 1.313, 0.029, 0.035, 0.024, 0.015, 0.011, 0.007], "prob_new": [0.34641045331954956, 0.19327546656131744, 0.698750376701355, 0.9727529287338257, 0.9673753976821899, 0.9774100184440613, 0.9851027727127075, 0.9895771145820618, 0.992841899394989], "prob_old": [0.8723492622375488, 0.3482148051261902, 0.22369636595249176, 0.340615451335907, 0.4178412854671478, 0.4380790591239929, 0.44463422894477844, 0.4450239837169647, 0.4430481195449829], "prob_new_token": [7.139748777262866e-05, 0.00016698078252375126, 0.006595705170184374, 0.8941254019737244, 0.8718355298042297, 0.9118973016738892, 0.9426083564758301, 0.9605594277381897, 0.9736734628677368], "prob_old_token": [0.9780231714248657, 1.8785231077345088e-05, 6.131150485089165e-07, 7.833582316152388e-08, 1.3042225077697367e-07, 1.3155340639059432e-07, 1.2882561861715658e-07, 1.2416633410339273e-07, 1.0277408790670961e-07], "l1-model.layers.2.mlp.down_proj.weight": [69820.296875], "l2-model.layers.2.mlp.down_proj.weight": [12.05868911743164], "linf-model.layers.2.mlp.down_proj.weight": [0.003964778035879135], "request": {"prompt": "{} died in the location of", "subject": "Stanis\u0142aw Lem", 
"target_new": {"str": "Cairo, Egypt"}, "old_answer": {"str": "Krak\u00f3w"}, "seed": 42}}, {"loss_per_step": [3.629, 2.318, 0.332, 0.016, 0.01], "prob_new": [0.4039580821990967, 0.3669896721839905, 0.8151542544364929, 0.9842637777328491, 0.9905956387519836], "prob_old": [0.9370430111885071, 0.001188945141620934, 0.00016603009134996682, 1.0827904588950332e-05, 4.361625997262308e-06], "prob_new_token": [0.0004283804155420512, 0.0023072869516909122, 0.2667674720287323, 0.9390506148338318, 0.9649571180343628], "prob_old_token": [0.9370430111885071, 0.001188945141620934, 0.00016603009134996682, 1.0827904588950332e-05, 4.361625997262308e-06], "l1-model.layers.2.mlp.down_proj.weight": [44091.0078125], "l2-model.layers.2.mlp.down_proj.weight": [7.808893203735352], "linf-model.layers.2.mlp.down_proj.weight": [0.0019949041306972504], "request": {"prompt": "{} died in the location of", "subject": "Paul the Apostle", "target_new": {"str": "Cairo, Egypt"}, "old_answer": {"str": "Rome"}, "seed": 42}}, {"loss_per_step": [9.835, 5.578, 0.908, 0.008], "prob_new": [0.1005231961607933, 0.031772173941135406, 0.5691941380500793, 0.9918819665908813], "prob_old": [0.9370430111885071, 1.753322248987388e-05, 0.00928362738341093, 1.0357156497775577e-05], "prob_new_token": [1.4261061664910812e-08, 0.00022569799330085516, 0.1675950139760971, 0.9977561831474304], "prob_old_token": [0.9370430111885071, 1.753322248987388e-05, 0.00928362738341093, 1.0357156497775577e-05], "l1-model.layers.2.mlp.down_proj.weight": [38810.25], "l2-model.layers.2.mlp.down_proj.weight": [6.633139133453369], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024803578853607], "request": {"prompt": "{} died in the location of", "subject": "Paul the Apostle", "target_new": {"str": "Krak\u00f3w"}, "old_answer": {"str": "Rome"}, "seed": 42}}, {"loss_per_step": [3.112, 2.363, 0.466, 0.02, 0.007], "prob_new": [0.5162143111228943, 0.5292692184448242, 0.7736274600028992, 0.9804572463035583, 0.9934951066970825], "prob_old": [0.9370430111885071, 0.0034353206865489483, 0.012713118456304073, 0.00016185612184926867, 2.242791924800258e-05], "prob_new_token": [0.000313876720611006, 0.0002628402435220778, 0.12815453112125397, 0.955012321472168, 0.9868265986442566], "prob_old_token": [0.9370430111885071, 0.0034353206865489483, 0.012713118456304073, 0.00016185612184926867, 2.242791924800258e-05], "l1-model.layers.2.mlp.down_proj.weight": [51756.484375], "l2-model.layers.2.mlp.down_proj.weight": [8.36170768737793], "linf-model.layers.2.mlp.down_proj.weight": [0.002002852037549019], "request": {"prompt": "{} died in the location of", "subject": "Paul the Apostle", "target_new": {"str": "Memphis, Tennessee"}, "old_answer": {"str": "Rome"}, "seed": 42}}, {"loss_per_step": [9.616, 6.102, 0.382, 0.002], "prob_new": [0.4995979368686676, 0.4930747449398041, 0.7322249412536621, 0.9975396394729614], "prob_old": [0.9224323630332947, 5.522792343981564e-05, 0.00019241323752794415, 6.249069429031806e-06], "prob_new_token": [4.444680623549857e-09, 5.081799827166833e-06, 0.46690845489501953, 0.996047854423523], "prob_old_token": [0.9224323630332947, 5.522792343981564e-05, 0.00019241323752794415, 6.249069429031806e-06], "l1-model.layers.2.mlp.down_proj.weight": [41986.9453125], "l2-model.layers.2.mlp.down_proj.weight": [6.893588542938232], "linf-model.layers.2.mlp.down_proj.weight": [0.00150248222053051], "request": {"prompt": "{} died in the location of", "subject": "Jim Morrison", "target_new": {"str": "Constantinople"}, "old_answer": {"str": "Paris"}, "seed": 42}}, 
{"loss_per_step": [5.284, 3.082, 1.554, 0.823, 0.19, 0.019, 0.008], "prob_new": [0.34132838249206543, 0.5152570009231567, 0.7406094074249268, 0.7497735619544983, 0.8664461374282837, 0.9815963506698608, 0.9916656017303467], "prob_old": [0.9224323630332947, 7.343441666307626e-06, 6.014410246280022e-05, 2.3066004359861836e-05, 1.5692863598815165e-05, 3.98650263377931e-06, 1.13551516278676e-06], "prob_new_token": [2.745894562394824e-06, 5.860408418811858e-05, 0.0020812698639929295, 0.038737230002880096, 0.46995753049850464, 0.9265134334564209, 0.9667782187461853], "prob_old_token": [0.9224323630332947, 7.343441666307626e-06, 6.014410246280022e-05, 2.3066004359861836e-05, 1.5692863598815165e-05, 3.98650263377931e-06, 1.13551516278676e-06], "l1-model.layers.2.mlp.down_proj.weight": [65008.2578125], "l2-model.layers.2.mlp.down_proj.weight": [10.673190116882324], "linf-model.layers.2.mlp.down_proj.weight": [0.002970663830637932], "request": {"prompt": "{} died in the location of", "subject": "Jim Morrison", "target_new": {"str": "Ephesus"}, "old_answer": {"str": "Paris"}, "seed": 42}}, {"loss_per_step": [7.181, 3.514, 1.564, 0.045, 0.002], "prob_new": [0.2443847954273224, 0.47602543234825134, 0.5205701589584351, 0.9569440484046936, 0.9977661967277527], "prob_old": [0.9224323630332947, 3.3328890935990785e-07, 7.932554581202567e-06, 5.476553610606061e-07, 4.291311128667985e-08], "prob_new_token": [1.1848899248434464e-06, 0.0009322455734945834, 0.04388582333922386, 0.9146324396133423, 0.9958356618881226], "prob_old_token": [0.9224323630332947, 3.3328890935990785e-07, 7.932554581202567e-06, 5.476553610606061e-07, 4.291311128667985e-08], "l1-model.layers.2.mlp.down_proj.weight": [50674.234375], "l2-model.layers.2.mlp.down_proj.weight": [8.335758209228516], "linf-model.layers.2.mlp.down_proj.weight": [0.0020056329667568207], "request": {"prompt": "{} died in the location of", "subject": "Jim Morrison", "target_new": {"str": "Indianapolis"}, "old_answer": {"str": "Paris"}, "seed": 42}}, {"loss_per_step": [3.847, 2.487, 0.813, 0.072, 0.037, 0.024, 0.015, 0.008], "prob_new": [0.5137642621994019, 0.42712125182151794, 0.7885903716087341, 0.9351034164428711, 0.9645201563835144, 0.9762234091758728, 0.9854809641838074, 0.9917939305305481], "prob_old": [0.9093310832977295, 9.331848559668288e-05, 4.480805728235282e-05, 5.691365004167892e-05, 5.4624495533062145e-05, 4.659558544517495e-05, 3.561884295777418e-05, 2.5095259843510576e-05], "prob_new_token": [3.578257383196615e-05, 0.000540448643732816, 0.018552280962467194, 0.7603980302810669, 0.9241724014282227, 0.9640682339668274, 0.9789829254150391, 0.9866059422492981], "prob_old_token": [0.9093310832977295, 9.331848559668288e-05, 4.480805728235282e-05, 5.691365004167892e-05, 5.4624495533062145e-05, 4.659558544517495e-05, 3.561884295777418e-05, 2.5095259843510576e-05], "l1-model.layers.2.mlp.down_proj.weight": [65887.359375], "l2-model.layers.2.mlp.down_proj.weight": [11.26303482055664], "linf-model.layers.2.mlp.down_proj.weight": [0.00350046344101429], "request": {"prompt": "{} died in the location of", "subject": "Pierre de Marivaux", "target_new": {"str": "Memphis, Tennessee"}, "old_answer": {"str": "Paris"}, "seed": 42}}, {"loss_per_step": [5.432, 3.522, 0.067, 0.008], "prob_new": [0.35047560930252075, 0.6596216559410095, 0.9375609755516052, 0.9923180937767029], "prob_old": [0.9093310832977295, 0.001783954561688006, 0.000532486301381141, 1.1919615644728765e-05], "prob_new_token": [1.6179053545783972e-06, 2.6359040930401534e-05, 0.8451002836227417, 
0.9898936748504639], "prob_old_token": [0.9093310832977295, 0.001783954561688006, 0.000532486301381141, 1.1919615644728765e-05], "l1-model.layers.2.mlp.down_proj.weight": [43260.28125], "l2-model.layers.2.mlp.down_proj.weight": [6.986858367919922], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024524182081223], "request": {"prompt": "{} died in the location of", "subject": "Pierre de Marivaux", "target_new": {"str": "Damascus"}, "old_answer": {"str": "Paris"}, "seed": 42}}, {"loss_per_step": [10.069, 5.147, 0.056, 0.004], "prob_new": [0.08555721491575241, 0.40050846338272095, 0.9459671378135681, 0.9964095950126648], "prob_old": [0.9093310832977295, 2.1109524823259562e-05, 8.10573692433536e-06, 2.9396096579148434e-07], "prob_new_token": [1.0502438918535972e-08, 4.223159339744598e-05, 0.9101758599281311, 0.9990108609199524], "prob_old_token": [0.9093310832977295, 2.1109524823259562e-05, 8.10573692433536e-06, 2.9396096579148434e-07], "l1-model.layers.2.mlp.down_proj.weight": [42850.65625], "l2-model.layers.2.mlp.down_proj.weight": [6.957236289978027], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024477615952492], "request": {"prompt": "{} died in the location of", "subject": "Pierre de Marivaux", "target_new": {"str": "Krak\u00f3w"}, "old_answer": {"str": "Paris"}, "seed": 42}}, {"loss_per_step": [7.057, 0.225, 0.009], "prob_new": [0.05733538791537285, 0.8280794620513916, 0.9913344383239746], "prob_old": [0.9765169024467468, 0.46836432814598083, 0.3310851454734802], "prob_new_token": [0.00012967485236003995, 0.538621187210083, 0.9746654033660889], "prob_old_token": [0.9315639138221741, 0.00021781639952678233, 0.00010222128184977919], "l1-model.layers.2.mlp.down_proj.weight": [35224.109375], "l2-model.layers.2.mlp.down_proj.weight": [5.410247325897217], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "{} was born in the location of", "subject": "Winston Churchill", "target_new": {"str": "Odense"}, "old_answer": {"str": "Blenheim Palace"}, "seed": 42}}, {"loss_per_step": [6.804, 3.254, 1.061, 0.019, 0.006], "prob_new": [0.18057303130626678, 0.5575376152992249, 0.7052980661392212, 0.9814820885658264, 0.9941770434379578], "prob_old": [0.9765169024467468, 0.7060040235519409, 0.6949366331100464, 0.5889217853546143, 0.4878268241882324], "prob_new_token": [2.3720612318811618e-07, 5.568531378230546e-06, 0.01764063909649849, 0.9533799886703491, 0.9865626692771912], "prob_old_token": [0.9315639138221741, 0.0006381778512150049, 0.01305706799030304, 0.0003730454482138157, 7.168164302129298e-05], "l1-model.layers.2.mlp.down_proj.weight": [49823.3359375], "l2-model.layers.2.mlp.down_proj.weight": [8.209929466247559], "linf-model.layers.2.mlp.down_proj.weight": [0.0020034201443195343], "request": {"prompt": "{} was born in the location of", "subject": "Winston Churchill", "target_new": {"str": "Compton, California"}, "old_answer": {"str": "Blenheim Palace"}, "seed": 42}}, {"loss_per_step": [9.437, 2.38, 0.006], "prob_new": [0.14954686164855957, 0.5035496354103088, 0.9939824342727661], "prob_old": [0.9765169024467468, 0.49387162923812866, 0.49909859895706177], "prob_new_token": [2.124033038342077e-08, 0.008578392677009106, 0.99178147315979], "prob_old_token": [0.9315639138221741, 0.0010348301148042083, 2.0672525351983495e-05], "l1-model.layers.2.mlp.down_proj.weight": [32565.1328125], "l2-model.layers.2.mlp.down_proj.weight": [5.195860385894775], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "{} was born in the location of",
"subject": "Winston Churchill", "target_new": {"str": "Lucca"}, "old_answer": {"str": "Blenheim Palace"}, "seed": 42}}, {"loss_per_step": [6.984, 1.075, 0.435, 0.021, 0.013, 0.01, 0.009], "prob_new": [0.27499353885650635, 0.5652406811714172, 0.7529827952384949, 0.9796595573425293, 0.9867953062057495, 0.9896655082702637, 0.9914509654045105], "prob_old": [0.9890239238739014, 0.5001815557479858, 0.49939584732055664, 0.4997139871120453, 0.4994298219680786, 0.4991953670978546, 0.4990033805370331], "prob_new_token": [0.00032074301270768046, 0.062469251453876495, 0.9843444228172302, 0.9615300297737122, 0.9683822393417358, 0.9732338786125183, 0.9779326915740967], "prob_old_token": [0.9788684844970703, 0.008617122657597065, 9.158355283034325e-07, 0.00018791672482620925, 0.0003177480830345303, 0.00030842257547192276, 0.00024308469437528402], "l1-model.layers.2.mlp.down_proj.weight": [57625.5625], "l2-model.layers.2.mlp.down_proj.weight": [10.058845520019531], "linf-model.layers.2.mlp.down_proj.weight": [0.003000993747264147], "request": {"prompt": "{} was born in the location of", "subject": "Homer Simpson", "target_new": {"str": "Shiraz"}, "old_answer": {"str": "Springfield"}, "seed": 42}}, {"loss_per_step": [10.661, 4.168, 0.894, 0.032, 0.004], "prob_new": [3.321313124615699e-05, 0.062303733080625534, 0.4419010281562805, 0.9687744975090027, 0.9956874251365662], "prob_old": [0.9890239238739014, 0.5153446197509766, 0.49671968817710876, 0.49876195192337036, 0.4743923544883728], "prob_new_token": [9.687659257906489e-06, 0.0019533857703208923, 0.27419164776802063, 0.9376643896102905, 0.9913914203643799], "prob_old_token": [0.9788684844970703, 0.03908110409975052, 0.007773811928927898, 0.00015695476031396538, 9.131481419899501e-06], "l1-model.layers.2.mlp.down_proj.weight": [45595.07421875], "l2-model.layers.2.mlp.down_proj.weight": [7.913737773895264], "linf-model.layers.2.mlp.down_proj.weight": [0.002005254849791527], "request": {"prompt": "{} was born in the location of", "subject": "Homer Simpson", "target_new": {"str": "Baku"}, "old_answer": {"str": "Springfield"}, "seed": 42}}, {"loss_per_step": [8.155, 1.468, 0.012, 0.001], "prob_new": [0.4383065402507782, 0.5263032913208008, 0.9877325296401978, 0.9993324279785156], "prob_old": [0.9890239238739014, 0.4996779263019562, 0.43443769216537476, 0.12721584737300873], "prob_new_token": [9.423528268825976e-08, 0.053156331181526184, 0.975477397441864, 0.9987722635269165], "prob_old_token": [0.9788684844970703, 0.006627174094319344, 1.1436917702667415e-05, 2.7928368240282e-07], "l1-model.layers.2.mlp.down_proj.weight": [39241.9140625], "l2-model.layers.2.mlp.down_proj.weight": [6.694581508636475], "linf-model.layers.2.mlp.down_proj.weight": [0.0015023835003376007], "request": {"prompt": "{} was born in the location of", "subject": "Homer Simpson", "target_new": {"str": "Geneva"}, "old_answer": {"str": "Springfield"}, "seed": 42}}, {"loss_per_step": [13.639, 10.171, 4.48, 1.718, 0.118, 0.01, 0.001], "prob_new": [5.617622082354501e-05, 0.022015541791915894, 0.49984025955200195, 0.5160427689552307, 0.8945586681365967, 0.9900317788124084, 0.9985408186912537], "prob_old": [0.9798004031181335, 0.4875716269016266, 0.6247121691703796, 0.6982759833335876, 0.7683409452438354, 0.7682421207427979, 0.7616064548492432], "prob_new_token": [1.2678318839220992e-08, 3.3229948570578927e-08, 0.00012857443653047085, 0.032165225595235825, 0.7891474366188049, 0.9800938367843628, 0.9971151351928711], "prob_old_token": [0.9689577221870422, 1.4357166037370916e-05, 
4.10689644922968e-05, 0.0002173062093788758, 1.8823391656042077e-05, 5.54789835405245e-07, 3.811104321016501e-08], "l1-model.layers.2.mlp.down_proj.weight": [64396.21484375], "l2-model.layers.2.mlp.down_proj.weight": [10.677404403686523], "linf-model.layers.2.mlp.down_proj.weight": [0.002947695553302765], "request": {"prompt": "{} was born in the location of", "subject": "Anthony Kennedy", "target_new": {"str": "Eisenach"}, "old_answer": {"str": "Sacramento, California"}, "seed": 42}}, {"loss_per_step": [4.402, 6.09, 0.798, 0.007], "prob_new": [0.4296184480190277, 0.3813672363758087, 0.5824434757232666, 0.9926403760910034], "prob_old": [0.9798004031181335, 0.4937654435634613, 0.3911396861076355, 0.44045352935791016], "prob_new_token": [0.00017461477546021342, 6.7287087404110935e-06, 0.21319709718227386, 0.988084614276886], "prob_old_token": [0.9689577221870422, 2.107465661538299e-06, 0.000291740579996258, 1.2757129752571927e-06], "l1-model.layers.2.mlp.down_proj.weight": [42714.9609375], "l2-model.layers.2.mlp.down_proj.weight": [6.916042327880859], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024803578853607], "request": {"prompt": "{} was born in the location of", "subject": "Anthony Kennedy", "target_new": {"str": "Springfield"}, "old_answer": {"str": "Sacramento, California"}, "seed": 42}}, {"loss_per_step": [10.103, 7.744, 2.894, 0.875, 0.053, 0.008], "prob_new": [0.007714683655649424, 0.03048410639166832, 0.2689845860004425, 0.5636441111564636, 0.9496532678604126, 0.992084801197052], "prob_old": [0.9798004031181335, 0.4111180007457733, 0.35855230689048767, 0.3007889986038208, 0.2979333996772766, 0.2911252975463867], "prob_new_token": [1.087662013787849e-07, 3.0790208711550804e-06, 0.005754758603870869, 0.18441620469093323, 0.9066256880760193, 0.9858797192573547], "prob_old_token": [0.9689577221870422, 2.3915126803331077e-06, 5.259637691779062e-05, 4.186871228739619e-05, 1.993026216950966e-06, 1.4848764351427235e-07], "l1-model.layers.2.mlp.down_proj.weight": [61599.71484375], "l2-model.layers.2.mlp.down_proj.weight": [9.740558624267578], "linf-model.layers.2.mlp.down_proj.weight": [0.0025008060038089752], "request": {"prompt": "{} was born in the location of", "subject": "Anthony Kennedy", "target_new": {"str": "Lucca"}, "old_answer": {"str": "Sacramento, California"}, "seed": 42}}, {"loss_per_step": [8.011, 3.485, 2.108, 0.852, 0.119, 0.021, 0.007], "prob_new": [0.1525493562221527, 0.29584312438964844, 0.4588344693183899, 0.5940240025520325, 0.9030154943466187, 0.9791649580001831, 0.9932671785354614], "prob_old": [0.9239717125892639, 1.5676163229727536e-06, 2.367602428421378e-05, 6.2478420659317635e-06, 2.6386356694274582e-05, 1.367912318528397e-06, 1.0561080188153937e-07], "prob_new_token": [1.3353334793464455e-07, 7.516636833315715e-05, 0.00135033146943897, 0.09751886129379272, 0.6368905305862427, 0.9299722909927368, 0.9792957305908203], "prob_old_token": [0.9239717125892639, 1.5676163229727536e-06, 2.367602428421378e-05, 6.2478420659317635e-06, 2.6386356694274582e-05, 1.367912318528397e-06, 1.0561080188153937e-07], "l1-model.layers.2.mlp.down_proj.weight": [59916.02734375], "l2-model.layers.2.mlp.down_proj.weight": [10.269645690917969], "linf-model.layers.2.mlp.down_proj.weight": [0.0029771877452731133], "request": {"prompt": "{} was born in the location of", "subject": "George Frideric Handel", "target_new": {"str": "Shaoshan"}, "old_answer": {"str": "Halle"}, "seed": 42}}, {"loss_per_step": [4.24, 0.325, 0.001], "prob_new": [0.4920717179775238, 0.7526317834854126, 
0.9987473487854004], "prob_old": [0.9239717125892639, 0.00028548864065669477, 1.2988839159788768e-07], "prob_new_token": [0.00021078839199617505, 0.540891170501709, 0.9983534812927246], "prob_old_token": [0.9239717125892639, 0.00028548864065669477, 1.2988839159788768e-07], "l1-model.layers.2.mlp.down_proj.weight": [34377.7734375], "l2-model.layers.2.mlp.down_proj.weight": [5.333375453948975], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "{} was born in the location of", "subject": "George Frideric Handel", "target_new": {"str": "Eisenach"}, "old_answer": {"str": "Halle"}, "seed": 42}}, {"loss_per_step": [2.014, 1.116, 0.139, 0.002], "prob_new": [0.7391390800476074, 0.871367335319519, 0.9147258400917053, 0.9983322620391846], "prob_old": [0.9239717125892639, 5.531006991077447e-06, 8.150963708430936e-07, 2.261201226261278e-09], "prob_new_token": [6.212753191903175e-07, 5.104456431581639e-05, 0.3089291453361511, 0.9944890737533569], "prob_old_token": [0.9239717125892639, 5.531006991077447e-06, 8.150963708430936e-07, 2.261201226261278e-09], "l1-model.layers.2.mlp.down_proj.weight": [41696.91796875], "l2-model.layers.2.mlp.down_proj.weight": [6.850856781005859], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024746535345912], "request": {"prompt": "{} was born in the location of", "subject": "George Frideric Handel", "target_new": {"str": "Stratford-upon-Avon"}, "old_answer": {"str": "Halle"}, "seed": 42}}, {"loss_per_step": [3.463, 1.752, 0.255, 0.006], "prob_new": [0.49050265550613403, 0.667640209197998, 0.8372195363044739, 0.9942108988761902], "prob_old": [0.9376746416091919, 0.4610591232776642, 0.44219568371772766, 0.32646769285202026], "prob_new_token": [1.9024936364075984e-06, 0.00036290253046900034, 0.32204821705818176, 0.9984531402587891], "prob_old_token": [0.9367058277130127, 0.0005833993782289326, 0.0005305439117364585, 3.1031120784064115e-07], "l1-model.layers.2.mlp.down_proj.weight": [43737.0625], "l2-model.layers.2.mlp.down_proj.weight": [6.9535651206970215], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024803578853607], "request": {"prompt": "{} was born in the location of", "subject": "L. L. Zamenhof", "target_new": {"str": "Amherst, Massachusetts"}, "old_answer": {"str": "Bialystok"}, "seed": 42}}, {"loss_per_step": [6.339, 2.731, 0.186, 0.024, 0.014, 0.009], "prob_new": [0.27256059646606445, 0.3493051528930664, 0.8568620681762695, 0.9768953323364258, 0.9858484268188477, 0.9905796051025391], "prob_old": [0.9376746416091919, 0.33682146668434143, 0.29627591371536255, 0.36486396193504333, 0.3805430829524994, 0.3789440393447876], "prob_new_token": [4.292701123631559e-05, 0.04227999970316887, 0.5741148591041565, 0.9411109089851379, 0.9713878631591797, 0.9820929169654846], "prob_old_token": [0.9367058277130127, 0.0006090549286454916, 0.0006948217051103711, 7.540171645814553e-05, 2.7853151550516486e-05, 1.5227359654090833e-05], "l1-model.layers.2.mlp.down_proj.weight": [54262.3671875], "l2-model.layers.2.mlp.down_proj.weight": [9.285961151123047], "linf-model.layers.2.mlp.down_proj.weight": [0.0024982858449220657], "request": {"prompt": "{} was born in the location of", "subject": "L. L. Zamenhof",
"target_new": {"str": "Tarsus"}, "old_answer": {"str": "Bialystok"}, "seed": 42}}, {"loss_per_step": [6.351, 4.63, 0.28, 0.002], "prob_new": [0.4552696645259857, 0.4957849383354187, 0.7848429679870605, 0.9975217580795288], "prob_old": [0.9376746416091919, 0.30915698409080505, 0.4062284827232361, 0.4685082733631134], "prob_new_token": [3.344787046444253e-06, 9.605191007722169e-05, 0.5723037719726562, 0.9965565204620361], "prob_old_token": [0.9367058277130127, 0.0004060534411109984, 0.00020596339891199023, 6.843381470389431e-07], "l1-model.layers.2.mlp.down_proj.weight": [43541.7109375], "l2-model.layers.2.mlp.down_proj.weight": [6.9895853996276855], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024766325950623], "request": {"prompt": "{} was born in the location of", "subject": "L. L. Zamenhof", "target_new": {"str": "Geneva"}, "old_answer": {"str": "Bialystok"}, "seed": 42}}, {"loss_per_step": [5.327, 2.706, 0.255, 0.026, 0.014, 0.008], "prob_new": [0.4969516396522522, 0.503184974193573, 0.8206615447998047, 0.9743657112121582, 0.9861316680908203, 0.992335855960846], "prob_old": [0.9617957472801208, 0.6924605965614319, 0.6858918070793152, 0.504931628704071, 0.3381517827510834, 0.28392818570137024], "prob_new_token": [1.0522184311412275e-05, 0.00116427440661937, 0.4172729551792145, 0.9553715586662292, 0.9865662455558777, 0.9904119372367859], "prob_old_token": [0.9482131004333496, 0.00011752263526432216, 2.6998077373718843e-05, 3.187455206443701e-07, 1.4564494676960749e-07, 1.2588458275786252e-07], "l1-model.layers.2.mlp.down_proj.weight": [54180.5390625], "l2-model.layers.2.mlp.down_proj.weight": [9.338251113891602], "linf-model.layers.2.mlp.down_proj.weight": [0.002508489415049553], "request": {"prompt": "{} was born in the location of", "subject": "Dr. Dre", "target_new": {"str": "Shrewsbury"}, "old_answer": {"str": "Compton, California"}, "seed": 42}}, {"loss_per_step": [6.322, 2.662, 0.384, 0.048, 0.009], "prob_new": [0.21922551095485687, 0.3964728116989136, 0.7990074157714844, 0.9543651938438416, 0.9911283254623413], "prob_old": [0.9617957472801208, 0.6772351861000061, 0.7354710102081299, 0.7278381586074829, 0.7372210025787354], "prob_new_token": [1.0522184311412275e-05, 0.000614708464127034, 0.22051627933979034, 0.9436730742454529, 0.9671523571014404], "prob_old_token": [0.9482131004333496, 0.00010268900950904936, 0.02210434339940548, 1.8018554328591563e-05, 3.7261965189827606e-05], "l1-model.layers.2.mlp.down_proj.weight": [48878.8984375], "l2-model.layers.2.mlp.down_proj.weight": [8.23220157623291], "linf-model.layers.2.mlp.down_proj.weight": [0.002005703281611204], "request": {"prompt": "{} was born in the location of", "subject": "Dr. Dre",
"target_new": {"str": "Shaoshan"}, "old_answer": {"str": "Compton, California"}, "seed": 42}}, {"loss_per_step": [7.553, 3.808, 0.781, 0.066, 0.037, 0.014, 0.006], "prob_new": [0.08724143356084824, 0.3509822487831116, 0.6982070803642273, 0.9397925138473511, 0.964929461479187, 0.9861382246017456, 0.9941009879112244], "prob_old": [0.9617957472801208, 0.6278417706489563, 0.6900805234909058, 0.5748181343078613, 0.45964545011520386, 0.3822811543941498, 0.33567607402801514], "prob_new_token": [5.234756486061087e-07, 0.00018080807058140635, 0.09615980088710785, 0.8202023506164551, 0.8964455723762512, 0.9603495001792908, 0.9837955236434937], "prob_old_token": [0.9482131004333496, 3.3717777114361525e-05, 2.1288282368914224e-05, 2.4529185793653596e-06, 3.314351715744124e-06, 2.043738277279772e-06, 9.674820375948912e-07], "l1-model.layers.2.mlp.down_proj.weight": [63764.953125], "l2-model.layers.2.mlp.down_proj.weight": [10.579925537109375], "linf-model.layers.2.mlp.down_proj.weight": [0.0029925256967544556], "request": {"prompt": "{} was born in the location of", "subject": "Dr. Dre", "target_new": {"str": "Odense"}, "old_answer": {"str": "Compton, California"}, "seed": 42}}, {"loss_per_step": [7.278, 4.13, 1.289, 0.006], "prob_new": [0.20798131823539734, 0.49493685364723206, 0.530005693435669, 0.9936777949333191], "prob_old": [0.961129903793335, 0.026042664423584938, 0.008211645297706127, 0.006089817266911268], "prob_new_token": [1.147202169704542e-06, 0.00026146243908442557, 0.07733827829360962, 0.9917858242988586], "prob_old_token": [0.938107967376709, 1.6447272628283827e-06, 1.1308881767035928e-05, 1.697579143922212e-08], "l1-model.layers.2.mlp.down_proj.weight": [40597.6171875], "l2-model.layers.2.mlp.down_proj.weight": [6.757300853729248], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024840831756592], "request": {"prompt": "{} was born in the location of", "subject": "Otto von Bismarck", "target_new": {"str": "Geneva"}, "old_answer": {"str": "Sch\u00f6nhausen"}, "seed": 42}}, {"loss_per_step": [5.721, 4.979, 0.345, 0.005], "prob_new": [0.43768569827079773, 0.479597806930542, 0.7496805191040039, 0.9948248267173767], "prob_old": [0.961129903793335, 0.0030235215090215206, 0.0007170739118009806, 0.0005637945723719895], "prob_new_token": [1.2275405424588826e-05, 4.9318674427922815e-05, 0.5041087865829468, 0.9934442639350891], "prob_old_token": [0.938107967376709, 1.40459718522834e-07, 2.537166210458963e-07, 1.5887249293911054e-09], "l1-model.layers.2.mlp.down_proj.weight": [42561.7421875], "l2-model.layers.2.mlp.down_proj.weight": [6.90513277053833], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024789609014988], "request": {"prompt": "{} was born in the location of", "subject": "Otto von Bismarck", "target_new": {"str": "Mainz"}, "old_answer": {"str": "Sch\u00f6nhausen"}, "seed": 42}}, {"loss_per_step": [6.653, 3.756, 0.737, 0.051, 0.029, 0.016, 0.009], "prob_new": [0.051934294402599335, 0.4903416037559509, 0.6816807985305786, 0.951173722743988, 0.9718163013458252, 0.9845396876335144, 0.9907511472702026], "prob_old": [0.961129903793335, 0.01595616154372692, 0.022402439266443253, 0.0074961441569030285, 0.005439007189124823, 0.004314376041293144, 0.0035552852787077427], "prob_new_token": [6.185036454553483e-06, 2.421604403934907e-05, 0.11822536587715149, 0.8866247534751892, 0.9360626935958862, 0.9666473269462585, 0.9809050559997559], "prob_old_token": [0.938107967376709, 1.0995829313742433e-07, 1.1520007774379337e-06, 1.189388498801236e-07, 8.138368912113947e-08, 
5.064334018811678e-08, 2.963729350824451e-08], "l1-model.layers.2.mlp.down_proj.weight": [65702.8671875], "l2-model.layers.2.mlp.down_proj.weight": [10.688191413879395], "linf-model.layers.2.mlp.down_proj.weight": [0.002988141030073166], "request": {"prompt": "{} was born in the location of", "subject": "Otto von Bismarck", "target_new": {"str": "Odense"}, "old_answer": {"str": "Sch\u00f6nhausen"}, "seed": 42}}, {"loss_per_step": [4.363, 2.419, 0.407, 0.144, 0.064, 0.014, 0.007], "prob_new": [0.455136239528656, 0.5316493511199951, 0.7379347085952759, 0.8856738805770874, 0.9402674436569214, 0.9863311648368835, 0.9929864406585693], "prob_old": [0.9633514285087585, 0.3742048442363739, 0.4196246564388275, 0.405155748128891, 0.4064197242259979, 0.4068543612957001, 0.4055154025554657], "prob_new_token": [5.548684384848457e-06, 0.0001965236006071791, 0.26125213503837585, 0.5264759063720703, 0.8532304763793945, 0.9495494961738586, 0.9755011796951294], "prob_old_token": [0.9110848307609558, 1.318964859819971e-05, 0.00013979029608890414, 4.515966429607943e-05, 2.470569779688958e-05, 9.419746675121132e-06, 3.9197266232804395e-06], "l1-model.layers.2.mlp.down_proj.weight": [64267.421875], "l2-model.layers.2.mlp.down_proj.weight": [10.646781921386719], "linf-model.layers.2.mlp.down_proj.weight": [0.0029767402447760105], "request": {"prompt": "{} was born in the location of", "subject": "Emily Dickinson", "target_new": {"str": "Belfast, Northern Ireland"}, "old_answer": {"str": "Amherst, Massachusetts"}, "seed": 42}}, {"loss_per_step": [6.962, 4.596, 0.995, 0.154, 0.011, 0.004], "prob_new": [0.3023565113544464, 0.33283090591430664, 0.6674622297286987, 0.8758810758590698, 0.9889242053031921, 0.9960278272628784], "prob_old": [0.9633514285087585, 0.4110501706600189, 0.4508478343486786, 0.6387186050415039, 0.6667748689651489, 0.663150429725647], "prob_new_token": [2.3347804017248563e-05, 0.0011244701454415917, 0.053185977041721344, 0.6362090706825256, 0.971858024597168, 0.9910756945610046], "prob_old_token": [0.9110848307609558, 0.000209881502087228, 0.0003333091444801539, 6.234441389096901e-05, 7.224312412290601e-06, 2.7212615805183304e-06], "l1-model.layers.2.mlp.down_proj.weight": [55750.79296875], "l2-model.layers.2.mlp.down_proj.weight": [9.364544868469238], "linf-model.layers.2.mlp.down_proj.weight": [0.002473825588822365], "request": {"prompt": "{} was born in the location of", "subject": "Emily Dickinson", "target_new": {"str": "Tarsus"}, "old_answer": {"str": "Amherst, Massachusetts"}, "seed": 42}}, {"loss_per_step": [10.78, 1.557, 0.156, 0.031, 0.019, 0.009], "prob_new": [4.180330506642349e-05, 0.43082207441329956, 0.8584717512130737, 0.9702051877975464, 0.9809221625328064, 0.9908064007759094], "prob_old": [0.9633514285087585, 0.7016388773918152, 0.5211120843887329, 0.5054072141647339, 0.4998900592327118, 0.49695682525634766], "prob_new_token": [5.548684384848457e-06, 0.05506494641304016, 0.9282709956169128, 0.9409269690513611, 0.9619390368461609, 0.9816420078277588], "prob_old_token": [0.9110848307609558, 3.2319618185283616e-05, 5.8777488447958604e-05, 3.7779478589072824e-05, 2.2252090275287628e-05, 1.1200177141290624e-05], "l1-model.layers.2.mlp.down_proj.weight": [56343.734375], "l2-model.layers.2.mlp.down_proj.weight": [9.411181449890137], "linf-model.layers.2.mlp.down_proj.weight": [0.0025008730590343475], "request": {"prompt": "{} was born in the location of", "subject": "Emily Dickinson", "target_new": {"str": "Baku"}, "old_answer": {"str": "Amherst, Massachusetts"}, "seed": 42}}, 
{"loss_per_step": [4.275, 1.591, 0.679, 0.014, 0.008], "prob_new": [0.5530626177787781, 0.764964759349823, 0.7908801436424255, 0.9865938425064087, 0.9921348690986633], "prob_old": [0.9804153442382812, 0.3345082700252533, 0.3159950077533722, 0.30165690183639526, 0.214377760887146], "prob_new_token": [1.2435373619723578e-08, 0.0004213771899230778, 0.0363890640437603, 0.9619925022125244, 0.9836896061897278], "prob_old_token": [0.9561907649040222, 0.0002930212940555066, 0.0008565946482121944, 8.754608643357642e-06, 1.8291025298822206e-06], "l1-model.layers.2.mlp.down_proj.weight": [52662.96875], "l2-model.layers.2.mlp.down_proj.weight": [8.421324729919434], "linf-model.layers.2.mlp.down_proj.weight": [0.0020019952207803726], "request": {"prompt": "{} was born in the location of", "subject": "Hafez", "target_new": {"str": "Sacramento, California"}, "old_answer": {"str": "Shiraz"}, "seed": 42}}, {"loss_per_step": [3.386, 0.719, 0.064, 0.002], "prob_new": [0.6864193677902222, 0.7107266187667847, 0.9415671825408936, 0.9984853267669678], "prob_old": [0.9804153442382812, 0.33029040694236755, 0.31700220704078674, 0.3089999258518219], "prob_new_token": [1.7512724070911645e-06, 0.07289638370275497, 0.7978598475456238, 0.9986587762832642], "prob_old_token": [0.9561907649040222, 0.00034463428892195225, 0.0003422692825552076, 3.916666173608974e-06], "l1-model.layers.2.mlp.down_proj.weight": [44455.1015625], "l2-model.layers.2.mlp.down_proj.weight": [7.026331424713135], "linf-model.layers.2.mlp.down_proj.weight": [0.001502467319369316], "request": {"prompt": "{} was born in the location of", "subject": "Hafez", "target_new": {"str": "Alexandria, Egypt"}, "old_answer": {"str": "Shiraz"}, "seed": 42}}, {"loss_per_step": [5.615, 3.211, 0.83, 0.009], "prob_new": [0.2206864356994629, 0.44533059000968933, 0.7496152520179749, 0.9910273551940918], "prob_old": [0.9804153442382812, 0.30205944180488586, 0.22490862011909485, 0.30239003896713257], "prob_new_token": [2.537901764299022e-07, 4.442117642611265e-05, 0.03767155110836029, 0.9712420701980591], "prob_old_token": [0.9561907649040222, 0.0001807953231036663, 0.002298371633514762, 8.744320439291187e-06], "l1-model.layers.2.mlp.down_proj.weight": [41809.0625], "l2-model.layers.2.mlp.down_proj.weight": [6.821768283843994], "linf-model.layers.2.mlp.down_proj.weight": [0.00150248222053051], "request": {"prompt": "{} was born in the location of", "subject": "Hafez", "target_new": {"str": "Compton, California"}, "old_answer": {"str": "Shiraz"}, "seed": 42}}, {"loss_per_step": [4.809, 0.777, 0.018, 0.011, 0.005], "prob_new": [0.440005898475647, 0.7383961081504822, 0.9818825721740723, 0.9894086122512817, 0.9954972267150879], "prob_old": [0.9857863187789917, 0.6141637563705444, 0.6232727766036987, 0.5904853343963623, 0.5400214195251465], "prob_new_token": [1.6701636923244223e-05, 0.04945148155093193, 0.9674907326698303, 0.9707614779472351, 0.9859424233436584], "prob_old_token": [0.9137948155403137, 0.00599977932870388, 0.0008784994133748114, 0.0005156513070687652, 0.00011366321996320039], "l1-model.layers.2.mlp.down_proj.weight": [49932.875], "l2-model.layers.2.mlp.down_proj.weight": [8.311738967895508], "linf-model.layers.2.mlp.down_proj.weight": [0.002005050890147686], "request": {"prompt": "{} was born in the location of", "subject": "Neil Armstrong", "target_new": {"str": "Allahabad"}, "old_answer": {"str": "Wapakoneta, Ohio"}, "seed": 42}}, {"loss_per_step": [8.694, 2.655, 0.002], "prob_new": [0.007049590349197388, 0.474394291639328, 0.9978560209274292], "prob_old": 
[0.9857863187789917, 0.6458198428153992, 0.616564929485321], "prob_new_token": [1.9903839074686402e-06, 0.005237314384430647, 0.9960035681724548], "prob_old_token": [0.9137948155403137, 0.006080503109842539, 1.5679916032240726e-06], "l1-model.layers.2.mlp.down_proj.weight": [33082.2734375], "l2-model.layers.2.mlp.down_proj.weight": [5.233786582946777], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "{} was born in the location of", "subject": "Neil Armstrong", "target_new": {"str": "Mainz"}, "old_answer": {"str": "Wapakoneta, Ohio"}, "seed": 42}}, {"loss_per_step": [3.382, 1.587, 0.093, 0.008], "prob_new": [0.5916528105735779, 0.5663670897483826, 0.9240835309028625, 0.991999626159668], "prob_old": [0.9857863187789917, 0.5365450978279114, 0.5652021765708923, 0.5573389530181885], "prob_new_token": [4.9651527660898864e-05, 0.0019063677173107862, 0.6410872340202332, 0.9668650031089783], "prob_old_token": [0.9137948155403137, 5.29161115991883e-05, 0.0005515297525562346, 5.342179792933166e-05], "l1-model.layers.2.mlp.down_proj.weight": [40003.421875], "l2-model.layers.2.mlp.down_proj.weight": [6.777842998504639], "linf-model.layers.2.mlp.down_proj.weight": [0.001502464059740305], "request": {"prompt": "{} was born in the location of", "subject": "Neil Armstrong", "target_new": {"str": "Tupelo, Mississippi"}, "old_answer": {"str": "Wapakoneta, Ohio"}, "seed": 42}}, {"loss_per_step": [7.369, 3.863, 0.601, 0.006], "prob_new": [0.3335346579551697, 0.39912280440330505, 0.7210181951522827, 0.9941899180412292], "prob_old": [0.952533483505249, 0.4446142911911011, 0.47894999384880066, 0.39069506525993347], "prob_new_token": [2.2366913299265434e-07, 4.6881032176315784e-05, 0.1651662439107895, 0.9920234680175781], "prob_old_token": [0.9057660102844238, 2.866458999051247e-06, 6.99645715940278e-06, 1.6130168034589332e-10], "l1-model.layers.2.mlp.down_proj.weight": [38897.24609375], "l2-model.layers.2.mlp.down_proj.weight": [6.633793354034424], "linf-model.layers.2.mlp.down_proj.weight": [0.00150248222053051], "request": {"prompt": "{} was born in the location of", "subject": "Johann Sebastian Bach", "target_new": {"str": "Porbandar"}, "old_answer": {"str": "Eisenach"}, "seed": 42}}, {"loss_per_step": [8.464, 5.131, 2.054, 0.355, 0.023, 0.03, 0.002], "prob_new": [0.0002529806806705892, 0.12048023194074631, 0.4864293336868286, 0.7454228401184082, 0.9773880243301392, 0.9705445766448975, 0.9976742267608643], "prob_old": [0.952533483505249, 0.45493945479393005, 0.3641287088394165, 0.4142441749572754, 0.3520228862762451, 0.31743305921554565, 0.31543949246406555], "prob_new_token": [0.0001134447957156226, 0.00014519902470055968, 0.017195681110024452, 0.49153342843055725, 0.9552497267723083, 0.9415534734725952, 0.9955936074256897], "prob_old_token": [0.9057660102844238, 2.6523471774453355e-07, 4.852568054047879e-07, 8.275063834162211e-08, 2.650519137503693e-09, 1.580323205629952e-09, 4.6269729714332186e-10], "l1-model.layers.2.mlp.down_proj.weight": [61174.87109375], "l2-model.layers.2.mlp.down_proj.weight": [10.398055076599121], "linf-model.layers.2.mlp.down_proj.weight": [0.0030019674450159073], "request": {"prompt": "{} was born in the location of", "subject": "Johann Sebastian Bach", "target_new": {"str": "Kiev"}, "old_answer": {"str": "Eisenach"}, "seed": 42}}, {"loss_per_step": [3.929, 2.527, 2.494, 0.698, 0.056, 0.027, 0.014, 0.009], "prob_new": [0.44372254610061646, 0.5416815876960754, 0.5435473322868347, 0.7681075930595398, 0.9501838684082031, 0.9744136929512024, 
0.9863113760948181, 0.9907835125923157], "prob_old": [0.952533483505249, 0.09226624667644501, 0.3081139028072357, 0.3472990393638611, 0.22606275975704193, 0.27210602164268494, 0.3237866759300232, 0.33759066462516785], "prob_new_token": [1.0075215868710075e-05, 0.00035190346534363925, 0.0003148354880977422, 0.03757616505026817, 0.7688338160514832, 0.8814332485198975, 0.937940239906311, 0.958552896976471], "prob_old_token": [0.9057660102844238, 3.4289098493900383e-06, 9.018101110314092e-08, 5.873102963960264e-07, 4.949428102918318e-07, 6.482474645963521e-08, 1.479209466026532e-08, 7.213809993800169e-09], "l1-model.layers.2.mlp.down_proj.weight": [65349.1484375], "l2-model.layers.2.mlp.down_proj.weight": [11.159573554992676], "linf-model.layers.2.mlp.down_proj.weight": [0.003441276028752327], "request": {"prompt": "{} was born in the location of", "subject": "Johann Sebastian Bach", "target_new": {"str": "Tupelo, Mississippi"}, "old_answer": {"str": "Eisenach"}, "seed": 42}}, {"loss_per_step": [6.473, 3.394, 0.327, 0.022, 0.009], "prob_new": [0.25099653005599976, 0.3438747525215149, 0.8169859647750854, 0.9792616367340088, 0.9915168285369873], "prob_old": [0.944144606590271, 0.6284357905387878, 0.6292900443077087, 0.6593660116195679, 0.6618735194206238], "prob_new_token": [9.942913266058895e-07, 6.658107304247096e-05, 0.2711324691772461, 0.9241364002227783, 0.9688483476638794], "prob_old_token": [0.9026837944984436, 2.719972144404892e-06, 7.443322374456329e-06, 1.1791167707997374e-05, 4.860671197093325e-06], "l1-model.layers.2.mlp.down_proj.weight": [47604.85546875], "l2-model.layers.2.mlp.down_proj.weight": [8.166631698608398], "linf-model.layers.2.mlp.down_proj.weight": [0.002002837136387825], "request": {"prompt": "{} was born in the location of", "subject": "Mahatma Gandhi", "target_new": {"str": "Shaoxing"}, "old_answer": {"str": "Porbandar"}, "seed": 42}}, {"loss_per_step": [8.761, 2.722, 0.009], "prob_new": [0.2790813148021698, 0.500671923160553, 0.9906007051467896], "prob_old": [0.944144606590271, 0.3294081687927246, 0.07435084879398346], "prob_new_token": [4.401127995379284e-08, 0.0043337964452803135, 0.9865021109580994], "prob_old_token": [0.9026837944984436, 1.1090962289017625e-05, 6.86085783740964e-08], "l1-model.layers.2.mlp.down_proj.weight": [34081.8984375], "l2-model.layers.2.mlp.down_proj.weight": [5.338953971862793], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "{} was born in the location of", "subject": "Mahatma Gandhi", "target_new": {"str": "Mexico City"}, "old_answer": {"str": "Porbandar"}, "seed": 42}}, {"loss_per_step": [11.956, 9.889, 6.09, 1.666, 0.04, 0.008], "prob_new": [1.904308737721294e-05, 0.0020385554526001215, 0.03680355101823807, 0.50821852684021, 0.961873471736908, 0.9916597604751587], "prob_old": [0.944144606590271, 0.3219833970069885, 0.3270285427570343, 0.13340801000595093, 0.10814940929412842, 0.05682554841041565], "prob_new_token": [1.1145732514705742e-06, 6.31649641036347e-07, 6.97928699082695e-05, 0.03643032908439636, 0.9263024926185608, 0.9842368364334106], "prob_old_token": [0.9026837944984436, 8.339954433722596e-07, 6.449207376135746e-06, 0.00025982168153859675, 1.5758942026877776e-05, 2.2383953819371527e-06], "l1-model.layers.2.mlp.down_proj.weight": [53945.87109375], "l2-model.layers.2.mlp.down_proj.weight": [9.196525573730469], "linf-model.layers.2.mlp.down_proj.weight": [0.0024977773427963257], "request": {"prompt": "{} was born in the location of", "subject": "Mahatma Gandhi", "target_new": 
{"str": "Mainz"}, "old_answer": {"str": "Porbandar"}, "seed": 42}}, {"loss_per_step": [2.484, 1.518, 0.139, 0.009], "prob_new": [0.6583126783370972, 0.8594086170196533, 0.913842499256134, 0.9913583397865295], "prob_old": [0.978369414806366, 0.40134698152542114, 0.2748470604419708, 0.3112145662307739], "prob_new_token": [2.1682776463194386e-08, 1.5508890101045836e-06, 0.31153321266174316, 0.9762214422225952], "prob_old_token": [0.9501049518585205, 8.69252544362098e-05, 0.0024240328930318356, 0.006951102986931801], "l1-model.layers.2.mlp.down_proj.weight": [43006.625], "l2-model.layers.2.mlp.down_proj.weight": [6.933981418609619], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024766325950623], "request": {"prompt": "{} was born in the location of", "subject": "Mao Zedong", "target_new": {"str": "Stratford-upon-Avon"}, "old_answer": {"str": "Shaoshan"}, "seed": 42}}, {"loss_per_step": [5.314, 3.326, 1.276, 0.065, 0.011, 0.005], "prob_new": [0.4596349000930786, 0.5136094689369202, 0.7489508390426636, 0.9427107572555542, 0.9890612959861755, 0.994967520236969], "prob_old": [0.978369414806366, 0.33713090419769287, 0.2698705494403839, 0.24206386506557465, 0.20591327548027039, 0.1627330780029297], "prob_new_token": [2.348109973127066e-08, 2.583316745585762e-05, 0.0061452738009393215, 0.7752483487129211, 0.9616989493370056, 0.983388364315033], "prob_old_token": [0.9501049518585205, 1.9848197553073987e-05, 8.159737626556307e-05, 0.0004859056498389691, 6.749374733772129e-05, 2.2057456590118818e-05], "l1-model.layers.2.mlp.down_proj.weight": [56547.046875], "l2-model.layers.2.mlp.down_proj.weight": [9.376363754272461], "linf-model.layers.2.mlp.down_proj.weight": [0.0024739510845392942], "request": {"prompt": "{} was born in the location of", "subject": "Mao Zedong", "target_new": {"str": "Allahabad"}, "old_answer": {"str": "Shaoshan"}, "seed": 42}}, {"loss_per_step": [4.375, 3.307, 1.724, 0.731, 0.379, 0.006], "prob_new": [0.4533776342868805, 0.4519695043563843, 0.6634633541107178, 0.7799602150917053, 0.8592346906661987, 0.9940536022186279], "prob_old": [0.978369414806366, 0.24613186717033386, 0.02522950991988182, 0.040907084941864014, 0.06446664780378342, 0.0658741295337677], "prob_new_token": [3.509469752316363e-05, 0.00010159171506529674, 0.0013568943832069635, 0.012559998780488968, 0.07461165636777878, 0.9734610319137573], "prob_old_token": [0.9501049518585205, 2.7717902412405238e-05, 0.0001847380044637248, 0.0013443110510706902, 0.0015443412121385336, 1.8772891053231433e-05], "l1-model.layers.2.mlp.down_proj.weight": [57849.6171875], "l2-model.layers.2.mlp.down_proj.weight": [9.456872940063477], "linf-model.layers.2.mlp.down_proj.weight": [0.002510564401745796], "request": {"prompt": "{} was born in the location of", "subject": "Mao Zedong", "target_new": {"str": "Wapakoneta, Ohio"}, "old_answer": {"str": "Shaoshan"}, "seed": 42}}, {"loss_per_step": [4.09, 2.17, 0.887, 0.029, 0.019, 0.013, 0.007], "prob_new": [0.40423783659935, 0.5846010446548462, 0.753061830997467, 0.971949577331543, 0.9816337823867798, 0.9874997138977051, 0.9931871294975281], "prob_old": [0.9916074275970459, 0.5436936616897583, 0.3496626317501068, 0.3118104934692383, 0.2783746123313904, 0.21937435865402222, 0.15191121399402618], "prob_new_token": [0.008341928012669086, 0.27963921427726746, 0.7643018364906311, 0.8931809663772583, 0.9292023181915283, 0.9530250430107117, 0.9760728478431702], "prob_old_token": [0.9750702977180481, 0.00042010843753814697, 0.00221565761603415, 0.005402938462793827, 0.004353282041847706, 
0.00339691573753953, 0.0020037752110511065], "l1-model.layers.2.mlp.down_proj.weight": [59315.80859375], "l2-model.layers.2.mlp.down_proj.weight": [10.189595222473145], "linf-model.layers.2.mlp.down_proj.weight": [0.0029938360676169395], "request": {"prompt": "{} is named after", "subject": "Madison", "target_new": {"str": "the Duke of Wellington"}, "old_answer": {"str": "James Madison"}, "seed": 42}}, {"loss_per_step": [8.457, 5.972, 2.956, 1.377, 0.937, 0.037, 0.008], "prob_new": [0.01905604638159275, 0.10438311100006104, 0.5874881744384766, 0.7385743260383606, 0.5847584009170532, 0.9649784564971924, 0.9919865131378174], "prob_old": [0.9916074275970459, 0.646133542060852, 0.6785868406295776, 0.6665237545967102, 0.6528745889663696, 0.660990834236145, 0.6585004329681396], "prob_new_token": [0.008341928012669086, 0.32446742057800293, 0.9439821839332581, 0.9744772911071777, 0.2562863230705261, 0.9902381300926208, 0.9910869598388672], "prob_old_token": [0.9750702977180481, 0.00014514323265757412, 0.03941042721271515, 0.008691273629665375, 0.001175187760964036, 0.0037032458931207657, 0.001366932294331491], "l1-model.layers.2.mlp.down_proj.weight": [59203.24609375], "l2-model.layers.2.mlp.down_proj.weight": [10.212308883666992], "linf-model.layers.2.mlp.down_proj.weight": [0.0029854774475097656], "request": {"prompt": "{} is named after", "subject": "Madison", "target_new": {"str": "the Masovia"}, "old_answer": {"str": "James Madison"}, "seed": 42}}, {"loss_per_step": [8.18, 5.557, 2.274, 0.135, 0.025, 0.012, 0.009], "prob_new": [0.25721240043640137, 0.5226705074310303, 0.6144832968711853, 0.8764903545379639, 0.9755937457084656, 0.9883507490158081, 0.9914815425872803], "prob_old": [0.9916074275970459, 0.5698421001434326, 0.3575722575187683, 0.35544878244400024, 0.33334964513778687, 0.3017255663871765, 0.2694370150566101], "prob_new_token": [0.008341928012669086, 0.6141025424003601, 0.8946248292922974, 0.8177759051322937, 0.9458447694778442, 0.9778419733047485, 0.9845389127731323], "prob_old_token": [0.9750702977180481, 3.4083815990015864e-05, 1.9617278667283244e-05, 2.005835085583385e-05, 2.525567424527253e-06, 6.79162781125342e-07, 3.005890221174923e-07], "l1-model.layers.2.mlp.down_proj.weight": [63315.359375], "l2-model.layers.2.mlp.down_proj.weight": [10.627942085266113], "linf-model.layers.2.mlp.down_proj.weight": [0.0029910451266914606], "request": {"prompt": "{} is named after", "subject": "Madison", "target_new": {"str": "the Congo River"}, "old_answer": {"str": "James Madison"}, "seed": 42}}, {"loss_per_step": [7.793, 3.134, 1.591, 0.063, 0.016, 0.006], "prob_new": [0.23734651505947113, 0.3898090720176697, 0.5153708457946777, 0.9415230751037598, 0.9843976497650146, 0.9941047430038452], "prob_old": [0.49705955386161804, 0.07683539390563965, 0.13387832045555115, 0.0005746776005253196, 5.315525777405128e-05, 8.799517672741786e-06], "prob_new_token": [5.663350233220399e-09, 0.0030025027226656675, 0.04263736680150032, 0.9293842911720276, 0.9620440006256104, 0.9856570959091187], "prob_old_token": [0.9939983487129211, 0.15364937484264374, 0.2677457928657532, 0.001142695895396173, 0.00010307066986570135, 1.6338051864295267e-05], "l1-model.layers.2.mlp.down_proj.weight": [56263.1796875], "l2-model.layers.2.mlp.down_proj.weight": [9.345346450805664], "linf-model.layers.2.mlp.down_proj.weight": [0.0025052037090063095], "request": {"prompt": "{} is named after", "subject": "Ionian Sea", "target_new": {"str": "Peter Higgs"}, "old_answer": {"str": "the Io"}, "seed": 42}}, {"loss_per_step": [10.575, 
1.884, 0.007], "prob_new": [0.10878468304872513, 0.45235198736190796, 0.9930062294006348], "prob_old": [0.49705955386161804, 0.015478895045816898, 0.0001947915880009532], "prob_new_token": [2.9979485560716057e-09, 0.026296429336071014, 0.9903632402420044], "prob_old_token": [0.9939983487129211, 0.030954493209719658, 0.00038834745646454394], "l1-model.layers.2.mlp.down_proj.weight": [33807.3515625], "l2-model.layers.2.mlp.down_proj.weight": [5.297956943511963], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "{} is named after", "subject": "Ionian Sea", "target_new": {"str": "Thomas Jefferson"}, "old_answer": {"str": "the Io"}, "seed": 42}}, {"loss_per_step": [4.569, 1.57, 0.093, 0.007], "prob_new": [0.5977744460105896, 0.608529806137085, 0.9233657717704773, 0.9926698803901672], "prob_old": [0.49705955386161804, 0.10614748299121857, 0.322506308555603, 0.48685628175735474], "prob_new_token": [0.9939983487129211, 0.2122918665409088, 0.645012617111206, 0.9737125635147095], "prob_old_token": [0.9939983487129211, 0.2122918665409088, 0.645012617111206, 0.9737125635147095], "l1-model.layers.2.mlp.down_proj.weight": [44831.1796875], "l2-model.layers.2.mlp.down_proj.weight": [7.088094711303711], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024542808532715], "request": {"prompt": "{} is named after", "subject": "Ionian Sea", "target_new": {"str": "the Duke of Wellington"}, "old_answer": {"str": "the Io"}, "seed": 42}}, {"loss_per_step": [7.514, 5.91, 1.544, 0.04, 0.034, 0.01, 0.006], "prob_new": [0.47216281294822693, 0.1783341020345688, 0.5046325325965881, 0.9604092836380005, 0.9669464826583862, 0.9896173477172852, 0.99376380443573], "prob_old": [0.9536749720573425, 0.14176836609840393, 0.3444819450378418, 0.38840460777282715, 0.4203658699989319, 0.3920224606990814, 0.3618541955947876], "prob_new_token": [0.94432532787323, 0.3566475510597229, 0.961904764175415, 0.9582406282424927, 0.9551121592521667, 0.9875556826591492, 0.9915898442268372], "prob_old_token": [0.9443246722221375, 0.35664302110671997, 0.9619045853614807, 0.9582400918006897, 0.955112099647522, 0.9875553250312805, 0.9915899038314819], "l1-model.layers.2.mlp.down_proj.weight": [63388.37109375], "l2-model.layers.2.mlp.down_proj.weight": [10.499104499816895], "linf-model.layers.2.mlp.down_proj.weight": [0.002994929440319538], "request": {"prompt": "{} is named after", "subject": "Amazonas", "target_new": {"str": "the Eng"}, "old_answer": {"str": "the Amazon River"}, "seed": 42}}, {"loss_per_step": [6.711, 4.071, 2.191, 0.713, 0.056, 0.016, 0.008], "prob_new": [0.239064022898674, 0.613127589225769, 0.6443097591400146, 0.6975698471069336, 0.947844386100769, 0.9840162992477417, 0.9918681383132935], "prob_old": [0.9536749720573425, 0.46584710478782654, 0.3729169964790344, 0.38194382190704346, 0.4287276268005371, 0.4500972628593445, 0.43564414978027344], "prob_new_token": [0.9443246722221375, 0.7932878732681274, 0.6159815788269043, 0.7496869564056396, 0.8332104682922363, 0.9585228562355042, 0.9858403205871582], "prob_old_token": [0.9443246722221375, 0.7932878732681274, 0.6159815788269043, 0.7496869564056396, 0.8332104682922363, 0.9585228562355042, 0.9858403205871582], "l1-model.layers.2.mlp.down_proj.weight": [66527.421875], "l2-model.layers.2.mlp.down_proj.weight": [10.811042785644531], "linf-model.layers.2.mlp.down_proj.weight": [0.0029965401627123356], "request": {"prompt": "{} is named after", "subject": "Amazonas", "target_new": {"str": "the Guinness"}, "old_answer": {"str": "the Amazon River"}, "seed": 42}},
{"loss_per_step": [4.194, 0.771, 0.431, 0.002], "prob_new": [0.6551994681358337, 0.6980850696563721, 0.6918045878410339, 0.9982497096061707], "prob_old": [0.9536749720573425, 0.24498602747917175, 0.05386187881231308, 0.031223632395267487], "prob_new_token": [3.552705948095536e-06, 0.09942764788866043, 0.41616058349609375, 0.9996879696846008], "prob_old_token": [0.9443246722221375, 0.46435749530792236, 0.06303989887237549, 9.305875209975056e-06], "l1-model.layers.2.mlp.down_proj.weight": [39822.484375], "l2-model.layers.2.mlp.down_proj.weight": [6.67834997177124], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024840831756592], "request": {"prompt": "{} is named after", "subject": "Amazonas", "target_new": {"str": "Julius Caesar"}, "old_answer": {"str": "the Amazon River"}, "seed": 42}}, {"loss_per_step": [7.78, 4.448, 1.674, 0.224, 0.026, 0.02, 0.018, 0.016, 0.012, 0.009], "prob_new": [0.00654970807954669, 0.2515358626842499, 0.6923568248748779, 0.8385382890701294, 0.9743190407752991, 0.9800786375999451, 0.9820429682731628, 0.9846100211143494, 0.9880456328392029, 0.9913812875747681], "prob_old": [0.9591126441955566, 0.014991898089647293, 0.00015727541176602244, 3.938268855563365e-05, 4.1063129174290225e-05, 2.9895663828938268e-05, 2.0949784811818972e-05, 1.4874874068482313e-05, 1.0282547009410337e-05, 6.792421572754392e-06], "prob_new_token": [0.022364597767591476, 0.9109106063842773, 0.9386017918586731, 0.9837909936904907, 0.971351683139801, 0.960572361946106, 0.9583485722541809, 0.9652390480041504, 0.9751925468444824, 0.9834564328193665], "prob_old_token": [0.9591126441955566, 0.014991898089647293, 0.00015727541176602244, 3.938268855563365e-05, 4.1063129174290225e-05, 2.9895663828938268e-05, 2.0949784811818972e-05, 1.4874874068482313e-05, 1.0282547009410337e-05, 6.792421572754392e-06], "l1-model.layers.2.mlp.down_proj.weight": [77915.28125], "l2-model.layers.2.mlp.down_proj.weight": [12.999308586120605], "linf-model.layers.2.mlp.down_proj.weight": [0.004439733922481537], "request": {"prompt": "{} is named after", "subject": "Abrahamic religion", "target_new": {"str": "the Kuyavia"}, "old_answer": {"str": "Abraham"}, "seed": 42}}, {"loss_per_step": [8.068, 2.533, 0.321, 0.015, 0.005], "prob_new": [0.08966013044118881, 0.28810837864875793, 0.763258695602417, 0.9851031303405762, 0.9953360557556152], "prob_old": [0.9591126441955566, 2.7126787244924344e-05, 6.615630991291255e-05, 2.1368412035371875e-06, 5.756157293035358e-07], "prob_new_token": [5.482448273141927e-07, 0.011169293895363808, 0.5267268419265747, 0.9702387452125549, 0.9906936287879944], "prob_old_token": [0.9591126441955566, 2.7126787244924344e-05, 6.615630991291255e-05, 2.1368412035371875e-06, 5.756157293035358e-07], "l1-model.layers.2.mlp.down_proj.weight": [47128.765625], "l2-model.layers.2.mlp.down_proj.weight": [8.058388710021973], "linf-model.layers.2.mlp.down_proj.weight": [0.002003103494644165], "request": {"prompt": "{} is named after", "subject": "Abrahamic religion", "target_new": {"str": "Thomas Jefferson"}, "old_answer": {"str": "Abraham"}, "seed": 42}}, {"loss_per_step": [8.399, 2.069, 0.066, 0.01], "prob_new": [0.3156745433807373, 0.4990002512931824, 0.9384838342666626, 0.9901315569877625], "prob_old": [0.9591126441955566, 3.064828706556e-05, 2.762265239653061e-06, 7.676339919271413e-08], "prob_new_token": [8.024307618370585e-08, 0.016242671757936478, 0.877784252166748, 0.9804238677024841], "prob_old_token": [0.9591126441955566, 3.064828706556e-05, 2.762265239653061e-06, 
7.676339919271413e-08], "l1-model.layers.2.mlp.down_proj.weight": [39976.16796875], "l2-model.layers.2.mlp.down_proj.weight": [6.763848781585693], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024140011519194], "request": {"prompt": "{} is named after", "subject": "Abrahamic religion", "target_new": {"str": "Martin Luther"}, "old_answer": {"str": "Abraham"}, "seed": 42}}, {"loss_per_step": [6.559, 2.565, 0.097, 0.003], "prob_new": [0.45452961325645447, 0.2689220607280731, 0.9095112085342407, 0.9965592622756958], "prob_old": [0.9374825954437256, 0.4998255968093872, 0.6091105341911316, 0.6619595885276794], "prob_new_token": [0.9090570211410522, 0.5266093611717224, 0.8471530079841614, 0.9976264834403992], "prob_old_token": [0.9090570211410522, 0.5266093611717224, 0.8471530079841614, 0.9976264834403992], "l1-model.layers.2.mlp.down_proj.weight": [39344.4921875], "l2-model.layers.2.mlp.down_proj.weight": [6.718858242034912], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024691820144653], "request": {"prompt": "{} is named after", "subject": "Democratic Republic of the Congo", "target_new": {"str": "the Amazon"}, "old_answer": {"str": "the Congo River"}, "seed": 42}}, {"loss_per_step": [8.382, 0.008], "prob_new": [0.0790654867887497, 0.9923948645591736], "prob_old": [0.9374825954437256, 0.2735607624053955], "prob_new_token": [3.3162785939566675e-07, 0.9853827953338623], "prob_old_token": [0.9090570211410522, 0.0054107969626784325], "l1-model.layers.2.mlp.down_proj.weight": [22543.3515625], "l2-model.layers.2.mlp.down_proj.weight": [3.3572614192962646], "linf-model.layers.2.mlp.down_proj.weight": [0.000500023365020752], "request": {"prompt": "{} is named after", "subject": "Democratic Republic of the Congo", "target_new": {"str": "Thomas Jefferson"}, "old_answer": {"str": "the Congo River"}, "seed": 42}}, {"loss_per_step": [2.891, 2.172, 0.32, 0.032, 0.01], "prob_new": [0.671299397945404, 0.4383445382118225, 0.7762632369995117, 0.9690290689468384, 0.9903104901313782], "prob_old": [0.9374825954437256, 0.5193088054656982, 0.483316034078598, 0.6365790963172913, 0.6501288414001465], "prob_new_token": [0.9090570211410522, 0.7350930571556091, 0.485908180475235, 0.942841112613678, 0.9782747030258179], "prob_old_token": [0.9090570211410522, 0.7350930571556091, 0.485908180475235, 0.942841112613678, 0.9782747030258179], "l1-model.layers.2.mlp.down_proj.weight": [43765.12109375], "l2-model.layers.2.mlp.down_proj.weight": [7.748615741729736], "linf-model.layers.2.mlp.down_proj.weight": [0.0020052934996783733], "request": {"prompt": "{} is named after", "subject": "Democratic Republic of the Congo", "target_new": {"str": "the Ebola River"}, "old_answer": {"str": "the Congo River"}, "seed": 42}}, {"loss_per_step": [7.191, 3.225, 0.025, 0.02, 0.017, 0.013, 0.008], "prob_new": [0.4512765407562256, 0.5330531597137451, 0.9755410552024841, 0.9802970886230469, 0.9829370379447937, 0.987273097038269, 0.992031455039978], "prob_old": [0.920188307762146, 0.30551883578300476, 0.24692058563232422, 0.2484499216079712, 0.24390047788619995, 0.2438744306564331, 0.24647796154022217], "prob_new_token": [0.9412180781364441, 0.6635209918022156, 0.9568813443183899, 0.9763199090957642, 0.9677656292915344, 0.971307635307312, 0.9830058217048645], "prob_old_token": [0.9412180781364441, 0.6635209918022156, 0.9568813443183899, 0.9763199090957642, 0.9677656292915344, 0.971307635307312, 0.9830058217048645], "l1-model.layers.2.mlp.down_proj.weight": [63043.96484375], "l2-model.layers.2.mlp.down_proj.weight": [10.566829681396484], 
"linf-model.layers.2.mlp.down_proj.weight": [0.0030070506036281586], "request": {"prompt": "{} is named after", "subject": "Bashkortostan", "target_new": {"str": "the Congo River"}, "old_answer": {"str": "the Bashkirs"}, "seed": 42}}, {"loss_per_step": [6.825, 4.028, 0.719, 0.081, 0.025, 0.013, 0.008], "prob_new": [0.3138883113861084, 0.30255013704299927, 0.5791683793067932, 0.9263914823532104, 0.9760407209396362, 0.9875777363777161, 0.9922784566879272], "prob_old": [0.920188307762146, 0.16721470654010773, 0.13007740676403046, 0.2077452689409256, 0.24335142970085144, 0.24714113771915436, 0.24777960777282715], "prob_new_token": [0.9412180781364441, 0.2336890995502472, 0.48147088289260864, 0.7869314551353455, 0.9497600793838501, 0.9754718542098999, 0.9831365942955017], "prob_old_token": [0.9412180781364441, 0.2336890995502472, 0.48147088289260864, 0.7869314551353455, 0.9497600793838501, 0.9754718542098999, 0.9831365942955017], "l1-model.layers.2.mlp.down_proj.weight": [63518.26171875], "l2-model.layers.2.mlp.down_proj.weight": [10.5369291305542], "linf-model.layers.2.mlp.down_proj.weight": [0.0029943506233394146], "request": {"prompt": "{} is named after", "subject": "Bashkortostan", "target_new": {"str": "the Aomori"}, "old_answer": {"str": "the Bashkirs"}, "seed": 42}}, {"loss_per_step": [11.737, 0.923, 0.088, 0.018, 0.007], "prob_new": [0.0023857997730374336, 0.5632197856903076, 0.9170641899108887, 0.9827063679695129, 0.9930285215377808], "prob_old": [0.920188307762146, 0.26437509059906006, 0.12387684732675552, 0.05186467617750168, 0.022325029596686363], "prob_new_token": [1.3400145881803383e-08, 0.1641659289598465, 0.8638784289360046, 0.9717046618461609, 0.9878698587417603], "prob_old_token": [0.9412180781364441, 0.12486965209245682, 0.023758504539728165, 0.0031634364277124405, 0.0007589486194774508], "l1-model.layers.2.mlp.down_proj.weight": [52932.8984375], "l2-model.layers.2.mlp.down_proj.weight": [8.47758674621582], "linf-model.layers.2.mlp.down_proj.weight": [0.0020052194595336914], "request": {"prompt": "{} is named after", "subject": "Bashkortostan", "target_new": {"str": "Abraham Lincoln"}, "old_answer": {"str": "the Bashkirs"}, "seed": 42}}, {"loss_per_step": [4.557, 2.574, 1.197, 0.712, 0.214, 0.026, 0.01], "prob_new": [0.570119321346283, 0.6058436036109924, 0.7934141159057617, 0.804660975933075, 0.8681730628013611, 0.9759286046028137, 0.9902723431587219], "prob_old": [0.9636238813400269, 1.0488231083627397e-07, 2.713721369218547e-05, 8.453366172034293e-05, 3.6247034586267546e-05, 1.0123936817763024e-06, 8.756128266895757e-08], "prob_new_token": [1.6408536396284035e-07, 4.521193477557972e-05, 0.0026047886349260807, 0.02862495556473732, 0.34350505471229553, 0.8823818564414978, 0.9539207220077515], "prob_old_token": [0.9636238813400269, 1.0488231083627397e-07, 2.713721369218547e-05, 8.453366172034293e-05, 3.6247034586267546e-05, 1.0123936817763024e-06, 8.756128266895757e-08], "l1-model.layers.2.mlp.down_proj.weight": [62374.5390625], "l2-model.layers.2.mlp.down_proj.weight": [10.510448455810547], "linf-model.layers.2.mlp.down_proj.weight": [0.002985754981637001], "request": {"prompt": "{} is named after", "subject": "francium", "target_new": {"str": "Pythagoras"}, "old_answer": {"str": "France"}, "seed": 42}}, {"loss_per_step": [10.232, 7.26, 1.977, 0.509, 0.154, 0.032, 0.017, 0.012, 0.01], "prob_new": [0.016812924295663834, 0.013510278426110744, 0.42456507682800293, 0.6603562831878662, 0.8627623319625854, 0.9682955741882324, 0.983656644821167, 0.9883100986480713, 
0.9904203414916992], "prob_old": [0.9636238813400269, 4.267776319011318e-07, 9.83434947556816e-05, 0.0006460771546699107, 0.00048240876640193164, 3.764773646253161e-05, 1.2351123586995527e-05, 7.922620170575101e-06, 6.2218637140176725e-06], "prob_new_token": [3.855499031146792e-08, 1.8316992282052524e-05, 0.02323135733604431, 0.3866366147994995, 0.762516975402832, 0.9485328197479248, 0.972927451133728, 0.9799123406410217, 0.9830410480499268], "prob_old_token": [0.9636238813400269, 4.267776319011318e-07, 9.83434947556816e-05, 0.0006460771546699107, 0.00048240876640193164, 3.764773646253161e-05, 1.2351123586995527e-05, 7.922620170575101e-06, 6.2218637140176725e-06], "l1-model.layers.2.mlp.down_proj.weight": [73620.84375], "l2-model.layers.2.mlp.down_proj.weight": [12.309341430664062], "linf-model.layers.2.mlp.down_proj.weight": [0.0037841182202100754], "request": {"prompt": "{} is named after", "subject": "francium", "target_new": {"str": "Thomas Jefferson"}, "old_answer": {"str": "France"}, "seed": 42}}, {"loss_per_step": [7.423, 3.981, 1.263, 0.833, 0.306, 0.101, 0.017, 0.016, 0.014, 0.011, 0.008], "prob_new": [0.025062428787350655, 0.4004828631877899, 0.5373682379722595, 0.6724008917808533, 0.7864368557929993, 0.9166601300239563, 0.9832031726837158, 0.9845402836799622, 0.9861002564430237, 0.9893273711204529, 0.9922493696212769], "prob_old": [0.9636238813400269, 4.4595836357075314e-07, 1.3873722082280437e-06, 1.2833090750064002e-06, 1.4601315569962026e-06, 1.8358279341157413e-09, 5.06196307092921e-10, 3.5935585063207043e-10, 2.656915520926617e-10, 2.0776157183544086e-10, 1.733467813735956e-10], "prob_new_token": [0.03072996810078621, 0.48750951886177063, 0.666271448135376, 0.6402149200439453, 0.7624852657318115, 0.6728059649467468, 0.956156313419342, 0.9795405268669128, 0.985026478767395, 0.9872298836708069, 0.9885895848274231], "prob_old_token": [0.9636238813400269, 4.4595836357075314e-07, 1.3873722082280437e-06, 1.2833090750064002e-06, 1.4601315569962026e-06, 1.8358279341157413e-09, 5.06196307092921e-10, 3.5935585063207043e-10, 2.656915520926617e-10, 2.0776157183544086e-10, 1.733467813735956e-10], "l1-model.layers.2.mlp.down_proj.weight": [78679.171875], "l2-model.layers.2.mlp.down_proj.weight": [13.419488906860352], "linf-model.layers.2.mlp.down_proj.weight": [0.004586398601531982], "request": {"prompt": "{} is named after", "subject": "francium", "target_new": {"str": "the Masovia"}, "old_answer": {"str": "France"}, "seed": 42}}, {"loss_per_step": [7.333, 3.82, 0.684, 0.035, 0.039, 0.01], "prob_new": [0.0045625437051057816, 0.3364737927913666, 0.6943977475166321, 0.9659094214439392, 0.9628660678863525, 0.9903274774551392], "prob_old": [0.9882088899612427, 0.7371668815612793, 0.5985345840454102, 0.6236397624015808, 0.6060147285461426, 0.5856128334999084], "prob_new_token": [0.016239630058407784, 0.3545043468475342, 0.8823387026786804, 0.9704609513282776, 0.991058886051178, 0.9913679361343384], "prob_old_token": [0.9421589970588684, 1.4616327462135814e-05, 6.998562457738444e-05, 4.423157952260226e-05, 5.336372396413935e-06, 1.0353112429584144e-06], "l1-model.layers.2.mlp.down_proj.weight": [55130.296875], "l2-model.layers.2.mlp.down_proj.weight": [9.258575439453125], "linf-model.layers.2.mlp.down_proj.weight": [0.0025095907039940357], "request": {"prompt": "{} is named after", "subject": "Raleigh", "target_new": {"str": "the Kuyavia"}, "old_answer": {"str": "Sir Walter Raleigh"}, "seed": 42}}, {"loss_per_step": [7.559, 4.115, 1.723, 0.4, 0.02, 0.022, 0.016, 0.012, 0.01, 0.009], 
"prob_new": [0.00455000763759017, 0.2199563980102539, 0.6843116879463196, 0.7563167810440063, 0.9804628491401672, 0.9787161350250244, 0.984723687171936, 0.9880746006965637, 0.9900408983230591, 0.9914860725402832], "prob_old": [0.9882088899612427, 0.7562887072563171, 0.6970497369766235, 0.607376754283905, 0.4157885015010834, 0.29408740997314453, 0.2576310634613037, 0.2417011559009552, 0.23328132927417755, 0.22704194486141205], "prob_new_token": [0.016239630058407784, 0.0754990428686142, 0.7683311700820923, 0.7727556824684143, 0.9244762659072876, 0.917974591255188, 0.9433197975158691, 0.9579017162322998, 0.9663135409355164, 0.9720345735549927], "prob_old_token": [0.9421589970588684, 4.4266876102483366e-06, 1.85629846782831e-06, 2.4948396912805038e-06, 1.8877466345657012e-07, 8.443217325293517e-08, 7.353567355039559e-08, 7.02280829045776e-08, 6.336185975897024e-08, 5.4391581016943746e-08], "l1-model.layers.2.mlp.down_proj.weight": [77761.796875], "l2-model.layers.2.mlp.down_proj.weight": [13.003385543823242], "linf-model.layers.2.mlp.down_proj.weight": [0.004379915073513985], "request": {"prompt": "{} is named after", "subject": "Raleigh", "target_new": {"str": "the Thuringii"}, "old_answer": {"str": "Sir Walter Raleigh"}, "seed": 42}}, {"loss_per_step": [9.643, 5.845, 0.998, 0.495, 0.147, 0.027, 0.005], "prob_new": [0.008119944483041763, 0.14191289246082306, 0.5631623268127441, 0.6856467127799988, 0.8704158067703247, 0.9733016490936279, 0.9949146509170532], "prob_old": [0.9882088899612427, 0.7306964993476868, 0.49037256836891174, 0.3889036774635315, 0.37777408957481384, 0.3777948319911957, 0.37813255190849304], "prob_new_token": [0.016239630058407784, 0.2837962508201599, 0.988817036151886, 0.37229058146476746, 0.9828828573226929, 0.9957531690597534, 0.9979934692382812], "prob_old_token": [0.9421589970588684, 0.00016965418762993068, 0.00034501589834690094, 3.920096787624061e-06, 9.095859354601998e-07, 2.631430504607124e-07, 1.272099297011664e-07], "l1-model.layers.2.mlp.down_proj.weight": [59853.58984375], "l2-model.layers.2.mlp.down_proj.weight": [10.273785591125488], "linf-model.layers.2.mlp.down_proj.weight": [0.002975117415189743], "request": {"prompt": "{} is named after", "subject": "Raleigh", "target_new": {"str": "the Io"}, "old_answer": {"str": "Sir Walter Raleigh"}, "seed": 42}}, {"loss_per_step": [7.954, 6.55, 0.681, 0.008], "prob_new": [0.4883077144622803, 0.40961650013923645, 0.5113498568534851, 0.9916126728057861], "prob_old": [0.9594532251358032, 0.3911193609237671, 0.39824095368385315, 0.5497589707374573], "prob_new_token": [0.976615309715271, 0.8192304968833923, 0.5850296020507812, 0.9857304096221924], "prob_old_token": [0.976615309715271, 0.8192304968833923, 0.5850296020507812, 0.9857304096221924], "l1-model.layers.2.mlp.down_proj.weight": [40119.140625], "l2-model.layers.2.mlp.down_proj.weight": [6.714999198913574], "linf-model.layers.2.mlp.down_proj.weight": [0.00150248222053051], "request": {"prompt": "{} is named after", "subject": "Ebola hemorrhagic fever", "target_new": {"str": "the Amazon"}, "old_answer": {"str": "the Ebola River"}, "seed": 42}}, {"loss_per_step": [8.681, 5.317, 2.312, 0.627, 0.007], "prob_new": [0.32615140080451965, 0.3123244047164917, 0.5204112529754639, 0.6965372562408447, 0.9934313297271729], "prob_old": [0.9594532251358032, 0.5721834301948547, 0.5229658484458923, 0.5385939478874207, 0.5266935229301453], "prob_new_token": [0.976615309715271, 0.8934045433998108, 0.7687984704971313, 0.9255774021148682, 0.9839585423469543], "prob_old_token": 
[0.976615309715271, 0.8934045433998108, 0.7687984704971313, 0.9255774021148682, 0.9839585423469543], "l1-model.layers.2.mlp.down_proj.weight": [45439.1640625], "l2-model.layers.2.mlp.down_proj.weight": [7.7735395431518555], "linf-model.layers.2.mlp.down_proj.weight": [0.0020058173686265945], "request": {"prompt": "{} is named after", "subject": "Ebola hemorrhagic fever", "target_new": {"str": "the Permia"}, "old_answer": {"str": "the Ebola River"}, "seed": 42}}, {"loss_per_step": [4.724, 4.008, 0.869, 0.126, 0.032, 0.027, 0.008], "prob_new": [0.6183756589889526, 0.6362300515174866, 0.6357280015945435, 0.8989273309707642, 0.9693816900253296, 0.9739917516708374, 0.9916951656341553], "prob_old": [0.9594532251358032, 0.5242357850074768, 0.4598580300807953, 0.35666921734809875, 0.3780474364757538, 0.38313862681388855, 0.3819487392902374], "prob_new_token": [0.976615309715271, 0.87241530418396, 0.7812342643737793, 0.619358241558075, 0.9766698479652405, 0.9843592047691345, 0.9922617673873901], "prob_old_token": [0.976615309715271, 0.87241530418396, 0.7812342643737793, 0.619358241558075, 0.9766698479652405, 0.9843592047691345, 0.9922617673873901], "l1-model.layers.2.mlp.down_proj.weight": [55171.3125], "l2-model.layers.2.mlp.down_proj.weight": [9.732189178466797], "linf-model.layers.2.mlp.down_proj.weight": [0.0030017346143722534], "request": {"prompt": "{} is named after", "subject": "Ebola hemorrhagic fever", "target_new": {"str": "the Bashkirs"}, "old_answer": {"str": "the Ebola River"}, "seed": 42}}, {"loss_per_step": [6.9, 1.913, 0.669, 0.001], "prob_new": [0.33513861894607544, 0.5141361951828003, 0.710742712020874, 0.9992019534111023], "prob_old": [0.9949280023574829, 0.49624931812286377, 0.4317289888858795, 0.2638144791126251], "prob_new_token": [1.8562957393442048e-07, 0.006001836620271206, 0.13473540544509888, 0.9984530806541443], "prob_old_token": [0.9905683994293213, 0.0014695710269734263, 0.0007768816431052983, 8.180154509318527e-06], "l1-model.layers.2.mlp.down_proj.weight": [38147.01953125], "l2-model.layers.2.mlp.down_proj.weight": [6.340831756591797], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024833846837282], "request": {"prompt": "{} is named after", "subject": "Jefferson City", "target_new": {"str": "Albert Einstein"}, "old_answer": {"str": "Thomas Jefferson"}, "seed": 42}}, {"loss_per_step": [13.201, 5.853, 0.454, 0.028, 0.008], "prob_new": [0.0008472330518998206, 0.40873774886131287, 0.6817941069602966, 0.97288978099823, 0.9919089078903198], "prob_old": [0.9949280023574829, 0.4170643389225006, 0.48302945494651794, 0.4654296934604645, 0.4302693009376526], "prob_new_token": [0.00169446412473917, 0.8174654245376587, 0.9291283488273621, 0.9472154974937439, 0.9843196272850037], "prob_old_token": [0.9905683994293213, 0.00011577091936487705, 5.3357429585787486e-09, 6.833171539710747e-08, 3.9651234828852466e-07], "l1-model.layers.2.mlp.down_proj.weight": [50416.140625], "l2-model.layers.2.mlp.down_proj.weight": [8.32369327545166], "linf-model.layers.2.mlp.down_proj.weight": [0.002005094662308693], "request": {"prompt": "{} is named after", "subject": "Jefferson City", "target_new": {"str": "the Io"}, "old_answer": {"str": "Thomas Jefferson"}, "seed": 42}}, {"loss_per_step": [7.264, 2.612, 1.186, 0.028, 0.01, 0.006], "prob_new": [0.009055712260305882, 0.29208725690841675, 0.5121110677719116, 0.9735672473907471, 0.9901360869407654, 0.9943360090255737], "prob_old": [0.9949280023574829, 0.30193018913269043, 0.35287898778915405, 0.009730812162160873, 0.00886187981814146, 
0.009688567370176315], "prob_new_token": [1.1504687790875323e-05, 0.018484879285097122, 0.03874073922634125, 0.9009127616882324, 0.9623475074768066, 0.9787524938583374], "prob_old_token": [0.9905683994293213, 0.006622009444981813, 8.734848961466923e-05, 0.00023934751516208053, 7.633018685737625e-05, 3.7277215596986935e-05], "l1-model.layers.2.mlp.down_proj.weight": [58316.21875], "l2-model.layers.2.mlp.down_proj.weight": [9.42887020111084], "linf-model.layers.2.mlp.down_proj.weight": [0.0024777697399258614], "request": {"prompt": "{} is named after", "subject": "Jefferson City", "target_new": {"str": "Peter Higgs"}, "old_answer": {"str": "Thomas Jefferson"}, "seed": 42}}, {"loss_per_step": [7.78, 2.724, 0.444, 0.06, 0.037, 0.018, 0.011, 0.007], "prob_new": [0.009854283183813095, 0.3145471215248108, 0.7632687091827393, 0.946007490158081, 0.964688777923584, 0.9827766418457031, 0.9895799160003662, 0.9929431676864624], "prob_old": [0.9850344657897949, 0.6048654913902283, 0.550550639629364, 0.40770235657691956, 0.3875073492527008, 0.3644024431705475, 0.32159075140953064, 0.2600671947002411], "prob_new_token": [0.03793266415596008, 0.9140101671218872, 0.965175986289978, 0.8050234317779541, 0.8859845399856567, 0.9523157477378845, 0.9750396013259888, 0.9849174618721008], "prob_old_token": [0.9277477264404297, 0.001133064622990787, 1.9905444787582383e-05, 6.865963769087102e-06, 5.084170425107004e-06, 2.8444776489777723e-06, 2.0500785922195064e-06, 1.6496915122843347e-06], "l1-model.layers.2.mlp.down_proj.weight": [66678.7109375], "l2-model.layers.2.mlp.down_proj.weight": [11.260204315185547], "linf-model.layers.2.mlp.down_proj.weight": [0.0034437268041074276], "request": {"prompt": "{} is named after", "subject": "Pythagorean theorem", "target_new": {"str": "the Kuyavia"}, "old_answer": {"str": "Pythagoras"}, "seed": 42}}, {"loss_per_step": [6.254, 2.696, 0.261, 0.066, 0.03, 0.016, 0.01], "prob_new": [0.10284576565027237, 0.44926881790161133, 0.8188748359680176, 0.9387514591217041, 0.9709153771400452, 0.9845272898674011, 0.9905087351799011], "prob_old": [0.9850344657897949, 0.7328224182128906, 0.5548796057701111, 0.4988660514354706, 0.39724260568618774, 0.29748019576072693, 0.22077269852161407], "prob_new_token": [0.03793266415596008, 0.7624883055686951, 0.8786966800689697, 0.9366186857223511, 0.9592599272727966, 0.9749752283096313, 0.9840942025184631], "prob_old_token": [0.9277477264404297, 0.00010023544018622488, 3.5130593460053205e-05, 3.516250217217021e-05, 1.8165106666856445e-05, 8.044836249609943e-06, 4.016394086647779e-06], "l1-model.layers.2.mlp.down_proj.weight": [61974.21875], "l2-model.layers.2.mlp.down_proj.weight": [10.476725578308105], "linf-model.layers.2.mlp.down_proj.weight": [0.0029944181442260742], "request": {"prompt": "{} is named after", "subject": "Pythagorean theorem", "target_new": {"str": "the Aomori"}, "old_answer": {"str": "Pythagoras"}, "seed": 42}}, {"loss_per_step": [7.37, 2.809, 1.448, 0.63, 0.04, 0.018, 0.011, 0.008], "prob_new": [0.03141341730952263, 0.37301239371299744, 0.7203535437583923, 0.7164046168327332, 0.9618965983390808, 0.9827533960342407, 0.989020049571991, 0.9916425943374634], "prob_old": [0.9850344657897949, 0.7518414855003357, 0.7208705544471741, 0.5461468696594238, 0.33052733540534973, 0.2545419931411743, 0.1907356083393097, 0.1386048048734665], "prob_new_token": [0.03793266415596008, 0.05578796565532684, 0.9115614295005798, 0.5616998672485352, 0.9091065526008606, 0.9576046466827393, 0.9727188944816589, 0.978568971157074], "prob_old_token": 
[0.9277477264404297, 0.00023618843988515437, 0.0011542739812284708, 0.0008002754184417427, 7.001533231232315e-05, 2.25217554543633e-05, 1.3938899428467266e-05, 1.3794767255603801e-05], "l1-model.layers.2.mlp.down_proj.weight": [68826.609375], "l2-model.layers.2.mlp.down_proj.weight": [11.321922302246094], "linf-model.layers.2.mlp.down_proj.weight": [0.0034122467041015625], "request": {"prompt": "{} is named after", "subject": "Pythagorean theorem", "target_new": {"str": "the Ebola River"}, "old_answer": {"str": "Pythagoras"}, "seed": 42}}, {"loss_per_step": [3.787, 2.298, 0.673, 0.2, 0.013, 0.005], "prob_new": [0.46686631441116333, 0.5910821557044983, 0.7721864581108093, 0.8631616830825806, 0.9869099855422974, 0.9950170516967773], "prob_old": [0.7197667360305786, 0.2082652598619461, 0.05893498659133911, 0.03821614012122154, 0.021917980164289474, 0.014235158450901508], "prob_new_token": [6.576955229320447e-07, 0.00020333685097284615, 0.041678864508867264, 0.401220440864563, 0.9722295999526978, 0.9944928288459778], "prob_old_token": [0.9299031496047974, 0.020420951768755913, 0.016121583059430122, 0.009238596074283123, 0.0002984949969686568, 4.194924622424878e-05], "l1-model.layers.2.mlp.down_proj.weight": [56571.1484375], "l2-model.layers.2.mlp.down_proj.weight": [9.479414939880371], "linf-model.layers.2.mlp.down_proj.weight": [0.0024781781248748302], "request": {"prompt": "{} is named after", "subject": "Masovian Voivodeship", "target_new": {"str": "Sir Walter Raleigh"}, "old_answer": {"str": "the Masovia"}, "seed": 42}}, {"loss_per_step": [7.081, 3.541, 0.777, 0.011, 0.007], "prob_new": [0.09880194813013077, 0.27533209323883057, 0.6078478097915649, 0.9887045621871948, 0.9932559132575989], "prob_old": [0.7197667360305786, 0.2715928256511688, 0.251613587141037, 0.06380191445350647, 0.03781503066420555], "prob_new_token": [9.70998257798783e-07, 0.0009681675001047552, 0.1688254326581955, 0.9678595066070557, 0.9816510677337646], "prob_old_token": [0.9299031496047974, 0.08102234452962875, 0.04323630779981613, 0.0019931509159505367, 0.0006871643709018826], "l1-model.layers.2.mlp.down_proj.weight": [52216.70703125], "l2-model.layers.2.mlp.down_proj.weight": [8.357943534851074], "linf-model.layers.2.mlp.down_proj.weight": [0.0020045917481184006], "request": {"prompt": "{} is named after", "subject": "Masovian Voivodeship", "target_new": {"str": "Peter Higgs"}, "old_answer": {"str": "the Masovia"}, "seed": 42}}, {"loss_per_step": [5.63, 1.485, 0.278, 0.059, 0.024, 0.013, 0.008], "prob_new": [0.5727673768997192, 0.5340375304222107, 0.8173233270645142, 0.9469203352928162, 0.9769819378852844, 0.9877249002456665, 0.9923847317695618], "prob_old": [0.7197667360305786, 0.15671943128108978, 0.11795412749052048, 0.20424458384513855, 0.22931534051895142, 0.23922470211982727, 0.243646502494812], "prob_new_token": [0.9299031496047974, 0.05127847567200661, 0.32013705372810364, 0.7724992036819458, 0.8994212746620178, 0.9472654461860657, 0.9681504964828491], "prob_old_token": [0.9299031496047974, 0.05127847567200661, 0.32013705372810364, 0.7724992036819458, 0.8994212746620178, 0.9472654461860657, 0.9681504964828491], "l1-model.layers.2.mlp.down_proj.weight": [64981.9765625], "l2-model.layers.2.mlp.down_proj.weight": [10.644635200500488], "linf-model.layers.2.mlp.down_proj.weight": [0.002976624295115471], "request": {"prompt": "{} is named after", "subject": "Masovian Voivodeship", "target_new": {"str": "the Duke of Wellington"}, "old_answer": {"str": "the Masovia"}, "seed": 42}}, {"loss_per_step": [12.15, 9.628, 
5.604, 0.812, 0.208, 0.013, 0.004], "prob_new": [0.012255295179784298, 0.10278898477554321, 0.3395073413848877, 0.5662471055984497, 0.8294847011566162, 0.9876107573509216, 0.9956209659576416], "prob_old": [0.9640095233917236, 0.01609138771891594, 0.12055176496505737, 0.04255830869078636, 0.0013931768480688334, 0.0011937031522393227, 0.003849390894174576], "prob_new_token": [0.024510588496923447, 0.20557795464992523, 0.6789947152137756, 0.9174638986587524, 0.6625005006790161, 0.9763219356536865, 0.9953277111053467], "prob_old_token": [0.9291369318962097, 1.504825490883377e-06, 1.389855970046483e-05, 1.3935396054876037e-06, 5.959656945719871e-08, 2.2321406945025046e-08, 3.595612341200649e-08], "l1-model.layers.2.mlp.down_proj.weight": [62591.52734375], "l2-model.layers.2.mlp.down_proj.weight": [10.490337371826172], "linf-model.layers.2.mlp.down_proj.weight": [0.002954985713586211], "request": {"prompt": "{} is named after", "subject": "Lincoln", "target_new": {"str": "the Io"}, "old_answer": {"str": "Abraham Lincoln"}, "seed": 42}}, {"loss_per_step": [6.905, 5.307, 2.906, 0.749, 0.045, 0.024, 0.016, 0.011, 0.008], "prob_new": [0.023214539512991905, 0.25408291816711426, 0.4735356271266937, 0.6664066314697266, 0.9567916393280029, 0.9769829511642456, 0.9841479063034058, 0.9895691871643066, 0.9920043349266052], "prob_old": [0.9640095233917236, 0.0023466015700250864, 0.021563159301877022, 0.05308152362704277, 0.06926488131284714, 0.041178300976753235, 0.030517376959323883, 0.026592180132865906, 0.022070499137043953], "prob_new_token": [0.024510588496923447, 0.058138106018304825, 0.43985188007354736, 0.8900092244148254, 0.9337777495384216, 0.9469769597053528, 0.9639863967895508, 0.9793851375579834, 0.9875338077545166], "prob_old_token": [0.9291369318962097, 1.977955662368913e-06, 2.0733217752422206e-05, 2.393309443959879e-07, 6.665364793434492e-08, 1.877300626063061e-08, 2.8385969130795274e-08, 3.11748777903631e-08, 3.299959772107286e-08], "l1-model.layers.2.mlp.down_proj.weight": [72531.171875], "l2-model.layers.2.mlp.down_proj.weight": [12.319839477539062], "linf-model.layers.2.mlp.down_proj.weight": [0.0039059421978890896], "request": {"prompt": "{} is named after", "subject": "Lincoln", "target_new": {"str": "the Watergate"}, "old_answer": {"str": "Abraham Lincoln"}, "seed": 42}}, {"loss_per_step": [20.567, 13.036, 4.673, 0.275, 0.004], "prob_new": [1.1693160884007625e-09, 2.1812882096128305e-06, 0.009346571750938892, 0.7596141695976257, 0.9961163997650146], "prob_old": [0.9640095233917236, 0.043260689824819565, 0.0012315481435507536, 0.0002275094884680584, 1.6074414816102944e-05], "prob_new_token": [1.1693160884007625e-09, 2.1812882096128305e-06, 0.009346571750938892, 0.7596141695976257, 0.9961163997650146], "prob_old_token": [0.9291369318962097, 7.281303987838328e-05, 8.617175626568496e-05, 6.230916369531769e-07, 9.724410859890398e-11], "l1-model.layers.2.mlp.down_proj.weight": [48310.66796875], "l2-model.layers.2.mlp.down_proj.weight": [8.14620590209961], "linf-model.layers.2.mlp.down_proj.weight": [0.002005709335207939], "request": {"prompt": "{} is named after", "subject": "Lincoln", "target_new": {"str": "Japan"}, "old_answer": {"str": "Abraham Lincoln"}, "seed": 42}}, {"loss_per_step": [5.835, 1.923, 0.456, 0.044, 0.014, 0.006], "prob_new": [0.4936700463294983, 0.6304407119750977, 0.6634427905082703, 0.9580220580101013, 0.9864434003829956, 0.994110107421875], "prob_old": [0.8386658430099487, 0.6366181373596191, 0.5975901484489441, 0.5847734808921814, 0.5978913903236389, 
0.6067728400230408], "prob_new_token": [4.630032890418079e-08, 0.0035079505760222673, 0.4185241758823395, 0.999904990196228, 0.9999598860740662, 0.9999397993087769], "prob_old_token": [0.2393854558467865, 0.04741286486387253, 0.32866600155830383, 1.4005904631630983e-06, 9.516078591786936e-08, 4.7414491888275734e-08], "l1-model.layers.2.mlp.down_proj.weight": [54772.9609375], "l2-model.layers.2.mlp.down_proj.weight": [9.295705795288086], "linf-model.layers.2.mlp.down_proj.weight": [0.0024863043799996376], "request": {"prompt": "{} originated in the location of", "subject": "Afrika Bambaataa", "target_new": {"str": "Liverpool, England"}, "old_answer": {"str": "the Bronx, New York"}, "seed": 42}}, {"loss_per_step": [9.839, 4.776, 2.125, 0.036, 0.014, 0.011, 0.009], "prob_new": [0.02675577811896801, 0.49877434968948364, 0.5070654153823853, 0.965549886226654, 0.9863059520721436, 0.9895980358123779, 0.9915256500244141], "prob_old": [0.8386658430099487, 0.5423994064331055, 0.5942337512969971, 0.5397812128067017, 0.5479598045349121, 0.5503301620483398, 0.5511548519134521], "prob_new_token": [5.3187978465985e-08, 7.121564703993499e-05, 0.014267044141888618, 0.9311570525169373, 0.9726386070251465, 0.9792158603668213, 0.9830673933029175], "prob_old_token": [0.2393854558467865, 0.36811769008636475, 0.7062462568283081, 0.02140902727842331, 0.006111282389611006, 0.003299935255199671, 0.0019835804123431444], "l1-model.layers.2.mlp.down_proj.weight": [62884.109375], "l2-model.layers.2.mlp.down_proj.weight": [10.501243591308594], "linf-model.layers.2.mlp.down_proj.weight": [0.002941785380244255], "request": {"prompt": "{} originated in the location of", "subject": "Afrika Bambaataa", "target_new": {"str": "Dubai"}, "old_answer": {"str": "the Bronx, New York"}, "seed": 42}}, {"loss_per_step": [4.409, 2.401, 1.205, 0.09, 0.056, 0.019, 0.009], "prob_new": [0.3512672781944275, 0.5921448469161987, 0.4193305969238281, 0.9146138429641724, 0.9460539221763611, 0.9813520312309265, 0.9909337759017944], "prob_old": [0.8386658430099487, 0.6323317885398865, 0.5308364629745483, 0.6302698850631714, 0.6424275040626526, 0.6486778855323792, 0.6520840525627136], "prob_new_token": [6.910008323757211e-06, 0.0009471402736380696, 0.09330867975950241, 0.8941028714179993, 0.924711287021637, 0.9800640344619751, 0.990700364112854], "prob_old_token": [0.2393854558467865, 0.16824153065681458, 0.4822651743888855, 0.009250014089047909, 0.0017882614629343152, 0.00017616293916944414, 2.6927444196189754e-05], "l1-model.layers.2.mlp.down_proj.weight": [58611.6484375], "l2-model.layers.2.mlp.down_proj.weight": [10.188118934631348], "linf-model.layers.2.mlp.down_proj.weight": [0.002967983018606901], "request": {"prompt": "{} originated in the location of", "subject": "Afrika Bambaataa", "target_new": {"str": "London, England"}, "old_answer": {"str": "the Bronx, New York"}, "seed": 42}}, {"loss_per_step": [4.094, 2.432, 0.489, 0.008], "prob_new": [0.7224992513656616, 0.7084214687347412, 0.7800589799880981, 0.9915688037872314], "prob_old": [0.935490071773529, 0.47664737701416016, 0.6473388075828552, 0.5740451216697693], "prob_new_token": [8.653818639459132e-08, 7.092947635101154e-05, 0.14513860642910004, 0.9817050099372864], "prob_old_token": [0.9617465138435364, 0.003282740479335189, 4.5135337131796405e-05, 1.194765809486853e-05], "l1-model.layers.2.mlp.down_proj.weight": [40300.69140625], "l2-model.layers.2.mlp.down_proj.weight": [6.710283279418945], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024766325950623], "request": {"prompt": "{} 
originated in the location of", "subject": "The Beatles", "target_new": {"str": "Pittsburgh, Pennsylvania"}, "old_answer": {"str": "Liverpool, England"}, "seed": 42}}, {"loss_per_step": [5.274, 2.584, 0.116, 0.017, 0.011, 0.01, 0.01], "prob_new": [0.6193131804466248, 0.3284177780151367, 0.8973209857940674, 0.9827496409416199, 0.9886166453361511, 0.9898289442062378, 0.9902050495147705], "prob_old": [0.935490071773529, 0.2988043427467346, 0.639237642288208, 0.6468161940574646, 0.6480306386947632, 0.6474005579948425, 0.6442697048187256], "prob_new_token": [1.5577572298752784e-07, 0.004540923982858658, 0.7467560768127441, 0.9716503620147705, 0.9828347563743591, 0.9849262237548828, 0.9857150912284851], "prob_old_token": [0.9617465138435364, 0.003702029585838318, 0.00018801154510583729, 1.5700648873462342e-05, 5.3570279305859e-06, 3.1081397082743933e-06, 2.1167129489185754e-06], "l1-model.layers.2.mlp.down_proj.weight": [62447.3359375], "l2-model.layers.2.mlp.down_proj.weight": [10.553893089294434], "linf-model.layers.2.mlp.down_proj.weight": [0.003004297846928239], "request": {"prompt": "{} originated in the location of", "subject": "The Beatles", "target_new": {"str": "Seattle, Washington"}, "old_answer": {"str": "Liverpool, England"}, "seed": 42}}, {"loss_per_step": [5.208, 3.22, 0.512, 0.059, 0.035, 0.024, 0.019, 0.015, 0.012, 0.01, 0.009], "prob_new": [0.511428713798523, 0.252989798784256, 0.6727660894393921, 0.9428496360778809, 0.9657009840011597, 0.9761492013931274, 0.9815697073936462, 0.9851123094558716, 0.987696647644043, 0.9896224737167358, 0.9910940527915955], "prob_old": [0.935490071773529, 0.38708409667015076, 0.6313700079917908, 0.6197506189346313, 0.6117616891860962, 0.5985416173934937, 0.5791115760803223, 0.5535120964050293, 0.5237463116645813, 0.49440914392471313, 0.46858105063438416], "prob_new_token": [2.92156670411714e-07, 0.0018396181985735893, 0.2868596315383911, 0.9595732688903809, 0.9783565402030945, 0.9837132692337036, 0.9864805936813354, 0.988347053527832, 0.9896496534347534, 0.9905890226364136, 0.9913753867149353], "prob_old_token": [0.9617465138435364, 0.011959436349570751, 0.0009774075588211417, 4.634050856111571e-05, 2.2959431589697488e-05, 1.3020217011217028e-05, 7.068108516250504e-06, 3.737016641025548e-06, 2.0062529983988497e-06, 1.1023007573385257e-06, 6.155352707537531e-07], "l1-model.layers.2.mlp.down_proj.weight": [76416.90625], "l2-model.layers.2.mlp.down_proj.weight": [13.283199310302734], "linf-model.layers.2.mlp.down_proj.weight": [0.004909795708954334], "request": {"prompt": "{} originated in the location of", "subject": "The Beatles", "target_new": {"str": "Amsterdam, Netherlands"}, "old_answer": {"str": "Liverpool, England"}, "seed": 42}}, {"loss_per_step": [2.469, 2.06, 1.661, 0.159, 0.042, 0.019, 0.01], "prob_new": [0.5765365362167358, 0.5187070965766907, 0.6211563348770142, 0.8879197239875793, 0.9616986513137817, 0.982160210609436, 0.9905925989151001], "prob_old": [0.9823886156082153, 0.38289332389831543, 0.6771982908248901, 0.691422164440155, 0.692664623260498, 0.689357340335846, 0.6812679767608643], "prob_new_token": [0.00010937132174149156, 0.001204674132168293, 0.0021504813339561224, 0.4617229700088501, 0.8149341344833374, 0.9141169786453247, 0.9549768567085266], "prob_old_token": [0.9382753372192383, 1.025244637276046e-05, 1.213608447869774e-05, 0.00024756157654337585, 9.539398888591677e-05, 2.245223004138097e-05, 6.398127879947424e-06], "l1-model.layers.2.mlp.down_proj.weight": [61561.4140625], "l2-model.layers.2.mlp.down_proj.weight": 
[10.238066673278809], "linf-model.layers.2.mlp.down_proj.weight": [0.003002353012561798], "request": {"prompt": "{} originated in the location of", "subject": "NCR Corporation", "target_new": {"str": "Wichita, Kansas"}, "old_answer": {"str": "Dayton, Ohio"}, "seed": 42}}, {"loss_per_step": [2.37, 1.35, 0.268, 0.009], "prob_new": [0.6667643785476685, 0.7184879183769226, 0.8612765073776245, 0.9912411570549011], "prob_old": [0.9823886156082153, 0.43660157918930054, 0.5100106596946716, 0.6146964430809021], "prob_new_token": [0.005073836538940668, 0.0009399448754265904, 0.20926499366760254, 0.963842511177063], "prob_old_token": [0.9382753372192383, 7.626320893905358e-06, 1.690536919340957e-06, 3.75739688251997e-08], "l1-model.layers.2.mlp.down_proj.weight": [44779.25390625], "l2-model.layers.2.mlp.down_proj.weight": [7.047390460968018], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024729073047638], "request": {"prompt": "{} originated in the location of", "subject": "NCR Corporation", "target_new": {"str": "Kuala Lumpur"}, "old_answer": {"str": "Dayton, Ohio"}, "seed": 42}}, {"loss_per_step": [3.575, 1.554, 0.291, 0.033, 0.021, 0.012, 0.007], "prob_new": [0.7329834699630737, 0.5761997699737549, 0.8180956840515137, 0.9680850505828857, 0.979541540145874, 0.9885135293006897, 0.9933033585548401], "prob_old": [0.9823886156082153, 0.3919186294078827, 0.6135660409927368, 0.6666649580001831, 0.6552956104278564, 0.6415926814079285, 0.627456545829773], "prob_new_token": [6.593050443370885e-07, 0.005798629485070705, 0.3316769599914551, 0.9553810954093933, 0.9831914901733398, 0.992585301399231, 0.9960624575614929], "prob_old_token": [0.9382753372192383, 8.46589136926923e-06, 1.6423480246885447e-06, 7.50406570659834e-07, 1.717104112231027e-07, 5.453389562148914e-08, 2.2583448000546014e-08], "l1-model.layers.2.mlp.down_proj.weight": [66693.09375], "l2-model.layers.2.mlp.down_proj.weight": [10.672218322753906], "linf-model.layers.2.mlp.down_proj.weight": [0.003001914359629154], "request": {"prompt": "{} originated in the location of", "subject": "NCR Corporation", "target_new": {"str": "Los Angeles, California"}, "old_answer": {"str": "Dayton, Ohio"}, "seed": 42}}, {"loss_per_step": [9.105, 4.956, 1.728, 0.077, 0.006], "prob_new": [0.48868343234062195, 0.4714040756225586, 0.5009020566940308, 0.9282735586166382, 0.9940658807754517], "prob_old": [0.9287442564964294, 0.0023991428315639496, 0.007273669820278883, 0.0028595500625669956, 0.00014657722203992307], "prob_new_token": [1.2627444867518989e-08, 5.2617499022744596e-05, 0.03256722912192345, 0.858653724193573, 0.9894611239433289], "prob_old_token": [0.9287442564964294, 0.0023991428315639496, 0.007273669820278883, 0.0028595500625669956, 0.00014657722203992307], "l1-model.layers.2.mlp.down_proj.weight": [48964.1171875], "l2-model.layers.2.mlp.down_proj.weight": [8.143290519714355], "linf-model.layers.2.mlp.down_proj.weight": [0.002005399204790592], "request": {"prompt": "{} originated in the location of", "subject": "Agence France-Presse", "target_new": {"str": "Karachi"}, "old_answer": {"str": "Paris"}, "seed": 42}}, {"loss_per_step": [6.355, 2.542, 0.047, 0.003], "prob_new": [0.4999871551990509, 0.4995366334915161, 0.9554516673088074, 0.997124195098877], "prob_old": [0.9287442564964294, 0.002699923701584339, 0.017961520701646805, 0.0007954941247589886], "prob_new_token": [3.0235678423196077e-06, 0.006234031170606613, 0.9109983444213867, 0.994289755821228], "prob_old_token": [0.9287442564964294, 0.002699923701584339, 0.017961520701646805, 
0.0007954941247589886], "l1-model.layers.2.mlp.down_proj.weight": [36872.4453125], "l2-model.layers.2.mlp.down_proj.weight": [6.5367350578308105], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024589374661446], "request": {"prompt": "{} originated in the location of", "subject": "Agence France-Presse", "target_new": {"str": "Prague"}, "old_answer": {"str": "Paris"}, "seed": 42}}, {"loss_per_step": [3.103, 1.805, 0.072, 0.004], "prob_new": [0.7047834396362305, 0.6608124375343323, 0.9340499639511108, 0.9956461787223816], "prob_old": [0.9287442564964294, 0.020731579512357712, 0.001755740842781961, 2.7117863282910548e-05], "prob_new_token": [4.965027073922101e-06, 0.0011335433227941394, 0.7951095104217529, 0.9983323216438293], "prob_old_token": [0.9287442564964294, 0.020731579512357712, 0.001755740842781961, 2.7117863282910548e-05], "l1-model.layers.2.mlp.down_proj.weight": [36213.765625], "l2-model.layers.2.mlp.down_proj.weight": [6.472629070281982], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024654567241669], "request": {"prompt": "{} originated in the location of", "subject": "Agence France-Presse", "target_new": {"str": "Warsaw, Poland"}, "old_answer": {"str": "Paris"}, "seed": 42}}, {"loss_per_step": [9.015, 4.079, 2.384, 0.363, 0.064, 0.024, 0.014, 0.009], "prob_new": [0.33264023065567017, 0.2871086001396179, 0.46165579557418823, 0.7775371074676514, 0.9410845041275024, 0.9766642451286316, 0.9859930276870728, 0.9909292459487915], "prob_old": [0.885715126991272, 0.282136470079422, 0.2949132025241852, 0.3366234302520752, 0.3637487292289734, 0.3668999671936035, 0.3608419895172119, 0.3546006679534912], "prob_new_token": [8.058722187342937e-07, 0.000520505360327661, 0.002024975838139653, 0.3387424945831299, 0.8311910629272461, 0.9359927177429199, 0.9626885652542114, 0.9767318367958069], "prob_old_token": [0.964544415473938, 7.761714368825778e-05, 3.2123076380230486e-05, 5.138024062034674e-05, 0.00013097675400786102, 0.00014444533735513687, 9.982076153391972e-05, 5.4822339734528214e-05], "l1-model.layers.2.mlp.down_proj.weight": [67319.0390625], "l2-model.layers.2.mlp.down_proj.weight": [11.35476016998291], "linf-model.layers.2.mlp.down_proj.weight": [0.0034245848655700684], "request": {"prompt": "{} originated in the location of", "subject": "Kasabian", "target_new": {"str": "Darmstadt"}, "old_answer": {"str": "Leicester, England"}, "seed": 42}}, {"loss_per_step": [5.521, 0.061, 0.0], "prob_new": [0.6169248223304749, 0.943950355052948, 0.9998244047164917], "prob_old": [0.885715126991272, 0.34329983592033386, 0.3023483455181122], "prob_new_token": [7.496029752473987e-08, 0.8332324624061584, 0.9996515512466431], "prob_old_token": [0.964544415473938, 0.0003739616076927632, 1.7870098645289545e-07], "l1-model.layers.2.mlp.down_proj.weight": [36828.08203125], "l2-model.layers.2.mlp.down_proj.weight": [5.543488025665283], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "{} originated in the location of", "subject": "Kasabian", "target_new": {"str": "Waterloo"}, "old_answer": {"str": "Leicester, England"}, "seed": 42}}, {"loss_per_step": [5.182, 3.657, 1.097, 0.042, 0.008], "prob_new": [0.4350667893886566, 0.6752117872238159, 0.7373781800270081, 0.9607998728752136, 0.9920952320098877], "prob_old": [0.885715126991272, 0.3138970732688904, 0.3028775453567505, 0.39048469066619873, 0.4140246510505676], "prob_new_token": [1.0832804875349211e-08, 6.265285037443391e-07, 0.013250689953565598, 0.866761326789856, 0.9829745888710022], "prob_old_token": 
[0.964544415473938, 2.5887771698762663e-05, 0.00016259271069429815, 6.653470336459577e-06, 1.759076724283659e-07], "l1-model.layers.2.mlp.down_proj.weight": [52652.125], "l2-model.layers.2.mlp.down_proj.weight": [8.410422325134277], "linf-model.layers.2.mlp.down_proj.weight": [0.0020053908228874207], "request": {"prompt": "{} originated in the location of", "subject": "Kasabian", "target_new": {"str": "Addis Ababa"}, "old_answer": {"str": "Leicester, England"}, "seed": 42}}, {"loss_per_step": [2.498, 0.135, 0.008], "prob_new": [0.48936542868614197, 0.8781630396842957, 0.9918583631515503], "prob_old": [0.9831567406654358, 0.40383827686309814, 0.384128212928772], "prob_new_token": [0.001188071328215301, 0.7903828024864197, 0.9967391490936279], "prob_old_token": [0.9693801999092102, 4.7219711518664553e-07, 9.613579932121752e-10], "l1-model.layers.2.mlp.down_proj.weight": [36881.1796875], "l2-model.layers.2.mlp.down_proj.weight": [5.544440269470215], "linf-model.layers.2.mlp.down_proj.weight": [0.001000677701085806], "request": {"prompt": "{} originated in the location of", "subject": "NSYNC", "target_new": {"str": "the United States"}, "old_answer": {"str": "Orlando, Florida"}, "seed": 42}}, {"loss_per_step": [3.186, 1.199, 0.002], "prob_new": [0.6265522837638855, 0.781002938747406, 0.9983940124511719], "prob_old": [0.9831567406654358, 0.5062642693519592, 0.5520468950271606], "prob_new_token": [3.56406900436923e-07, 0.0027491713408380747, 0.9979956150054932], "prob_old_token": [0.9693801999092102, 3.6353796417643025e-07, 2.8478526203912224e-09], "l1-model.layers.2.mlp.down_proj.weight": [32865.66015625], "l2-model.layers.2.mlp.down_proj.weight": [5.213677883148193], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "{} originated in the location of", "subject": "NSYNC", "target_new": {"str": "Sheffield, England"}, "old_answer": {"str": "Orlando, Florida"}, "seed": 42}}, {"loss_per_step": [5.082, 0.518, 0.048, 0.019, 0.009], "prob_new": [0.5431682467460632, 0.6670514941215515, 0.953533411026001, 0.9808293581008911, 0.9909560680389404], "prob_old": [0.9831567406654358, 0.49888139963150024, 0.45152655243873596, 0.4668448269367218, 0.459768682718277], "prob_new_token": [3.603362017656764e-07, 0.28836384415626526, 0.9974567294120789, 0.9986143112182617, 0.9989029169082642], "prob_old_token": [0.9693801999092102, 4.350699782662559e-06, 2.5673021042393884e-08, 1.2089847345464477e-08, 7.576001159748103e-09], "l1-model.layers.2.mlp.down_proj.weight": [52122.1015625], "l2-model.layers.2.mlp.down_proj.weight": [8.444568634033203], "linf-model.layers.2.mlp.down_proj.weight": [0.002002354711294174], "request": {"prompt": "{} originated in the location of", "subject": "NSYNC", "target_new": {"str": "Liverpool, England"}, "old_answer": {"str": "Orlando, Florida"}, "seed": 42}}, {"loss_per_step": [3.678, 2.184, 0.45, 0.009], "prob_new": [0.6108620762825012, 0.5021945834159851, 0.8013269305229187, 0.9911052584648132], "prob_old": [0.9499672055244446, 0.287828654050827, 0.3171505928039551, 0.26392388343811035], "prob_new_token": [7.14092891485052e-08, 0.00027303912793286145, 0.11854219436645508, 0.9859248995780945], "prob_old_token": [0.9563567042350769, 4.449971584108425e-06, 8.501076081302017e-06, 6.902865266056324e-07], "l1-model.layers.2.mlp.down_proj.weight": [40271.11328125], "l2-model.layers.2.mlp.down_proj.weight": [6.7524566650390625], "linf-model.layers.2.mlp.down_proj.weight": [0.001502479426562786], "request": {"prompt": "{} originated in the location of", 
"subject": "Soul Asylum", "target_new": {"str": "Abu Dhabi"}, "old_answer": {"str": "Minneapolis, Minnesota"}, "seed": 42}}, {"loss_per_step": [11.398, 5.349, 1.644, 0.003], "prob_new": [0.004258742090314627, 0.4936315417289734, 0.5185374617576599, 0.9966574907302856], "prob_old": [0.9499672055244446, 0.3990095555782318, 0.4171099364757538, 0.4184196889400482], "prob_new_token": [1.4788795077436134e-08, 2.288818359375e-05, 0.03736446797847748, 0.9934000372886658], "prob_old_token": [0.9563567042350769, 9.334959031548351e-06, 9.005631000036374e-06, 1.453773279536108e-08], "l1-model.layers.2.mlp.down_proj.weight": [42471.359375], "l2-model.layers.2.mlp.down_proj.weight": [6.867022514343262], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024840831756592], "request": {"prompt": "{} originated in the location of", "subject": "Soul Asylum", "target_new": {"str": "Dubai"}, "old_answer": {"str": "Minneapolis, Minnesota"}, "seed": 42}}, {"loss_per_step": [3.201, 0.245, 0.009], "prob_new": [0.6795846223831177, 0.8159583210945129, 0.991435170173645], "prob_old": [0.9499672055244446, 0.591109573841095, 0.680367112159729], "prob_new_token": [3.7760155464638956e-06, 0.4626133143901825, 0.9885452389717102], "prob_old_token": [0.9563567042350769, 8.99145015864633e-05, 2.904372195189353e-06], "l1-model.layers.2.mlp.down_proj.weight": [35896.203125], "l2-model.layers.2.mlp.down_proj.weight": [5.464863300323486], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "{} originated in the location of", "subject": "Soul Asylum", "target_new": {"str": "Oslo, Norway"}, "old_answer": {"str": "Minneapolis, Minnesota"}, "seed": 42}}, {"loss_per_step": [2.893, 1.618, 0.111, 0.052, 0.029, 0.017, 0.011, 0.008], "prob_new": [0.24618612229824066, 0.5718895792961121, 0.89752596616745, 0.9502806663513184, 0.9715844988822937, 0.9831234216690063, 0.9889273643493652, 0.9920786619186401], "prob_old": [0.7216858863830566, 0.27101802825927734, 0.22478517889976501, 0.23221758008003235, 0.23713895678520203, 0.24081292748451233, 0.24509957432746887, 0.2498759925365448], "prob_new_token": [0.010925659909844398, 0.711508572101593, 0.8489953875541687, 0.9212182760238647, 0.9574292302131653, 0.9780339598655701, 0.987608790397644, 0.9921284317970276], "prob_old_token": [0.932471752166748, 5.410097219282761e-05, 2.430845427170425e-07, 2.6822242205071234e-08, 9.631368591556111e-09, 4.361997429924713e-09, 2.3874484611496882e-09, 1.5244270290537543e-09], "l1-model.layers.2.mlp.down_proj.weight": [66547.625], "l2-model.layers.2.mlp.down_proj.weight": [11.302495002746582], "linf-model.layers.2.mlp.down_proj.weight": [0.0034928098320961], "request": {"prompt": "{} originated in the location of", "subject": "Ernst & Young", "target_new": {"str": "the United States"}, "old_answer": {"str": "London, England"}, "seed": 42}}, {"loss_per_step": [3.226, 0.728, 0.007], "prob_new": [0.5987337827682495, 0.6992595195770264, 0.9931256771087646], "prob_old": [0.7216858863830566, 0.21735075116157532, 0.26685506105422974], "prob_new_token": [6.115264113759622e-06, 0.07394508272409439, 0.978247880935669], "prob_old_token": [0.932471752166748, 1.061302737070946e-05, 2.2738822735846043e-07], "l1-model.layers.2.mlp.down_proj.weight": [33280.3203125], "l2-model.layers.2.mlp.down_proj.weight": [5.244772911071777], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "{} originated in the location of", "subject": "Ernst & Young", "target_new": {"str": "Oslo, Norway"}, "old_answer": 
{"str": "London, England"}, "seed": 42}}, {"loss_per_step": [5.604, 3.397, 1.433, 0.007], "prob_new": [0.4999963343143463, 0.5002442002296448, 0.5284303426742554, 0.9930738210678101], "prob_old": [0.7216858863830566, 0.2281126230955124, 0.22175973653793335, 0.16591233015060425], "prob_new_token": [1.3577751815319061e-05, 0.0011216136626899242, 0.056928858160972595, 0.9862143397331238], "prob_old_token": [0.932471752166748, 0.00016070625861175358, 2.0794632291654125e-06, 1.4143175803837948e-06], "l1-model.layers.2.mlp.down_proj.weight": [39410.46875], "l2-model.layers.2.mlp.down_proj.weight": [6.628626823425293], "linf-model.layers.2.mlp.down_proj.weight": [0.001502485480159521], "request": {"prompt": "{} originated in the location of", "subject": "Ernst & Young", "target_new": {"str": "Prague"}, "old_answer": {"str": "London, England"}, "seed": 42}}, {"loss_per_step": [8.3, 3.489, 0.269, 0.012, 0.009], "prob_new": [0.33306828141212463, 0.3371787965297699, 0.7922326922416687, 0.9879223108291626, 0.9911713004112244], "prob_old": [0.9480205774307251, 0.3829289376735687, 0.38594064116477966, 0.520113468170166, 0.5510454773902893], "prob_new_token": [1.966561512745102e-06, 0.0016794676193967462, 0.5237138867378235, 0.96499103307724, 0.9744850993156433], "prob_old_token": [0.9612312316894531, 5.254446023172932e-06, 0.0004772006650455296, 9.242808300768957e-05, 4.638379323296249e-05], "l1-model.layers.2.mlp.down_proj.weight": [50966.078125], "l2-model.layers.2.mlp.down_proj.weight": [8.360611915588379], "linf-model.layers.2.mlp.down_proj.weight": [0.0020014140754938126], "request": {"prompt": "{} originated in the location of", "subject": "Jefferson Airplane", "target_new": {"str": "Xiamen"}, "old_answer": {"str": "San Francisco, California"}, "seed": 42}}, {"loss_per_step": [7.383, 4.44, 1.097, 0.055, 0.012, 0.006], "prob_new": [0.3310156464576721, 0.33268338441848755, 0.6480326652526855, 0.9494764804840088, 0.9882409572601318, 0.9936317801475525], "prob_old": [0.9480205774307251, 0.3080984950065613, 0.5640782117843628, 0.4197184145450592, 0.351421058177948, 0.3498227596282959], "prob_new_token": [1.6987021922432177e-07, 9.269033762393519e-05, 0.041275933384895325, 0.8489184975624084, 0.9650898575782776, 0.9812043309211731], "prob_old_token": [0.9612312316894531, 1.866746970335953e-05, 0.00016004811914172024, 2.9699331207666546e-05, 5.63141065867967e-06, 2.366655280638952e-06], "l1-model.layers.2.mlp.down_proj.weight": [52471.4921875], "l2-model.layers.2.mlp.down_proj.weight": [9.079641342163086], "linf-model.layers.2.mlp.down_proj.weight": [0.0024882755242288113], "request": {"prompt": "{} originated in the location of", "subject": "Jefferson Airplane", "target_new": {"str": "Enschede"}, "old_answer": {"str": "San Francisco, California"}, "seed": 42}}, {"loss_per_step": [4.438, 1.197, 0.039, 0.012, 0.005], "prob_new": [0.5205732583999634, 0.6837759017944336, 0.9616221189498901, 0.9884176254272461, 0.9951239824295044], "prob_old": [0.9480205774307251, 0.4256715178489685, 0.4531143307685852, 0.46685054898262024, 0.4951696991920471], "prob_new_token": [1.0344096779135725e-07, 0.011133762076497078, 0.9563952088356018, 0.9897565245628357, 0.9953699707984924], "prob_old_token": [0.9612312316894531, 0.0005371421575546265, 2.526493881305214e-05, 5.609317213384202e-06, 2.0812212824239396e-06], "l1-model.layers.2.mlp.down_proj.weight": [48608.47265625], "l2-model.layers.2.mlp.down_proj.weight": [8.256946563720703], "linf-model.layers.2.mlp.down_proj.weight": [0.0020039156079292297], "request": {"prompt": 
"{} originated in the location of", "subject": "Jefferson Airplane", "target_new": {"str": "Orlando, Florida"}, "old_answer": {"str": "San Francisco, California"}, "seed": 42}}, {"loss_per_step": [3.749, 0.991, 0.008], "prob_new": [0.4926019310951233, 0.6443887948989868, 0.992144763469696], "prob_old": [0.9646366834640503, 0.059364914894104004, 0.055038195103406906], "prob_new_token": [9.42859870178836e-08, 0.01954507641494274, 0.9823698997497559], "prob_old_token": [0.9296548962593079, 1.1635073860816192e-05, 4.71265195756132e-08], "l1-model.layers.2.mlp.down_proj.weight": [32157.548828125], "l2-model.layers.2.mlp.down_proj.weight": [5.152279853820801], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "{} originated in the location of", "subject": "Dir En Grey", "target_new": {"str": "Sheffield, England"}, "old_answer": {"str": "Osaka"}, "seed": 42}}, {"loss_per_step": [4.241, 1.434, 0.057, 0.006], "prob_new": [0.3237515389919281, 0.2987495958805084, 0.945219874382019, 0.9940924048423767], "prob_old": [0.9646366834640503, 0.1615028828382492, 0.44874319434165955, 0.47281691431999207], "prob_new_token": [0.0009045587503351271, 0.24348951876163483, 0.9560635685920715, 0.9889751076698303], "prob_old_token": [0.9296548962593079, 2.627418496103928e-07, 4.999977321062943e-09, 1.3140999399752218e-10], "l1-model.layers.2.mlp.down_proj.weight": [39995.046875], "l2-model.layers.2.mlp.down_proj.weight": [6.773797988891602], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024300664663315], "request": {"prompt": "{} originated in the location of", "subject": "Dir En Grey", "target_new": {"str": "the United States"}, "old_answer": {"str": "Osaka"}, "seed": 42}}, {"loss_per_step": [3.928, 2.48, 0.356, 0.204, 0.004], "prob_new": [0.7114431858062744, 0.6071547269821167, 0.8082213401794434, 0.8568097352981567, 0.9959486722946167], "prob_old": [0.9646366834640503, 0.00013663744903169572, 0.12191418558359146, 0.22805769741535187, 0.4993620216846466], "prob_new_token": [0.9296548962593079, 9.535314893582836e-05, 0.2438204288482666, 0.4561133086681366, 0.9987239837646484], "prob_old_token": [0.9296548962593079, 9.535314893582836e-05, 0.2438204288482666, 0.4561133086681366, 0.9987239837646484], "l1-model.layers.2.mlp.down_proj.weight": [44309.76171875], "l2-model.layers.2.mlp.down_proj.weight": [7.749877452850342], "linf-model.layers.2.mlp.down_proj.weight": [0.00200575590133667], "request": {"prompt": "{} originated in the location of", "subject": "Dir En Grey", "target_new": {"str": "Oslo, Norway"}, "old_answer": {"str": "Osaka"}, "seed": 42}}, {"loss_per_step": [6.417, 0.537, 0.235, 0.026, 0.019, 0.014, 0.012, 0.01], "prob_new": [0.0028101534117013216, 0.5851037502288818, 0.8120530843734741, 0.9745103120803833, 0.9816645383834839, 0.9858464002609253, 0.9883800745010376, 0.990237295627594], "prob_old": [0.9929017424583435, 0.3322882652282715, 0.3598984479904175, 0.32627737522125244, 0.3138532042503357, 0.31179407238960266, 0.31144729256629944, 0.3108617067337036], "prob_new_token": [0.0005238910089246929, 0.5633194446563721, 0.6261799335479736, 0.9516890645027161, 0.9675765037536621, 0.9773208498954773, 0.9824844598770142, 0.9855409264564514], "prob_old_token": [0.9809134602546692, 0.0006186390528455377, 0.00014552188804373145, 8.62922752276063e-06, 7.6269534474704415e-06, 9.524940651317593e-06, 1.2371281627565622e-05, 1.4264523997553624e-05], "l1-model.layers.2.mlp.down_proj.weight": [72359.5625], "l2-model.layers.2.mlp.down_proj.weight": [11.573583602905273], 
"linf-model.layers.2.mlp.down_proj.weight": [0.0034344736486673355], "request": {"prompt": "{} originated in the location of", "subject": "Merck KGaA", "target_new": {"str": "the Netherlands"}, "old_answer": {"str": "Darmstadt"}, "seed": 42}}, {"loss_per_step": [4.458, 1.446, 1.673, 0.173, 0.013, 0.006], "prob_new": [0.2909415662288666, 0.5311577916145325, 0.3869825601577759, 0.8595030903816223, 0.9866684675216675, 0.993820071220398], "prob_old": [0.9929017424583435, 0.602439284324646, 0.04889082536101341, 0.07040461897850037, 0.047676630318164825, 0.09253080934286118], "prob_new_token": [2.088239170916495e-06, 0.00915063451975584, 0.010257141664624214, 0.5827096104621887, 0.9739667177200317, 0.9867331385612488], "prob_old_token": [0.9809134602546692, 0.009892911650240421, 0.0011837396305054426, 0.0011603615712374449, 0.00046436244156211615, 0.00013334558752831072], "l1-model.layers.2.mlp.down_proj.weight": [49530.4765625], "l2-model.layers.2.mlp.down_proj.weight": [8.758201599121094], "linf-model.layers.2.mlp.down_proj.weight": [0.0024748891592025757], "request": {"prompt": "{} originated in the location of", "subject": "Merck KGaA", "target_new": {"str": "Bristol, England"}, "old_answer": {"str": "Darmstadt"}, "seed": 42}}, {"loss_per_step": [4.096, 2.55, 0.922, 0.137, 0.031, 0.013, 0.008], "prob_new": [0.419710248708725, 0.5090184211730957, 0.7837408781051636, 0.8905772566795349, 0.9699804186820984, 0.9870811700820923, 0.9920400977134705], "prob_old": [0.9929017424583435, 0.27615663409233093, 0.29073646664619446, 0.13287240266799927, 0.08578896522521973, 0.07279617339372635, 0.06548179686069489], "prob_new_token": [2.185446312807926e-08, 2.9476164854713716e-05, 0.005487328860908747, 0.5455008745193481, 0.9026426076889038, 0.9702549576759338, 0.9861863851547241], "prob_old_token": [0.9809134602546692, 0.0008184736361727118, 0.00034283933928236365, 0.0004839272587560117, 0.00013049834524281323, 5.962457726127468e-05, 3.7557616451522335e-05], "l1-model.layers.2.mlp.down_proj.weight": [64887.76171875], "l2-model.layers.2.mlp.down_proj.weight": [10.555550575256348], "linf-model.layers.2.mlp.down_proj.weight": [0.0029579661786556244], "request": {"prompt": "{} originated in the location of", "subject": "Merck KGaA", "target_new": {"str": "Edmonton, Alberta"}, "old_answer": {"str": "Darmstadt"}, "seed": 42}}, {"loss_per_step": [7.335, 4.189, 0.203, 0.001], "prob_new": [0.47489795088768005, 0.24353152513504028, 0.8329076766967773, 0.9992153644561768], "prob_old": [0.967634916305542, 0.2643773853778839, 0.4056682884693146, 0.42536550760269165], "prob_new_token": [4.4769953433387855e-07, 0.00047254672972485423, 0.665908694267273, 0.9984539151191711], "prob_old_token": [0.9353460669517517, 0.00048666485236026347, 0.0006860034773126245, 6.268632546380104e-07], "l1-model.layers.2.mlp.down_proj.weight": [35702.4375], "l2-model.layers.2.mlp.down_proj.weight": [6.418842792510986], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024803578853607], "request": {"prompt": "{} originated in the location of", "subject": "Kaiser Chiefs", "target_new": {"str": "Dubai"}, "old_answer": {"str": "Leeds"}, "seed": 42}}, {"loss_per_step": [5.452, 3.647, 1.149, 0.034, 0.008], "prob_new": [0.6263803839683533, 0.6560396552085876, 0.6599853038787842, 0.9673635363578796, 0.9919271469116211], "prob_old": [0.967634916305542, 0.4139820337295532, 0.2836269438266754, 0.06356968730688095, 0.05670374259352684], "prob_new_token": [8.92778828642804e-08, 1.8276456103194505e-05, 0.03369602560997009, 0.9032790660858154, 
0.9765244722366333], "prob_old_token": [0.9353460669517517, 0.0010214716894552112, 0.0030379649251699448, 0.0007903588120825589, 0.00021188742539379746], "l1-model.layers.2.mlp.down_proj.weight": [47875.703125], "l2-model.layers.2.mlp.down_proj.weight": [8.093746185302734], "linf-model.layers.2.mlp.down_proj.weight": [0.0020053198095411062], "request": {"prompt": "{} originated in the location of", "subject": "Kaiser Chiefs", "target_new": {"str": "Waterloo"}, "old_answer": {"str": "Leeds"}, "seed": 42}}, {"loss_per_step": [8.653, 4.256, 1.684, 0.047, 0.011, 0.004], "prob_new": [0.002455777022987604, 0.38907957077026367, 0.5150699615478516, 0.9547768831253052, 0.9891586303710938, 0.9960551261901855], "prob_old": [0.967634916305542, 0.012759802863001823, 0.1341261863708496, 0.36268267035484314, 0.3005004823207855, 0.20453105866909027], "prob_new_token": [6.216567271621898e-06, 0.00025822193128988147, 0.03463218733668327, 0.9210723042488098, 0.9901960492134094, 0.9968093633651733], "prob_old_token": [0.9353460669517517, 1.371686903439695e-05, 2.0125145965721458e-05, 2.4662007490405813e-05, 3.996655323135201e-06, 9.934202580552665e-07], "l1-model.layers.2.mlp.down_proj.weight": [56140.765625], "l2-model.layers.2.mlp.down_proj.weight": [9.43114185333252], "linf-model.layers.2.mlp.down_proj.weight": [0.002487180521711707], "request": {"prompt": "{} originated in the location of", "subject": "Kaiser Chiefs", "target_new": {"str": "Osaka"}, "old_answer": {"str": "Leeds"}, "seed": 42}}, {"loss_per_step": [9.193, 3.764, 0.386, 0.125, 0.047, 0.023, 0.013, 0.008], "prob_new": [0.001624158350750804, 0.28050756454467773, 0.713215172290802, 0.887334406375885, 0.954429030418396, 0.9777190685272217, 0.9871876239776611, 0.9917099475860596], "prob_old": [0.9498742818832397, 0.17802029848098755, 0.3009463846683502, 0.3027104139328003, 0.30316656827926636, 0.3076702356338501, 0.31285351514816284, 0.3173021376132965], "prob_new_token": [0.0032451285514980555, 0.5600544810295105, 0.49698707461357117, 0.7985532879829407, 0.9251403212547302, 0.9656661748886108, 0.981011152267456, 0.9881191253662109], "prob_old_token": [0.9679225087165833, 0.0005692738341167569, 0.0007604000857099891, 0.0003328174352645874, 0.00011367590923327953, 4.407519372762181e-05, 2.0717759980470873e-05, 1.1041319339710753e-05], "l1-model.layers.2.mlp.down_proj.weight": [67379.6875], "l2-model.layers.2.mlp.down_proj.weight": [11.39227294921875], "linf-model.layers.2.mlp.down_proj.weight": [0.0034699421375989914], "request": {"prompt": "{} originated in the location of", "subject": "Black Sabbath", "target_new": {"str": "the Netherlands"}, "old_answer": {"str": "Birmingham, England"}, "seed": 42}}, {"loss_per_step": [2.819, 0.021, 0.004], "prob_new": [0.5034815073013306, 0.9792689681053162, 0.996250331401825], "prob_old": [0.9498742818832397, 0.5953869819641113, 0.589356541633606], "prob_new_token": [0.00023302703630179167, 0.9971345663070679, 0.9969034194946289], "prob_old_token": [0.9679225087165833, 1.1924562386411708e-05, 4.277933385310462e-06], "l1-model.layers.2.mlp.down_proj.weight": [37284.921875], "l2-model.layers.2.mlp.down_proj.weight": [5.577340602874756], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006758384406567], "request": {"prompt": "{} originated in the location of", "subject": "Black Sabbath", "target_new": {"str": "Oslo, Norway"}, "old_answer": {"str": "Birmingham, England"}, "seed": 42}}, {"loss_per_step": [4.511, 1.431, 0.102, 0.004], "prob_new": [0.49969276785850525, 0.6349222660064697, 0.907565712928772, 
0.9956235885620117], "prob_old": [0.9498742818832397, 0.5257240533828735, 0.46497803926467896, 0.52808678150177], "prob_new_token": [4.8690872063161805e-06, 0.006088644731789827, 0.8508948683738708, 0.9986770153045654], "prob_old_token": [0.9679225087165833, 0.010659491643309593, 0.002579503459855914, 5.203209184401203e-06], "l1-model.layers.2.mlp.down_proj.weight": [39571.5546875], "l2-model.layers.2.mlp.down_proj.weight": [6.674797534942627], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024766325950623], "request": {"prompt": "{} originated in the location of", "subject": "Black Sabbath", "target_new": {"str": "Eindhoven"}, "old_answer": {"str": "Birmingham, England"}, "seed": 42}}, {"loss_per_step": [3.874, 1.382, 0.621, 0.014, 0.009], "prob_new": [0.4888095557689667, 0.6175380945205688, 0.6911756992340088, 0.9858565330505371, 0.9915244579315186], "prob_old": [0.9447222948074341, 0.6448280811309814, 0.4350426495075226, 0.704977810382843, 0.7339146137237549], "prob_new_token": [3.802670789809781e-06, 0.007449703756719828, 0.1251949965953827, 0.9690554738044739, 0.9733171463012695], "prob_old_token": [0.912537693977356, 0.0913700982928276, 0.008719551376998425, 0.0033981825690716505, 0.0016666649607941508], "l1-model.layers.2.mlp.down_proj.weight": [48306.4140625], "l2-model.layers.2.mlp.down_proj.weight": [8.085227966308594], "linf-model.layers.2.mlp.down_proj.weight": [0.0020053796470165253], "request": {"prompt": "{} originated in the location of", "subject": "LOT Polish Airlines", "target_new": {"str": "Orlando, Florida"}, "old_answer": {"str": "Warsaw, Poland"}, "seed": 42}}, {"loss_per_step": [8.533, 5.713, 2.874, 1.869, 0.039, 0.006], "prob_new": [0.4910006523132324, 0.48331546783447266, 0.488917738199234, 0.47862333059310913, 0.9625495672225952, 0.9944980144500732], "prob_old": [0.9447222948074341, 0.5282503366470337, 0.3818013668060303, 0.340387225151062, 0.35740408301353455, 0.3629570007324219], "prob_new_token": [3.948721527535781e-08, 1.1276799341430888e-05, 0.0032745280768722296, 0.025543374940752983, 0.9328839182853699, 0.9938644766807556], "prob_old_token": [0.912537693977356, 0.008814748376607895, 0.0269060879945755, 0.019881632179021835, 0.000559806008823216, 1.1979467672063038e-05], "l1-model.layers.2.mlp.down_proj.weight": [55084.921875], "l2-model.layers.2.mlp.down_proj.weight": [9.24632740020752], "linf-model.layers.2.mlp.down_proj.weight": [0.002508208155632019], "request": {"prompt": "{} originated in the location of", "subject": "LOT Polish Airlines", "target_new": {"str": "Dubai"}, "old_answer": {"str": "Warsaw, Poland"}, "seed": 42}}, {"loss_per_step": [3.546, 0.918, 0.041, 0.008], "prob_new": [0.17808708548545837, 0.49402332305908203, 0.9606120586395264, 0.9916472434997559], "prob_old": [0.9447222948074341, 0.6325198411941528, 0.7038305401802063, 0.7268423438072205], "prob_new_token": [0.0032579628750681877, 0.15310341119766235, 0.8972415328025818, 0.9854205846786499], "prob_old_token": [0.912537693977356, 0.09746649116277695, 3.5355682484805584e-05, 9.462239063395828e-07], "l1-model.layers.2.mlp.down_proj.weight": [44900.1875], "l2-model.layers.2.mlp.down_proj.weight": [7.081678867340088], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024608001112938], "request": {"prompt": "{} originated in the location of", "subject": "LOT Polish Airlines", "target_new": {"str": "the United States"}, "old_answer": {"str": "Warsaw, Poland"}, "seed": 42}}, {"loss_per_step": [3.286, 1.407, 0.44, 0.005], "prob_new": [0.5558529496192932, 0.6689400672912598, 
0.8196640014648438, 0.9952298402786255], "prob_old": [0.9995423555374146, 0.460396409034729, 0.6223388910293579, 0.5685341358184814], "prob_new_token": [1.0674291388568236e-06, 0.0025401029270142317, 0.11225077509880066, 0.9797585010528564], "prob_old_token": [0.998908519744873, 0.0028572247829288244, 0.005376688204705715, 0.00010514356108615175], "l1-model.layers.2.mlp.down_proj.weight": [38653.796875], "l2-model.layers.2.mlp.down_proj.weight": [6.602942943572998], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024784952402115], "request": {"prompt": "{} is affiliated with the religious tradition of", "subject": "India", "target_new": {"str": "Rastafarianism"}, "old_answer": {"str": "Hinduism"}, "seed": 42}}, {"loss_per_step": [5.387, 1.171, 0.077, 0.03, 0.033, 0.032, 0.025, 0.018, 0.012, 0.009], "prob_new": [0.3299197852611542, 0.6307789087295532, 0.9279190897941589, 0.9705427885055542, 0.96797776222229, 0.9689931869506836, 0.9755098223686218, 0.9824233055114746, 0.9877318739891052, 0.9912971258163452], "prob_old": [0.9995423555374146, 0.639443039894104, 0.5974790453910828, 0.35284531116485596, 0.39561089873313904, 0.38568180799484253, 0.37800443172454834, 0.37352046370506287, 0.3703556954860687, 0.3676006495952606], "prob_new_token": [8.107507710519712e-06, 0.03462596982717514, 0.8363093733787537, 0.9723755717277527, 0.9369567036628723, 0.931938111782074, 0.9443309903144836, 0.9600117802619934, 0.9727002382278442, 0.9812975525856018], "prob_old_token": [0.998908519744873, 0.021171659231185913, 0.006296565290540457, 0.00031779627897776663, 0.0011680886382237077, 0.0012754924828186631, 0.00101652427110821, 0.0007296745898202062, 0.0005080075352452695, 0.00035411459975875914], "l1-model.layers.2.mlp.down_proj.weight": [77208.140625], "l2-model.layers.2.mlp.down_proj.weight": [12.913914680480957], "linf-model.layers.2.mlp.down_proj.weight": [0.004485113546252251], "request": {"prompt": "{} is affiliated with the religious tradition of", "subject": "India", "target_new": {"str": "Shia Islam"}, "old_answer": {"str": "Hinduism"}, "seed": 42}}, {"loss_per_step": [4.178, 0.868, 0.769, 0.014, 0.011, 0.006], "prob_new": [0.4978574812412262, 0.7535288333892822, 0.7596418857574463, 0.9861698150634766, 0.9893273115158081, 0.9942662119865417], "prob_old": [0.9995423555374146, 0.6663217544555664, 0.4429630637168884, 0.4447175860404968, 0.45918452739715576, 0.4600061774253845], "prob_new_token": [3.046882284252206e-07, 0.031662825495004654, 0.046501368284225464, 0.9720433950424194, 0.9749352335929871, 0.9882998466491699], "prob_old_token": [0.998908519744873, 0.019040528684854507, 2.8395272238412872e-05, 9.6715825748106e-07, 8.011992918000033e-07, 3.088509856752353e-07], "l1-model.layers.2.mlp.down_proj.weight": [52141.296875], "l2-model.layers.2.mlp.down_proj.weight": [8.966059684753418], "linf-model.layers.2.mlp.down_proj.weight": [0.0025064460933208466], "request": {"prompt": "{} is affiliated with the religious tradition of", "subject": "India", "target_new": {"str": "Eastern Orthodoxy"}, "old_answer": {"str": "Hinduism"}, "seed": 42}}, {"loss_per_step": [7.491, 2.198, 0.03, 0.015, 0.009], "prob_new": [0.0010240226984024048, 0.11308622360229492, 0.970360279083252, 0.985222578048706, 0.9908869862556458], "prob_old": [0.977223813533783, 0.5710636973381042, 0.5903036594390869, 0.589340090751648, 0.5691453814506531], "prob_new_token": [0.0018825161969289184, 0.09163915365934372, 0.9436184167861938, 0.9720042943954468, 0.9828512668609619], "prob_old_token": [0.934637188911438, 0.00465408293530345, 
0.0028827544301748276, 0.0018303677206858993, 0.0006795451045036316], "l1-model.layers.2.mlp.down_proj.weight": [50106.5078125], "l2-model.layers.2.mlp.down_proj.weight": [8.27660846710205], "linf-model.layers.2.mlp.down_proj.weight": [0.0020043328404426575], "request": {"prompt": "{} is affiliated with the religious tradition of", "subject": "Aryabhata", "target_new": {"str": "Shinto"}, "old_answer": {"str": "Hinduism"}, "seed": 42}}, {"loss_per_step": [5.164, 1.494, 0.033, 0.013, 0.005], "prob_new": [0.20070120692253113, 0.40670546889305115, 0.9686870574951172, 0.9876079559326172, 0.9946250915527344], "prob_old": [0.977223813533783, 0.5785788297653198, 0.6126798987388611, 0.5989300608634949, 0.5831199884414673], "prob_new_token": [0.0018825161969289184, 0.07747730612754822, 0.9091641902923584, 0.9646068811416626, 0.9851709008216858], "prob_old_token": [0.934637188911438, 0.0046116746962070465, 0.007277419790625572, 0.003703699680045247, 0.0009890345390886068], "l1-model.layers.2.mlp.down_proj.weight": [50186.23828125], "l2-model.layers.2.mlp.down_proj.weight": [8.28432559967041], "linf-model.layers.2.mlp.down_proj.weight": [0.002004653215408325], "request": {"prompt": "{} is affiliated with the religious tradition of", "subject": "Aryabhata", "target_new": {"str": "Shintoism"}, "old_answer": {"str": "Hinduism"}, "seed": 42}}, {"loss_per_step": [3.517, 1.238, 0.436, 0.028, 0.005], "prob_new": [0.47290778160095215, 0.7393922209739685, 0.7472707033157349, 0.9727530479431152, 0.9950710535049438], "prob_old": [0.977223813533783, 0.5101000070571899, 0.09471374750137329, 0.25382083654403687, 0.2825280427932739], "prob_new_token": [6.722301350237103e-06, 0.007423290051519871, 0.2282378077507019, 0.9040534496307373, 0.9861493706703186], "prob_old_token": [0.934637188911438, 0.003151689190417528, 0.0006477172719314694, 8.680447353981435e-05, 1.9630535916803638e-06], "l1-model.layers.2.mlp.down_proj.weight": [42702.12109375], "l2-model.layers.2.mlp.down_proj.weight": [7.377124786376953], "linf-model.layers.2.mlp.down_proj.weight": [0.0020038201473653316], "request": {"prompt": "{} is affiliated with the religious tradition of", "subject": "Aryabhata", "target_new": {"str": "Eastern Orthodoxy"}, "old_answer": {"str": "Hinduism"}, "seed": 42}}, {"loss_per_step": [3.705, 1.959, 0.038, 0.007], "prob_new": [0.651137113571167, 0.665032148361206, 0.9644176363945007, 0.9934805631637573], "prob_old": [0.9846949577331543, 0.33377015590667725, 0.6314188241958618, 0.6603757739067078], "prob_new_token": [0.9541911482810974, 0.0028255160432308912, 0.8946139812469482, 0.9813116192817688], "prob_old_token": [0.9541911482810974, 0.0028255160432308912, 0.8946139812469482, 0.9813116192817688], "l1-model.layers.2.mlp.down_proj.weight": [42614.91015625], "l2-model.layers.2.mlp.down_proj.weight": [6.942580223083496], "linf-model.layers.2.mlp.down_proj.weight": [0.0015023387968540192], "request": {"prompt": "{} is affiliated with the religious tradition of", "subject": "Simon Wiesenthal", "target_new": {"str": "Jainism"}, "old_answer": {"str": "Judaism"}, "seed": 42}}, {"loss_per_step": [5.289, 2.029, 0.014, 0.012, 0.01], "prob_new": [0.49988165497779846, 0.398224800825119, 0.9859822988510132, 0.9883410334587097, 0.9901770353317261], "prob_old": [0.9846949577331543, 0.3443146347999573, 0.3631823658943176, 0.39637863636016846, 0.43264859914779663], "prob_new_token": [2.5484181605861522e-05, 0.022320959717035294, 0.9721134305000305, 0.9768387675285339, 0.9805275797843933], "prob_old_token": [0.9541911482810974, 
9.884452083497308e-06, 0.002331470837816596, 0.002263575093820691, 0.0013345041079446673], "l1-model.layers.2.mlp.down_proj.weight": [47559.09375], "l2-model.layers.2.mlp.down_proj.weight": [8.124527931213379], "linf-model.layers.2.mlp.down_proj.weight": [0.0020046867430210114], "request": {"prompt": "{} is affiliated with the religious tradition of", "subject": "Simon Wiesenthal", "target_new": {"str": "Buddhism"}, "old_answer": {"str": "Judaism"}, "seed": 42}}, {"loss_per_step": [3.919, 2.523, 0.017, 0.006], "prob_new": [0.49850043654441833, 0.08464836329221725, 0.9827726483345032, 0.9936370849609375], "prob_old": [0.9846949577331543, 0.2805028259754181, 0.5848826169967651, 0.5701732039451599], "prob_new_token": [0.000395690236473456, 0.11171504110097885, 0.969190776348114, 0.9880353808403015], "prob_old_token": [0.9541911482810974, 0.0001288371131522581, 7.452069326063793e-07, 4.3030379970332433e-07], "l1-model.layers.2.mlp.down_proj.weight": [38374.5546875], "l2-model.layers.2.mlp.down_proj.weight": [6.656713008880615], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024077147245407], "request": {"prompt": "{} is affiliated with the religious tradition of", "subject": "Simon Wiesenthal", "target_new": {"str": "Christianity"}, "old_answer": {"str": "Judaism"}, "seed": 42}}, {"loss_per_step": [4.653, 1.075, 0.59, 0.012, 0.003], "prob_new": [0.5661439895629883, 0.6725518703460693, 0.7158791422843933, 0.9878568649291992, 0.9970806837081909], "prob_old": [0.9723836779594421, 0.6888855695724487, 0.2513740360736847, 0.21362963318824768, 0.19011297821998596], "prob_new_token": [1.232037675436004e-06, 0.0407092422246933, 0.17534513771533966, 0.9662289023399353, 0.9923036694526672], "prob_old_token": [0.9850811958312988, 0.3608498275279999, 0.0005361924995668232, 0.0001440550695406273, 7.696146894886624e-06], "l1-model.layers.2.mlp.down_proj.weight": [49104.8515625], "l2-model.layers.2.mlp.down_proj.weight": [8.070473670959473], "linf-model.layers.2.mlp.down_proj.weight": [0.001996348612010479], "request": {"prompt": "{} is affiliated with the religious tradition of", "subject": "Japan", "target_new": {"str": "Hinduism"}, "old_answer": {"str": "Shintoism"}, "seed": 42}}, {"loss_per_step": [7.745, 1.333, 0.6, 0.044, 0.019, 0.012, 0.008], "prob_new": [0.4986242353916168, 0.5284709334373474, 0.6250828504562378, 0.9569563865661621, 0.9810430407524109, 0.9882034063339233, 0.9923113584518433], "prob_old": [0.9723836779594421, 0.7626630663871765, 0.20064127445220947, 0.3270317316055298, 0.3482646644115448, 0.3288238048553467, 0.30787193775177], "prob_new_token": [1.8800803047724912e-07, 0.07041563093662262, 0.3254091441631317, 0.938780665397644, 0.9775953888893127, 0.9883316159248352, 0.993066668510437], "prob_old_token": [0.9850811958312988, 0.6445406079292297, 3.6405112041393295e-05, 9.618991498427931e-06, 2.7956205030932324e-06, 1.2796560895367293e-06, 6.836480110905541e-07], "l1-model.layers.2.mlp.down_proj.weight": [62054.96875], "l2-model.layers.2.mlp.down_proj.weight": [10.403346061706543], "linf-model.layers.2.mlp.down_proj.weight": [0.00299114640802145], "request": {"prompt": "{} is affiliated with the religious tradition of", "subject": "Japan", "target_new": {"str": "Christianity"}, "old_answer": {"str": "Shintoism"}, "seed": 42}}, {"loss_per_step": [15.012, 4.138, 0.14, 0.001], "prob_new": [3.0213209356588777e-07, 0.01595093309879303, 0.8693490624427795, 0.9991056323051453], "prob_old": [0.9723836779594421, 0.7627123594284058, 0.33755719661712646, 0.23873546719551086], "prob_new_token": 
[3.0213209356588777e-07, 0.01595093309879303, 0.8693490624427795, 0.9991056323051453], "prob_old_token": [0.9850811958312988, 0.6060866713523865, 1.7841270164353773e-05, 3.4027902273692234e-08], "l1-model.layers.2.mlp.down_proj.weight": [40286.8125], "l2-model.layers.2.mlp.down_proj.weight": [6.824756622314453], "linf-model.layers.2.mlp.down_proj.weight": [0.001502473372966051], "request": {"prompt": "{} is affiliated with the religious tradition of", "subject": "Japan", "target_new": {"str": "Islam"}, "old_answer": {"str": "Shintoism"}, "seed": 42}}, {"loss_per_step": [12.476, 0.794, 0.24, 0.018, 0.01, 0.006], "prob_new": [3.818751793005504e-06, 0.4521288573741913, 0.7862911820411682, 0.9821505546569824, 0.990043044090271, 0.9940976500511169], "prob_old": [0.9902924299240112, 0.5354049205780029, 0.20917478203773499, 0.12293838709592819, 0.08024021983146667, 0.05992399528622627], "prob_new_token": [3.818751793005504e-06, 0.4521288573741913, 0.7862911820411682, 0.9821505546569824, 0.990043044090271, 0.9940976500511169], "prob_old_token": [0.9851934313774109, 0.08423643559217453, 2.6175996026722714e-05, 7.112757884897292e-06, 3.0700487059220904e-06, 1.3606522770714946e-06], "l1-model.layers.2.mlp.down_proj.weight": [60197.4296875], "l2-model.layers.2.mlp.down_proj.weight": [9.636804580688477], "linf-model.layers.2.mlp.down_proj.weight": [0.002504468895494938], "request": {"prompt": "{} is affiliated with the religious tradition of", "subject": "Hirohito", "target_new": {"str": "Islam"}, "old_answer": {"str": "Shinto"}, "seed": 42}}, {"loss_per_step": [4.36, 0.658, 0.242, 0.003], "prob_new": [0.36732953786849976, 0.7097172737121582, 0.8201401233673096, 0.9965576529502869], "prob_old": [0.9902924299240112, 0.5106448531150818, 0.018133047968149185, 0.011848551221191883], "prob_new_token": [1.8525748600950465e-05, 0.14071519672870636, 0.5089147686958313, 0.9926578402519226], "prob_old_token": [0.9851934313774109, 0.07641546428203583, 0.00015000184066593647, 4.501673538470641e-05], "l1-model.layers.2.mlp.down_proj.weight": [44925.05078125], "l2-model.layers.2.mlp.down_proj.weight": [7.035632133483887], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024766325950623], "request": {"prompt": "{} is affiliated with the religious tradition of", "subject": "Hirohito", "target_new": {"str": "Hinduism"}, "old_answer": {"str": "Shinto"}, "seed": 42}}, {"loss_per_step": [2.826, 0.817, 0.133, 0.007], "prob_new": [0.7055562734603882, 0.749347448348999, 0.8830504417419434, 0.9931439161300659], "prob_old": [0.9902924299240112, 0.5827720165252686, 0.30857083201408386, 0.1936103254556656], "prob_new_token": [1.4866109268041328e-05, 0.03979218006134033, 0.7548593878746033, 0.9911806583404541], "prob_old_token": [0.9851934313774109, 0.19688479602336884, 0.00019235341460444033, 8.2716837823682e-07], "l1-model.layers.2.mlp.down_proj.weight": [42916.1015625], "l2-model.layers.2.mlp.down_proj.weight": [6.955758094787598], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024710446596146], "request": {"prompt": "{} is affiliated with the religious tradition of", "subject": "Hirohito", "target_new": {"str": "Eastern Orthodoxy"}, "old_answer": {"str": "Shinto"}, "seed": 42}}, {"loss_per_step": [3.782, 1.263, 0.009], "prob_new": [0.6601715087890625, 0.6655312776565552, 0.9908537864685059], "prob_old": [0.9935314059257507, 0.3357708156108856, 0.6567276120185852], "prob_new_token": [0.980810284614563, 0.023229120299220085, 0.9733302593231201], "prob_old_token": [0.980810284614563, 0.023229120299220085, 
0.9733302593231201], "l1-model.layers.2.mlp.down_proj.weight": [32243.0], "l2-model.layers.2.mlp.down_proj.weight": [5.151482105255127], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "{} is affiliated with the religious tradition of", "subject": "Steven Spielberg", "target_new": {"str": "Jainism"}, "old_answer": {"str": "Judaism"}, "seed": 42}}, {"loss_per_step": [4.861, 2.529, 0.705, 0.167, 0.052, 0.026, 0.016, 0.01], "prob_new": [0.43325814604759216, 0.46016332507133484, 0.6098006367683411, 0.8523854613304138, 0.9497675895690918, 0.9745224118232727, 0.9846016764640808, 0.9904616475105286], "prob_old": [0.9935314059257507, 0.45896801352500916, 0.40465185046195984, 0.48561495542526245, 0.4781164526939392, 0.39659714698791504, 0.3642053008079529, 0.35211074352264404], "prob_new_token": [9.908919764711754e-07, 0.00012895549298264086, 0.11827321350574493, 0.7084998488426208, 0.9539366364479065, 0.979567289352417, 0.9867217540740967, 0.9908542037010193], "prob_old_token": [0.980810284614563, 0.004091131035238504, 0.012329688295722008, 0.002129169413819909, 0.00010932949953712523, 1.9342889572726563e-05, 6.629364179389086e-06, 2.9091570468153805e-06], "l1-model.layers.2.mlp.down_proj.weight": [68341.125], "l2-model.layers.2.mlp.down_proj.weight": [11.465766906738281], "linf-model.layers.2.mlp.down_proj.weight": [0.003498699050396681], "request": {"prompt": "{} is affiliated with the religious tradition of", "subject": "Steven Spielberg", "target_new": {"str": "Shi'a Islam"}, "old_answer": {"str": "Judaism"}, "seed": 42}}, {"loss_per_step": [3.221, 1.377, 0.012, 0.005], "prob_new": [0.5852404236793518, 0.7222149968147278, 0.9882834553718567, 0.9952682852745056], "prob_old": [0.9935314059257507, 0.5035356283187866, 0.4138467311859131, 0.36451953649520874], "prob_new_token": [1.2662949302466586e-06, 0.0016652786871418357, 0.9545528888702393, 0.9900621175765991], "prob_old_token": [0.980810284614563, 0.0032249174546450377, 0.0030683951918035746, 0.00040321022970601916], "l1-model.layers.2.mlp.down_proj.weight": [44357.671875], "l2-model.layers.2.mlp.down_proj.weight": [7.062946319580078], "linf-model.layers.2.mlp.down_proj.weight": [0.001502213068306446], "request": {"prompt": "{} is affiliated with the religious tradition of", "subject": "Steven Spielberg", "target_new": {"str": "Rastafarianism"}, "old_answer": {"str": "Judaism"}, "seed": 42}}, {"loss_per_step": [4.069, 0.352, 0.027, 0.002], "prob_new": [0.5034825801849365, 0.78766930103302, 0.9743196368217468, 0.9980566501617432], "prob_old": [0.9889199137687683, 0.00979035533964634, 0.0006199918570928276, 1.9447386421234114e-06], "prob_new_token": [5.883362064196263e-07, 0.2802138924598694, 0.912083625793457, 0.9951106905937195], "prob_old_token": [0.9889199137687683, 0.00979035533964634, 0.0006199918570928276, 1.9447386421234114e-06], "l1-model.layers.2.mlp.down_proj.weight": [43836.703125], "l2-model.layers.2.mlp.down_proj.weight": [7.014995098114014], "linf-model.layers.2.mlp.down_proj.weight": [0.001502443104982376], "request": {"prompt": "{} is affiliated with the religious tradition of", "subject": "Brunei", "target_new": {"str": "Eastern Orthodoxy"}, "old_answer": {"str": "Islam"}, "seed": 42}}, {"loss_per_step": [8.123, 1.262, 0.031, 2.19, 0.019, 0.015, 0.014, 0.014, 0.015, 0.022, 0.028, 0.029, 0.032, 0.041, 0.044, 0.039, 0.031, 0.023, 0.018, 0.014], "prob_new": [0.0003114581049885601, 0.4321131110191345, 0.9694721698760986, 0.4952749013900757, 0.980722188949585, 0.9846248626708984, 
0.9862035512924194, 0.9857766628265381, 0.9852914810180664, 0.9780774116516113, 0.9723659753799438, 0.9716331958770752, 0.9685072898864746, 0.9601694941520691, 0.9573543667793274, 0.9620122909545898, 0.9701976180076599, 0.9774316549301147, 0.9823818206787109, 0.9858877062797546], "prob_old": [0.9889199137687683, 0.01271873340010643, 0.0001121090172091499, 9.7223946795566e-06, 1.0220786862191744e-05, 8.068584975262638e-06, 4.990157322026789e-06, 3.473053084235289e-06, 2.854279273378779e-06, 4.208526661386713e-06, 6.199848030519206e-06, 4.767443897435442e-06, 3.8590819713135716e-06, 3.08456105813093e-06, 2.573445271991659e-06, 2.3848258479119977e-06, 2.149582996935351e-06, 1.972154223039979e-06, 1.8662475440578419e-06, 1.6417795904999366e-06], "prob_new_token": [0.00021678376651834697, 0.10566892474889755, 0.9575588703155518, 0.9777393937110901, 0.9795787334442139, 0.9818353652954102, 0.9826570153236389, 0.9819783568382263, 0.9815481305122375, 0.973465621471405, 0.9644848704338074, 0.960260808467865, 0.9506896138191223, 0.9322522878646851, 0.9252961277961731, 0.933075487613678, 0.9479926824569702, 0.9612147808074951, 0.9701842069625854, 0.9765160083770752], "prob_old_token": [0.9889199137687683, 0.01271873340010643, 0.0001121090172091499, 9.7223946795566e-06, 1.0220786862191744e-05, 8.068584975262638e-06, 4.990157322026789e-06, 3.473053084235289e-06, 2.854279273378779e-06, 4.208526661386713e-06, 6.199848030519206e-06, 4.767443897435442e-06, 3.8590819713135716e-06, 3.08456105813093e-06, 2.573445271991659e-06, 2.3848258479119977e-06, 2.149582996935351e-06, 1.972154223039979e-06, 1.8662475440578419e-06, 1.6417795904999366e-06], "l1-model.layers.2.mlp.down_proj.weight": [95219.515625], "l2-model.layers.2.mlp.down_proj.weight": [16.483858108520508], "linf-model.layers.2.mlp.down_proj.weight": [0.009576555341482162], "request": {"prompt": "{} is affiliated with the religious tradition of", "subject": "Brunei", "target_new": {"str": "Shinto"}, "old_answer": {"str": "Islam"}, "seed": 42}}, {"loss_per_step": [4.454, 1.221, 0.612, 0.051, 0.014, 0.006], "prob_new": [0.49559518694877625, 0.7202901244163513, 0.7296791076660156, 0.9521489143371582, 0.9866783022880554, 0.9940755367279053], "prob_old": [0.9889199137687683, 0.00275239790789783, 0.0023373293224722147, 7.096601621014997e-05, 3.5280954762129113e-06, 3.887070647579094e-07], "prob_new_token": [0.00018080756126437336, 0.5513582825660706, 0.40531328320503235, 0.8405430912971497, 0.9566251635551453, 0.9838724732398987], "prob_old_token": [0.9889199137687683, 0.00275239790789783, 0.0023373293224722147, 7.096601621014997e-05, 3.5280954762129113e-06, 3.887070647579094e-07], "l1-model.layers.2.mlp.down_proj.weight": [58210.109375], "l2-model.layers.2.mlp.down_proj.weight": [9.536986351013184], "linf-model.layers.2.mlp.down_proj.weight": [0.0025008339434862137], "request": {"prompt": "{} is affiliated with the religious tradition of", "subject": "Brunei", "target_new": {"str": "the Serbian Orthodox Church"}, "old_answer": {"str": "Islam"}, "seed": 42}}, {"loss_per_step": [3.956, 1.24, 0.013, 0.005], "prob_new": [0.4999915361404419, 0.5378305912017822, 0.9866676926612854, 0.994821310043335], "prob_old": [0.962425947189331, 0.18104352056980133, 0.49702930450439453, 0.4980328381061554], "prob_new_token": [0.00036680354969576, 0.08441339433193207, 0.9750996828079224, 0.9909517765045166], "prob_old_token": [0.9249454736709595, 0.013245554640889168, 0.00018241273937746882, 4.997265568817966e-05], "l1-model.layers.2.mlp.down_proj.weight": [42746.921875], 
"l2-model.layers.2.mlp.down_proj.weight": [6.970730304718018], "linf-model.layers.2.mlp.down_proj.weight": [0.001502330880612135], "request": {"prompt": "{} is affiliated with the religious tradition of", "subject": "Philip the Apostle", "target_new": {"str": "Buddhism"}, "old_answer": {"str": "Christianity"}, "seed": 42}}, {"loss_per_step": [3.069, 0.675, 0.001], "prob_new": [0.6638985872268677, 0.7081522345542908, 0.9992753863334656], "prob_old": [0.962425947189331, 0.1528637707233429, 0.30116575956344604], "prob_new_token": [0.00010105368710355833, 0.13331152498722076, 0.9987814426422119], "prob_old_token": [0.9249454736709595, 0.03535719960927963, 2.469598030074849e-06], "l1-model.layers.2.mlp.down_proj.weight": [33417.98046875], "l2-model.layers.2.mlp.down_proj.weight": [5.2669358253479], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "{} is affiliated with the religious tradition of", "subject": "Philip the Apostle", "target_new": {"str": "Judaism"}, "old_answer": {"str": "Christianity"}, "seed": 42}}, {"loss_per_step": [6.987, 1.876, 0.006], "prob_new": [0.49897196888923645, 0.510936975479126, 0.9936195015907288], "prob_old": [0.962425947189331, 0.052888303995132446, 0.011441926471889019], "prob_new_token": [8.55889425110945e-07, 0.023496218025684357, 0.98763507604599], "prob_old_token": [0.9249454736709595, 0.0034876875579357147, 1.2197244359413162e-05], "l1-model.layers.2.mlp.down_proj.weight": [33820.40234375], "l2-model.layers.2.mlp.down_proj.weight": [5.298888206481934], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "{} is affiliated with the religious tradition of", "subject": "Philip the Apostle", "target_new": {"str": "Scientology"}, "old_answer": {"str": "Christianity"}, "seed": 42}}, {"loss_per_step": [4.376, 0.943, 0.007], "prob_new": [0.5592517256736755, 0.7475850582122803, 0.9927935600280762], "prob_old": [0.9897367358207703, 0.018839089199900627, 1.4460983038588893e-05], "prob_new_token": [6.771065130806164e-08, 0.023798292502760887, 0.9795867204666138], "prob_old_token": [0.9897367358207703, 0.018839089199900627, 1.4460983038588893e-05], "l1-model.layers.2.mlp.down_proj.weight": [33817.16796875], "l2-model.layers.2.mlp.down_proj.weight": [5.290128707885742], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "{} is affiliated with the religious tradition of", "subject": "Malcolm X", "target_new": {"str": "Eastern Orthodoxy"}, "old_answer": {"str": "Islam"}, "seed": 42}}, {"loss_per_step": [7.107, 0.354, 0.192, 0.003], "prob_new": [0.2507792115211487, 0.7918734550476074, 0.8539466857910156, 0.9970648288726807], "prob_old": [0.9897367358207703, 2.621604289743118e-05, 1.248754756488779e-06, 3.767673817378636e-08], "prob_new_token": [6.37939665466547e-05, 0.9135175943374634, 0.514240562915802, 0.9944097995758057], "prob_old_token": [0.9897367358207703, 2.621604289743118e-05, 1.248754756488779e-06, 3.767673817378636e-08], "l1-model.layers.2.mlp.down_proj.weight": [45092.375], "l2-model.layers.2.mlp.down_proj.weight": [7.071472644805908], "linf-model.layers.2.mlp.down_proj.weight": [0.00150247011333704], "request": {"prompt": "{} is affiliated with the religious tradition of", "subject": "Malcolm X", "target_new": {"str": "the Church of England"}, "old_answer": {"str": "Islam"}, "seed": 42}}, {"loss_per_step": [5.989, 1.242, 0.015, 0.009], "prob_new": [0.16808581352233887, 0.6450708508491516, 0.9855799674987793, 0.9914440512657166], "prob_old": 
[0.9897367358207703, 0.0025893449783325195, 3.4756919831124833e-07, 2.0159069435976562e-07], "prob_new_token": [1.2910621762785013e-06, 0.02650247886776924, 0.9795321822166443, 0.9790410995483398], "prob_old_token": [0.9897367358207703, 0.0025893449783325195, 3.4756919831124833e-07, 2.0159069435976562e-07], "l1-model.layers.2.mlp.down_proj.weight": [41612.7734375], "l2-model.layers.2.mlp.down_proj.weight": [6.89240026473999], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024058520793915], "request": {"prompt": "{} is affiliated with the religious tradition of", "subject": "Malcolm X", "target_new": {"str": "Shintoism"}, "old_answer": {"str": "Islam"}, "seed": 42}}, {"loss_per_step": [3.134, 0.723, 0.003], "prob_new": [0.6093783378601074, 0.8040567636489868, 0.9970878958702087], "prob_old": [0.9806894659996033, 0.4920291304588318, 0.302850604057312], "prob_new_token": [3.1270312774722697e-06, 0.02709396742284298, 0.9966762661933899], "prob_old_token": [0.961540937423706, 7.23131961422041e-05, 2.927547626541127e-08], "l1-model.layers.2.mlp.down_proj.weight": [33245.1328125], "l2-model.layers.2.mlp.down_proj.weight": [5.24949312210083], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "{} is affiliated with the religious tradition of", "subject": "Tom Cruise", "target_new": {"str": "Zoroastrianism"}, "old_answer": {"str": "Scientology"}, "seed": 42}}, {"loss_per_step": [5.281, 2.414, 0.167, 0.084, 0.021, 0.023, 0.01, 0.005], "prob_new": [0.3555614948272705, 0.636206865310669, 0.8489662408828735, 0.9231204390525818, 0.9793075919151306, 0.9773759245872498, 0.9897012710571289, 0.995343804359436], "prob_old": [0.9806894659996033, 0.4408997595310211, 0.20420023798942566, 0.16355447471141815, 0.1111614778637886, 0.08385459333658218, 0.05599438026547432, 0.03651861473917961], "prob_new_token": [1.7601028048375156e-06, 0.0007874899893067777, 0.7649097442626953, 0.8037307262420654, 0.9509625434875488, 0.9384986758232117, 0.9736827611923218, 0.9898935556411743], "prob_old_token": [0.961540937423706, 1.0422547802590998e-06, 1.1099860586227805e-07, 3.0463911571132485e-07, 2.4067713155773163e-08, 5.5683004873685604e-09, 9.37382726995395e-10, 3.036504381270788e-10], "l1-model.layers.2.mlp.down_proj.weight": [68295.84375], "l2-model.layers.2.mlp.down_proj.weight": [11.449569702148438], "linf-model.layers.2.mlp.down_proj.weight": [0.0034715638030320406], "request": {"prompt": "{} is affiliated with the religious tradition of", "subject": "Tom Cruise", "target_new": {"str": "Shia Islam"}, "old_answer": {"str": "Scientology"}, "seed": 42}}, {"loss_per_step": [3.893, 1.329, 0.07, 0.035, 0.006], "prob_new": [0.54410320520401, 0.8240742683410645, 0.9364141225814819, 0.9673373103141785, 0.9943134784698486], "prob_old": [0.9806894659996033, 0.48720860481262207, 0.3853309154510498, 0.2266540825366974, 0.2273842841386795], "prob_new_token": [0.032182883471250534, 0.9525178670883179, 0.7785975933074951, 0.8330660462379456, 0.9881814122200012], "prob_old_token": [0.961540937423706, 3.1278199230655446e-07, 1.917412220109327e-07, 1.2731679710498156e-08, 3.8762535403691345e-09], "l1-model.layers.2.mlp.down_proj.weight": [49570.58984375], "l2-model.layers.2.mlp.down_proj.weight": [8.290474891662598], "linf-model.layers.2.mlp.down_proj.weight": [0.002005607821047306], "request": {"prompt": "{} is affiliated with the religious tradition of", "subject": "Tom Cruise", "target_new": {"str": "the Serbian Orthodox Church"}, "old_answer": {"str": "Scientology"}, "seed": 42}}, 
{"loss_per_step": [4.293, 1.452, 0.595, 0.13, 0.026, 0.013, 0.005], "prob_new": [0.40559205412864685, 0.7544045448303223, 0.7549819946289062, 0.9081360101699829, 0.9748451709747314, 0.9874556064605713, 0.9946267008781433], "prob_old": [0.9511142373085022, 0.4246031641960144, 0.140877366065979, 0.002212936757132411, 0.0009725918644107878, 0.0018787079025059938, 0.004333334509283304], "prob_new_token": [0.0009130793623626232, 0.6324570178985596, 0.4847601056098938, 0.4680129289627075, 0.9694863557815552, 0.9866980910301208, 0.9935020804405212], "prob_old_token": [0.9023008942604065, 0.009540828876197338, 0.0001143971603596583, 1.384376719215652e-05, 1.9705799786606804e-05, 1.341570259683067e-05, 7.569967237941455e-06], "l1-model.layers.2.mlp.down_proj.weight": [62888.609375], "l2-model.layers.2.mlp.down_proj.weight": [10.455769538879395], "linf-model.layers.2.mlp.down_proj.weight": [0.00298173725605011], "request": {"prompt": "{} is affiliated with the religious tradition of", "subject": "Shakyamuni Buddha", "target_new": {"str": "the Serbian Orthodox Church"}, "old_answer": {"str": "Buddhism"}, "seed": 42}}, {"loss_per_step": [14.268, 7.079, 4.85, 0.004], "prob_new": [6.363500233419472e-07, 0.0008422337705269456, 0.007831753231585026, 0.9956843852996826], "prob_old": [0.9511142373085022, 0.47159624099731445, 0.4732051491737366, 0.4942154288291931], "prob_new_token": [6.363500233419472e-07, 0.0008422337705269456, 0.007831753231585026, 0.9956843852996826], "prob_old_token": [0.9023008942604065, 0.001829655608162284, 0.0010924965608865023, 1.1443071201711064e-07], "l1-model.layers.2.mlp.down_proj.weight": [37100.69921875], "l2-model.layers.2.mlp.down_proj.weight": [6.434264659881592], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024840831756592], "request": {"prompt": "{} is affiliated with the religious tradition of", "subject": "Shakyamuni Buddha", "target_new": {"str": "Islam"}, "old_answer": {"str": "Buddhism"}, "seed": 42}}, {"loss_per_step": [3.354, 2.096, 0.975, 0.295, 0.032, 0.009], "prob_new": [0.5983101725578308, 0.6572244763374329, 0.7836093306541443, 0.8387927412986755, 0.9695745706558228, 0.9908978343009949], "prob_old": [0.9511142373085022, 0.4552210569381714, 0.4672682285308838, 0.42554447054862976, 0.3967954218387604, 0.3502662479877472], "prob_new_token": [2.5872540732052585e-07, 8.792952576186508e-05, 0.00836301688104868, 0.23882995545864105, 0.8683342933654785, 0.9660115242004395], "prob_old_token": [0.9023008942604065, 0.00013526371913030744, 0.00013482777285389602, 0.009180388413369656, 0.0010808112565428019, 0.00010256696259602904], "l1-model.layers.2.mlp.down_proj.weight": [52542.64453125], "l2-model.layers.2.mlp.down_proj.weight": [9.067950248718262], "linf-model.layers.2.mlp.down_proj.weight": [0.0025071310810744762], "request": {"prompt": "{} is affiliated with the religious tradition of", "subject": "Shakyamuni Buddha", "target_new": {"str": "Rastafarianism"}, "old_answer": {"str": "Buddhism"}, "seed": 42}}, {"loss_per_step": [4.485, 1.068, 0.488, 2.238, 0.144, 0.081, 0.055, 0.044, 0.041, 0.035, 0.032, 0.028, 0.026, 0.025, 0.021, 0.017, 0.013, 0.01], "prob_new": [0.32501426339149475, 0.5455668568611145, 0.6848480105400085, 0.4090096950531006, 0.8720318078994751, 0.9235992431640625, 0.9472217559814453, 0.9575359225273132, 0.9602603912353516, 0.9664551019668579, 0.9692161679267883, 0.9731206893920898, 0.9744796752929688, 0.975954532623291, 0.9791939854621887, 0.9831066131591797, 0.9870328903198242, 0.9904420971870422], "prob_old": [0.9974159598350525, 
0.6157522201538086, 0.13488872349262238, 0.3123922049999237, 0.2607278525829315, 0.23345282673835754, 0.12860792875289917, 0.07354557514190674, 0.05611450970172882, 0.048298608511686325, 0.034574102610349655, 0.01627761870622635, 0.00830499455332756, 0.005386184900999069, 0.0037469654344022274, 0.0025824662297964096, 0.001713915029540658, 0.0010940335923805833], "prob_new_token": [1.3963613127998542e-06, 0.030667094513773918, 0.2607128620147705, 0.0173831544816494, 0.6813806891441345, 0.856429934501648, 0.9278103709220886, 0.9623709321022034, 0.9779035449028015, 0.9821021556854248, 0.9739668965339661, 0.9752793908119202, 0.9767901301383972, 0.9756602644920349, 0.9773642420768738, 0.9817191958427429, 0.9871435165405273, 0.9919701814651489], "prob_old_token": [0.9968907833099365, 0.003802805207669735, 0.005153944715857506, 0.0009684519609436393, 0.001338976202532649, 0.0009879664285108447, 0.0005949211772531271, 0.00028139137430116534, 0.00013528358249459416, 7.812659896444529e-05, 6.188136467244476e-05, 7.203649147413671e-05, 7.322424062294886e-05, 7.183090201579034e-05, 6.104627391323447e-05, 4.39397590525914e-05, 2.723391844483558e-05, 1.5355060895672068e-05], "l1-model.layers.2.mlp.down_proj.weight": [90427.578125], "l2-model.layers.2.mlp.down_proj.weight": [16.003467559814453], "linf-model.layers.2.mlp.down_proj.weight": [0.007470563519746065], "request": {"prompt": "{} is affiliated with the religious tradition of", "subject": "Vedas", "target_new": {"str": "Shi'a Islam"}, "old_answer": {"str": "Hinduism"}, "seed": 42}}, {"loss_per_step": [10.82, 2.946, 0.153, 0.01], "prob_new": [0.4837813377380371, 0.49934685230255127, 0.868199348449707, 0.9902732968330383], "prob_old": [0.9974159598350525, 0.6573648452758789, 0.4908078610897064, 0.5032742023468018], "prob_new_token": [4.1283107488077064e-10, 0.002773785497993231, 0.7376782298088074, 0.9813356995582581], "prob_old_token": [0.9968907833099365, 0.004068333655595779, 0.0033726287074387074, 0.00020948816381860524], "l1-model.layers.2.mlp.down_proj.weight": [38089.7421875], "l2-model.layers.2.mlp.down_proj.weight": [6.6168107986450195], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024691820144653], "request": {"prompt": "{} is affiliated with the religious tradition of", "subject": "Vedas", "target_new": {"str": "Scientology"}, "old_answer": {"str": "Hinduism"}, "seed": 42}}, {"loss_per_step": [7.498, 2.119, 0.518, 0.019, 0.007], "prob_new": [0.23617200553417206, 0.4117446541786194, 0.7150697112083435, 0.9817214608192444, 0.9925562143325806], "prob_old": [0.9974159598350525, 0.4850901961326599, 0.4200928211212158, 0.260054349899292, 0.2523529529571533], "prob_new_token": [0.00017953025235328823, 0.37726932764053345, 0.7412489652633667, 0.9749401211738586, 0.9891510605812073], "prob_old_token": [0.9968907833099365, 0.0016619651578366756, 0.0007366842473857105, 5.781602521892637e-05, 1.3647765626956243e-05], "l1-model.layers.2.mlp.down_proj.weight": [51460.640625], "l2-model.layers.2.mlp.down_proj.weight": [8.391105651855469], "linf-model.layers.2.mlp.down_proj.weight": [0.0020027030259370804], "request": {"prompt": "{} is affiliated with the religious tradition of", "subject": "Vedas", "target_new": {"str": "the Church of England"}, "old_answer": {"str": "Hinduism"}, "seed": 42}}, {"loss_per_step": [3.794, 0.884, 0.018, 0.004], "prob_new": [0.6002556085586548, 0.7879458665847778, 0.9831785559654236, 0.9961075186729431], "prob_old": [0.9893326759338379, 0.6325048208236694, 0.5387289524078369, 0.5923413634300232], "prob_new_token": 
[2.59751402609254e-07, 0.012953036464750767, 0.9168022871017456, 0.9818269610404968], "prob_old_token": [0.9680164456367493, 0.08740625530481339, 0.016589194536209106, 0.0016246397281065583], "l1-model.layers.2.mlp.down_proj.weight": [43902.2265625], "l2-model.layers.2.mlp.down_proj.weight": [7.050614833831787], "linf-model.layers.2.mlp.down_proj.weight": [0.0015023224987089634], "request": {"prompt": "{} is affiliated with the religious tradition of", "subject": "synagogue", "target_new": {"str": "Zoroastrianism"}, "old_answer": {"str": "Judaism"}, "seed": 42}}, {"loss_per_step": [4.106, 1.11, 0.016, 0.007], "prob_new": [0.6232742071151733, 0.7428262233734131, 0.9841204881668091, 0.992691695690155], "prob_old": [0.9893326759338379, 0.7011356353759766, 0.6363240480422974, 0.5870966911315918], "prob_new_token": [1.365614821224881e-07, 0.012276841327548027, 0.9383879899978638, 0.9728792905807495], "prob_old_token": [0.9680164456367493, 0.10508056730031967, 0.00018735681078396738, 2.0762779968208633e-05], "l1-model.layers.2.mlp.down_proj.weight": [43206.125], "l2-model.layers.2.mlp.down_proj.weight": [7.003634929656982], "linf-model.layers.2.mlp.down_proj.weight": [0.0015023918822407722], "request": {"prompt": "{} is affiliated with the religious tradition of", "subject": "synagogue", "target_new": {"str": "Eastern Orthodoxy"}, "old_answer": {"str": "Judaism"}, "seed": 42}}, {"loss_per_step": [5.276, 0.142, 0.002], "prob_new": [0.49951884150505066, 0.876711368560791, 0.9983664751052856], "prob_old": [0.9893326759338379, 0.6736974716186523, 0.6661472916603088], "prob_new_token": [2.6156831154366955e-05, 0.7537842988967896, 0.9968898892402649], "prob_old_token": [0.9680164456367493, 0.022545794025063515, 0.0002979651326313615], "l1-model.layers.2.mlp.down_proj.weight": [36433.8984375], "l2-model.layers.2.mlp.down_proj.weight": [5.512251853942871], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006785159930587], "request": {"prompt": "{} is affiliated with the religious tradition of", "subject": "synagogue", "target_new": {"str": "Christianity"}, "old_answer": {"str": "Judaism"}, "seed": 42}}, {"loss_per_step": [5.772, 2.745, 0.671, 0.123, 0.085, 0.056, 0.037, 0.025, 0.017, 0.012, 0.009], "prob_new": [0.49685391783714294, 0.5362421870231628, 0.6609594821929932, 0.8895530700683594, 0.9196888208389282, 0.9461168646812439, 0.9635679125785828, 0.9757275581359863, 0.9836552143096924, 0.9884769916534424, 0.9914194941520691], "prob_old": [0.9799199104309082, 0.3695528507232666, 0.48485514521598816, 0.49382483959198, 0.4883668124675751, 0.48437976837158203, 0.4833330512046814, 0.48381105065345764, 0.48421797156333923, 0.4840693771839142, 0.48340824246406555], "prob_new_token": [5.9098056226503104e-08, 0.00041300305747427046, 0.832053542137146, 0.9092538356781006, 0.8997138142585754, 0.9190095663070679, 0.9387800097465515, 0.9570314288139343, 0.9707430005073547, 0.9795575737953186, 0.985002875328064], "prob_old_token": [0.9599300026893616, 0.0001316920534009114, 9.319490345660597e-06, 1.3755566214967985e-05, 3.107486554654315e-05, 2.5136745534837246e-05, 1.7613498130231164e-05, 1.241478730662493e-05, 8.648167749925051e-06, 6.062081865820801e-06, 4.338050985097652e-06], "l1-model.layers.2.mlp.down_proj.weight": [74457.171875], "l2-model.layers.2.mlp.down_proj.weight": [13.150123596191406], "linf-model.layers.2.mlp.down_proj.weight": [0.0048713721334934235], "request": {"prompt": "{} is affiliated with the religious tradition of", "subject": "Nativity of Jesus", "target_new": {"str": "Shia Islam"}, 
"old_answer": {"str": "Christianity"}, "seed": 42}}, {"loss_per_step": [4.58, 2.203, 0.226, 0.011, 0.007], "prob_new": [0.48640280961990356, 0.3595500886440277, 0.8130450248718262, 0.9886249899864197, 0.9932451248168945], "prob_old": [0.9799199104309082, 0.31743600964546204, 0.40313953161239624, 0.4113190770149231, 0.4522964656352997], "prob_new_token": [2.3035127014736645e-06, 0.006411848124116659, 0.6022636294364929, 0.9905309081077576, 0.9869192242622375], "prob_old_token": [0.9599300026893616, 0.00019378979050088674, 7.520945359829057e-07, 1.0450251863858284e-07, 3.285595084889792e-07], "l1-model.layers.2.mlp.down_proj.weight": [49210.109375], "l2-model.layers.2.mlp.down_proj.weight": [8.208401679992676], "linf-model.layers.2.mlp.down_proj.weight": [0.002003695350140333], "request": {"prompt": "{} is affiliated with the religious tradition of", "subject": "Nativity of Jesus", "target_new": {"str": "Hinduism"}, "old_answer": {"str": "Christianity"}, "seed": 42}}, {"loss_per_step": [6.302, 2.598, 0.553, 0.001], "prob_new": [0.3309628665447235, 0.39483964443206787, 0.7279055118560791, 0.999065101146698], "prob_old": [0.9799199104309082, 0.38332417607307434, 0.4589860439300537, 0.3657801151275635], "prob_new_token": [1.2472176422306802e-05, 0.0016883095959201455, 0.19156911969184875, 0.9984884858131409], "prob_old_token": [0.9599300026893616, 0.002728227060288191, 0.002036437625065446, 4.1596325900172815e-05], "l1-model.layers.2.mlp.down_proj.weight": [39677.1640625], "l2-model.layers.2.mlp.down_proj.weight": [6.687545299530029], "linf-model.layers.2.mlp.down_proj.weight": [0.00150248222053051], "request": {"prompt": "{} is affiliated with the religious tradition of", "subject": "Nativity of Jesus", "target_new": {"str": "Jainism"}, "old_answer": {"str": "Christianity"}, "seed": 42}}, {"loss_per_step": [9.132, 2.148, 0.101, 0.006], "prob_new": [0.00404181657359004, 0.14974358677864075, 0.9056974649429321, 0.9936860799789429], "prob_old": [0.9982919692993164, 0.6459388732910156, 0.6444935202598572, 0.689003050327301], "prob_new_token": [1.4470384712694795e-06, 0.05592583119869232, 0.8548280000686646, 0.9974662065505981], "prob_old_token": [0.993905246257782, 0.0019446915248408914, 0.0007618771633133292, 5.82217126066098e-07], "l1-model.layers.2.mlp.down_proj.weight": [41407.6875], "l2-model.layers.2.mlp.down_proj.weight": [6.848795413970947], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024803578853607], "request": {"prompt": "{} was originally aired on", "subject": "The Adventures of Jimmy Neutron, Boy Genius", "target_new": {"str": "BBC Three"}, "old_answer": {"str": "Nickelodeon"}, "seed": 42}}, {"loss_per_step": [8.308, 3.638, 0.035, 0.003], "prob_new": [0.19494107365608215, 0.4994215965270996, 0.9660959243774414, 0.9968270659446716], "prob_old": [0.9982919692993164, 0.7356539964675903, 0.70893394947052, 0.711416482925415], "prob_new_token": [1.559987197197188e-07, 0.0006932850228622556, 0.9323890209197998, 0.9938714504241943], "prob_old_token": [0.993905246257782, 2.1518524590646848e-05, 3.824871782853734e-06, 1.287247783920975e-07], "l1-model.layers.2.mlp.down_proj.weight": [39069.25390625], "l2-model.layers.2.mlp.down_proj.weight": [6.681996822357178], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024300664663315], "request": {"prompt": "{} was originally aired on", "subject": "The Adventures of Jimmy Neutron, Boy Genius", "target_new": {"str": "HBO"}, "old_answer": {"str": "Nickelodeon"}, "seed": 42}}, {"loss_per_step": [10.859, 3.697, 0.059, 0.005], "prob_new": 
[1.9239862012909725e-05, 0.024788113310933113, 0.9424760937690735, 0.9953868985176086], "prob_old": [0.9982919692993164, 0.7432674169540405, 0.7264094948768616, 0.6514568328857422], "prob_new_token": [1.9239862012909725e-05, 0.024788113310933113, 0.9424760937690735, 0.9953868985176086], "prob_old_token": [0.993905246257782, 1.6922086842896533e-07, 3.1535904998492015e-09, 2.1900216362613634e-10], "l1-model.layers.2.mlp.down_proj.weight": [42032.99609375], "l2-model.layers.2.mlp.down_proj.weight": [6.8911824226379395], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024468302726746], "request": {"prompt": "{} was originally aired on", "subject": "The Adventures of Jimmy Neutron, Boy Genius", "target_new": {"str": "CBS"}, "old_answer": {"str": "Nickelodeon"}, "seed": 42}}, {"loss_per_step": [13.288, 2.675, 0.54, 0.005], "prob_new": [1.6945124343692441e-06, 0.06893790513277054, 0.5829558968544006, 0.9952953457832336], "prob_old": [0.9686630964279175, 8.376446203328669e-05, 2.650062924658414e-06, 2.1406552974667648e-08], "prob_new_token": [1.6945124343692441e-06, 0.06893790513277054, 0.5829558968544006, 0.9952953457832336], "prob_old_token": [0.9686630964279175, 8.376446203328669e-05, 2.650062924658414e-06, 2.1406552974667648e-08], "l1-model.layers.2.mlp.down_proj.weight": [40100.38671875], "l2-model.layers.2.mlp.down_proj.weight": [6.74318265914917], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024825697764754], "request": {"prompt": "{} was originally aired on", "subject": "Three's Company", "target_new": {"str": "MTV"}, "old_answer": {"str": "ABC"}, "seed": 42}}, {"loss_per_step": [6.73, 3.23, 0.902, 0.296, 0.588, 0.08, 0.033, 0.04, 0.028, 0.018, 0.013, 0.01, 0.008], "prob_new": [0.2592203617095947, 0.4449388384819031, 0.5591479539871216, 0.8039411902427673, 0.6552660465240479, 0.9266200065612793, 0.967913031578064, 0.9621516466140747, 0.9729938507080078, 0.9827065467834473, 0.9875640273094177, 0.9900280833244324, 0.9919186234474182], "prob_old": [0.9686630964279175, 0.002708684653043747, 0.006664132699370384, 0.0015838752733543515, 4.879107291344553e-05, 3.423014277359471e-05, 3.5179542464902624e-05, 3.095737338298932e-05, 1.5557099686702713e-05, 6.726091214659391e-06, 3.0880075883032987e-06, 1.5466160903088166e-06, 8.032680511860235e-07], "prob_new_token": [0.0007547556888312101, 0.11911483854055405, 0.38077765703201294, 0.35299718379974365, 0.54599529504776, 0.7948268055915833, 0.911346435546875, 0.8773297667503357, 0.9177327752113342, 0.9558619856834412, 0.974592924118042, 0.9828869700431824, 0.9875293374061584], "prob_old_token": [0.9686630964279175, 0.002708684653043747, 0.006664132699370384, 0.0015838752733543515, 4.879107291344553e-05, 3.423014277359471e-05, 3.5179542464902624e-05, 3.095737338298932e-05, 1.5557099686702713e-05, 6.726091214659391e-06, 3.0880075883032987e-06, 1.5466160903088166e-06, 8.032680511860235e-07], "l1-model.layers.2.mlp.down_proj.weight": [88427.6484375], "l2-model.layers.2.mlp.down_proj.weight": [14.717782020568848], "linf-model.layers.2.mlp.down_proj.weight": [0.0056046550162136555], "request": {"prompt": "{} was originally aired on", "subject": "Three's Company", "target_new": {"str": "the Syfy"}, "old_answer": {"str": "ABC"}, "seed": 42}}, {"loss_per_step": [3.903, 1.961, 0.201, 0.018, 0.004], "prob_new": [0.6058307886123657, 0.7398573756217957, 0.8557645082473755, 0.9822765588760376, 0.9955540895462036], "prob_old": [0.9686630964279175, 0.002515673404559493, 0.02266119047999382, 0.001345616183243692, 9.26240609260276e-05], "prob_new_token": 
[3.91475225569593e-07, 0.0004093862953595817, 0.46829482913017273, 0.9535732269287109, 0.9942745566368103], "prob_old_token": [0.9686630964279175, 0.002515673404559493, 0.02266119047999382, 0.001345616183243692, 9.26240609260276e-05], "l1-model.layers.2.mlp.down_proj.weight": [52327.828125], "l2-model.layers.2.mlp.down_proj.weight": [8.426441192626953], "linf-model.layers.2.mlp.down_proj.weight": [0.0020024324767291546], "request": {"prompt": "{} was originally aired on", "subject": "Three's Company", "target_new": {"str": "Nickelodeon"}, "old_answer": {"str": "ABC"}, "seed": 42}}, {"loss_per_step": [5.003, 1.182, 0.223, 0.063, 0.039, 0.014, 0.009], "prob_new": [0.45719751715660095, 0.5305848121643066, 0.8403518199920654, 0.9407069683074951, 0.9623022079467773, 0.9862571358680725, 0.9908603429794312], "prob_old": [0.958655059337616, 1.7318304799118778e-06, 3.4068268632836407e-06, 3.921678271012752e-08, 2.3420758665793073e-08, 2.1020358786927318e-08, 1.5183774237925718e-08], "prob_new_token": [0.011087743565440178, 0.2764165997505188, 0.44652706384658813, 0.9749623537063599, 0.9283279180526733, 0.9781690835952759, 0.9862273335456848], "prob_old_token": [0.958655059337616, 1.7318304799118778e-06, 3.4068268632836407e-06, 3.921678271012752e-08, 2.3420758665793073e-08, 2.1020358786927318e-08, 1.5183774237925718e-08], "l1-model.layers.2.mlp.down_proj.weight": [62811.9375], "l2-model.layers.2.mlp.down_proj.weight": [10.503954887390137], "linf-model.layers.2.mlp.down_proj.weight": [0.0029740706086158752], "request": {"prompt": "{} was originally aired on", "subject": "Starsky & Hutch", "target_new": {"str": "the Discovery Channel"}, "old_answer": {"str": "ABC"}, "seed": 42}}, {"loss_per_step": [8.672, 0.662, 0.554, 0.022, 0.02, 0.014, 0.008], "prob_new": [0.0002653137198649347, 0.529138445854187, 0.6630884408950806, 0.9785423874855042, 0.9803500771522522, 0.986276388168335, 0.9916220903396606], "prob_old": [0.958655059337616, 1.5902121958788484e-05, 3.468747991064447e-07, 1.0110929906659294e-06, 7.610122452206269e-07, 3.995342297002935e-07, 1.9165914011409768e-07], "prob_new_token": [6.278319051489234e-05, 0.6465819478034973, 0.33246055245399475, 0.9802885055541992, 0.9796332120895386, 0.985233724117279, 0.9919501543045044], "prob_old_token": [0.958655059337616, 1.5902121958788484e-05, 3.468747991064447e-07, 1.0110929906659294e-06, 7.610122452206269e-07, 3.995342297002935e-07, 1.9165914011409768e-07], "l1-model.layers.2.mlp.down_proj.weight": [63430.3515625], "l2-model.layers.2.mlp.down_proj.weight": [10.433165550231934], "linf-model.layers.2.mlp.down_proj.weight": [0.003008631058037281], "request": {"prompt": "{} was originally aired on", "subject": "Starsky & Hutch", "target_new": {"str": "BBC Three"}, "old_answer": {"str": "ABC"}, "seed": 42}}, {"loss_per_step": [7.147, 1.135, 0.007], "prob_new": [0.465109258890152, 0.5470423698425293, 0.9927418828010559], "prob_old": [0.958655059337616, 5.439143023977522e-06, 2.5304808914938803e-08], "prob_new_token": [6.659973337264091e-07, 0.10435294359922409, 0.9859360456466675], "prob_old_token": [0.958655059337616, 5.439143023977522e-06, 2.5304808914938803e-08], "l1-model.layers.2.mlp.down_proj.weight": [33153.17578125], "l2-model.layers.2.mlp.down_proj.weight": [5.226469993591309], "linf-model.layers.2.mlp.down_proj.weight": [0.0010007023811340332], "request": {"prompt": "{} was originally aired on", "subject": "Starsky & Hutch", "target_new": {"str": "Showtime"}, "old_answer": {"str": "ABC"}, "seed": 42}}, {"loss_per_step": [10.105, 0.278, 0.0], 
"prob_new": [4.088424248038791e-05, 0.7573757767677307, 0.9995896220207214], "prob_old": [0.9817798137664795, 0.48768410086631775, 0.4806056618690491], "prob_new_token": [4.088424248038791e-05, 0.7573757767677307, 0.9995896220207214], "prob_old_token": [0.9338408708572388, 0.0002591132069937885, 6.924253170836892e-07], "l1-model.layers.2.mlp.down_proj.weight": [36491.21484375], "l2-model.layers.2.mlp.down_proj.weight": [5.514390468597412], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "{} was originally aired on", "subject": "MythBusters", "target_new": {"str": "CBS"}, "old_answer": {"str": "the Discovery Channel"}, "seed": 42}}, {"loss_per_step": [6.041, 0.303, 0.001], "prob_new": [0.4938088357448578, 0.7698399424552917, 0.9987961649894714], "prob_old": [0.9817798137664795, 0.4905671179294586, 0.489099383354187], "prob_new_token": [5.725823029933963e-06, 0.5525310635566711, 0.9993313550949097], "prob_old_token": [0.9338408708572388, 0.0040302821435034275, 2.934327676484827e-05], "l1-model.layers.2.mlp.down_proj.weight": [34121.4375], "l2-model.layers.2.mlp.down_proj.weight": [5.319942474365234], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "{} was originally aired on", "subject": "MythBusters", "target_new": {"str": "Showtime"}, "old_answer": {"str": "the Discovery Channel"}, "seed": 42}}, {"loss_per_step": [6.332, 2.089, 0.083, 0.028, 0.016, 0.011, 0.008], "prob_new": [0.03703174740076065, 0.5046511292457581, 0.9238141179084778, 0.9726241827011108, 0.9842023849487305, 0.9890621304512024, 0.9923577308654785], "prob_old": [0.9817798137664795, 0.48697853088378906, 0.46547260880470276, 0.4521307051181793, 0.44499409198760986, 0.4428212344646454, 0.44468727707862854], "prob_new_token": [4.270675708539784e-05, 0.015410037711262703, 0.8478030562400818, 0.9455184936523438, 0.9687241315841675, 0.9784271717071533, 0.9849585890769958], "prob_old_token": [0.9338408708572388, 0.008471420034766197, 0.0015031882794573903, 0.0006489217048510909, 0.0004030361014883965, 0.0002538239350542426, 0.00014333546278066933], "l1-model.layers.2.mlp.down_proj.weight": [63916.16796875], "l2-model.layers.2.mlp.down_proj.weight": [10.601573944091797], "linf-model.layers.2.mlp.down_proj.weight": [0.002988457679748535], "request": {"prompt": "{} was originally aired on", "subject": "MythBusters", "target_new": {"str": "AMC"}, "old_answer": {"str": "the Discovery Channel"}, "seed": 42}}, {"loss_per_step": [5.694, 2.193, 0.58, 0.118, 0.017, 0.01, 0.009], "prob_new": [0.336902379989624, 0.48268330097198486, 0.6873964667320251, 0.9030678272247314, 0.9836212992668152, 0.9899146556854248, 0.9910470247268677], "prob_old": [0.9961651563644409, 0.4638415575027466, 0.4963003695011139, 0.23372681438922882, 0.32006436586380005, 0.38314345479011536, 0.4125216603279114], "prob_new_token": [0.001714998739771545, 0.10868756473064423, 0.6083186864852905, 0.6464058756828308, 0.9525198340415955, 0.9738187193870544, 0.9778785705566406], "prob_old_token": [0.9924113154411316, 0.012340646237134933, 0.008540365844964981, 6.632934673689306e-05, 1.9410616005188785e-05, 1.1074685062339995e-05, 9.060531738214195e-06], "l1-model.layers.2.mlp.down_proj.weight": [62268.4140625], "l2-model.layers.2.mlp.down_proj.weight": [10.476518630981445], "linf-model.layers.2.mlp.down_proj.weight": [0.002967512235045433], "request": {"prompt": "{} was originally aired on", "subject": "Parks and Recreation", "target_new": {"str": "the Syfy"}, "old_answer": {"str": "NBC"}, 
"seed": 42}}, {"loss_per_step": [9.596, 3.718, 0.291, 0.03, 0.01], "prob_new": [0.0004065733519382775, 0.0895417332649231, 0.7517012357711792, 0.9708479642868042, 0.9903011322021484], "prob_old": [0.9961651563644409, 0.4177546799182892, 0.33880850672721863, 0.06342706084251404, 0.026562660932540894], "prob_new_token": [5.726516519644065e-06, 0.0033536155242472887, 0.671011209487915, 0.9868201613426208, 0.996981680393219], "prob_old_token": [0.9924113154411316, 0.006069944240152836, 0.0003220579237677157, 6.6059815253538545e-06, 1.3852687743565184e-06], "l1-model.layers.2.mlp.down_proj.weight": [49563.609375], "l2-model.layers.2.mlp.down_proj.weight": [8.244667053222656], "linf-model.layers.2.mlp.down_proj.weight": [0.002003537490963936], "request": {"prompt": "{} was originally aired on", "subject": "Parks and Recreation", "target_new": {"str": "BBC Three"}, "old_answer": {"str": "NBC"}, "seed": 42}}, {"loss_per_step": [6.845, 4.7, 0.337, 0.015, 0.003], "prob_new": [0.4958808422088623, 0.4977127015590668, 0.7540443539619446, 0.9855219721794128, 0.9971889853477478], "prob_old": [0.9961651563644409, 0.4549388289451599, 0.42139288783073425, 0.4683626890182495, 0.4753700792789459], "prob_new_token": [1.143289864558028e-06, 8.314050501212478e-05, 0.5109262466430664, 0.9711408615112305, 0.9943927526473999], "prob_old_token": [0.9924113154411316, 0.0005633366527035832, 0.0017414106987416744, 0.0008433433249592781, 0.0001805837091524154], "l1-model.layers.2.mlp.down_proj.weight": [46479.4296875], "l2-model.layers.2.mlp.down_proj.weight": [8.01695442199707], "linf-model.layers.2.mlp.down_proj.weight": [0.0020022960379719734], "request": {"prompt": "{} was originally aired on", "subject": "Parks and Recreation", "target_new": {"str": "UPN"}, "old_answer": {"str": "NBC"}, "seed": 42}}, {"loss_per_step": [5.764, 0.982, 0.129, 0.03, 0.016, 0.01, 0.007], "prob_new": [0.4485001862049103, 0.5262505412101746, 0.8861370086669922, 0.9709491729736328, 0.9846077561378479, 0.9900461435317993, 0.9930422306060791], "prob_old": [0.998214840888977, 0.5820282697677612, 0.44902828335762024, 0.38536304235458374, 0.30534783005714417, 0.25381410121917725, 0.23041695356369019], "prob_new_token": [1.0973924872814678e-05, 0.1564307063817978, 0.773647665977478, 0.9431252479553223, 0.9706694483757019, 0.9816782474517822, 0.9877051711082458], "prob_old_token": [0.9964634776115417, 0.16638517379760742, 0.0032591097988188267, 0.0005036522052250803, 0.00016412491095252335, 7.203237328212708e-05, 3.9330479921773076e-05], "l1-model.layers.2.mlp.down_proj.weight": [63386.4609375], "l2-model.layers.2.mlp.down_proj.weight": [10.52436351776123], "linf-model.layers.2.mlp.down_proj.weight": [0.0029889587312936783], "request": {"prompt": "{} was originally aired on", "subject": "Veep", "target_new": {"str": "AMC"}, "old_answer": {"str": "HBO"}, "seed": 42}}, {"loss_per_step": [5.717, 2.393, 0.779, 0.184, 0.036, 0.011, 0.008], "prob_new": [0.2440614402294159, 0.36638304591178894, 0.6680797338485718, 0.8401393294334412, 0.9650053977966309, 0.9886833429336548, 0.9922608137130737], "prob_old": [0.998214840888977, 0.5512900352478027, 0.4885631501674652, 0.46657857298851013, 0.4641726315021515, 0.4428088068962097, 0.41357678174972534], "prob_new_token": [0.0008846788550727069, 0.34812530875205994, 0.7315627336502075, 0.6514533758163452, 0.9213213324546814, 0.9780131578445435, 0.9830553531646729], "prob_old_token": [0.9964634776115417, 0.10390223562717438, 0.00016227064770646393, 0.0005305638769641519, 0.00022911059204488993, 
5.0378526793792844e-05, 2.362110717513133e-05], "l1-model.layers.2.mlp.down_proj.weight": [64855.515625], "l2-model.layers.2.mlp.down_proj.weight": [10.646061897277832], "linf-model.layers.2.mlp.down_proj.weight": [0.0029855077154934406], "request": {"prompt": "{} was originally aired on", "subject": "Veep", "target_new": {"str": "the WWE Network"}, "old_answer": {"str": "HBO"}, "seed": 42}}, {"loss_per_step": [8.207, 2.855, 0.537, 0.157, 0.03, 0.005], "prob_new": [0.00034011760726571083, 0.07552702724933624, 0.6279942989349365, 0.8642116189002991, 0.971176266670227, 0.9945846796035767], "prob_old": [0.998214840888977, 0.48556891083717346, 0.4523755609989166, 0.42881786823272705, 0.42317038774490356, 0.42003414034843445], "prob_new_token": [0.0001368925004499033, 0.12440595775842667, 0.3986588716506958, 0.7355417013168335, 0.9429722428321838, 0.9893166422843933], "prob_old_token": [0.9964634776115417, 0.006319057662039995, 0.001358122332021594, 0.0007326870108954608, 0.00010241779818898067, 1.37576262204675e-05], "l1-model.layers.2.mlp.down_proj.weight": [59137.6484375], "l2-model.layers.2.mlp.down_proj.weight": [9.630731582641602], "linf-model.layers.2.mlp.down_proj.weight": [0.002505665645003319], "request": {"prompt": "{} was originally aired on", "subject": "Veep", "target_new": {"str": "ABC Family"}, "old_answer": {"str": "HBO"}, "seed": 42}}, {"loss_per_step": [4.087, 0.825, 0.238, 0.057, 0.014, 0.009], "prob_new": [0.33566775918006897, 0.539686918258667, 0.8187911510467529, 0.946318507194519, 0.9860740900039673, 0.9908324480056763], "prob_old": [0.9379982948303223, 0.00010423859930597246, 8.30690532893641e-06, 4.0592505001768586e-07, 5.574689510012831e-08, 2.6874218406192085e-08], "prob_new_token": [0.0016904306830838323, 0.17276664078235626, 0.49112626910209656, 0.8437162637710571, 0.975421130657196, 0.9877729415893555], "prob_old_token": [0.9379982948303223, 0.00010423859930597246, 8.30690532893641e-06, 4.0592505001768586e-07, 5.574689510012831e-08, 2.6874218406192085e-08], "l1-model.layers.2.mlp.down_proj.weight": [61817.8125], "l2-model.layers.2.mlp.down_proj.weight": [9.727993965148926], "linf-model.layers.2.mlp.down_proj.weight": [0.002500315196812153], "request": {"prompt": "{} was originally aired on", "subject": "The Fugitive", "target_new": {"str": "the Syfy"}, "old_answer": {"str": "ABC"}, "seed": 42}}, {"loss_per_step": [5.013, 1.576, 0.546, 0.136, 0.044, 0.024, 0.016, 0.013, 0.011, 0.008], "prob_new": [0.31887850165367126, 0.3598575294017792, 0.6805394887924194, 0.8791812658309937, 0.957388162612915, 0.9759712219238281, 0.9837808609008789, 0.9870715141296387, 0.9894367456436157, 0.991564929485321], "prob_old": [0.9379982948303223, 4.087071283720434e-05, 1.3872815543436445e-05, 3.9278656913666055e-06, 7.770468073431402e-07, 2.8455966116780473e-07, 1.3274726029521844e-07, 6.926587303723863e-08, 4.240086681761568e-08, 2.837422208301632e-08], "prob_new_token": [0.0016904306830838323, 0.11965807527303696, 0.2389887571334839, 0.7331793308258057, 0.9554754495620728, 0.9808017015457153, 0.9860996007919312, 0.9874004125595093, 0.9880932569503784, 0.9891539216041565], "prob_old_token": [0.9379982948303223, 4.087071283720434e-05, 1.3872815543436445e-05, 3.9278656913666055e-06, 7.770468073431402e-07, 2.8455966116780473e-07, 1.3274726029521844e-07, 6.926587303723863e-08, 4.240086681761568e-08, 2.837422208301632e-08], "l1-model.layers.2.mlp.down_proj.weight": [82592.0859375], "l2-model.layers.2.mlp.down_proj.weight": [13.196709632873535], "linf-model.layers.2.mlp.down_proj.weight": 
[0.004463357850909233], "request": {"prompt": "{} was originally aired on", "subject": "The Fugitive", "target_new": {"str": "the USA Network"}, "old_answer": {"str": "ABC"}, "seed": 42}}, {"loss_per_step": [6.891, 0.477, 0.038, 0.004], "prob_new": [0.6632546782493591, 0.7457074522972107, 0.9643878936767578, 0.9960520267486572], "prob_old": [0.9379982948303223, 0.002193808788433671, 0.00013175549975130707, 7.160235782066593e-06], "prob_new_token": [1.063825805225349e-09, 0.23929978907108307, 0.8951128721237183, 0.9901533722877502], "prob_old_token": [0.9379982948303223, 0.002193808788433671, 0.00013175549975130707, 7.160235782066593e-06], "l1-model.layers.2.mlp.down_proj.weight": [45535.23828125], "l2-model.layers.2.mlp.down_proj.weight": [7.124541282653809], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024314634501934], "request": {"prompt": "{} was originally aired on", "subject": "The Fugitive", "target_new": {"str": "Cartoon Network"}, "old_answer": {"str": "ABC"}, "seed": 42}}, {"loss_per_step": [7.859, 2.169, 0.839, 0.079, 0.004], "prob_new": [0.015402378514409065, 0.47014012932777405, 0.5884859561920166, 0.9259544610977173, 0.9957302808761597], "prob_old": [0.9960401654243469, 0.45776450634002686, 0.21603649854660034, 0.09363234043121338, 0.03624517843127251], "prob_new_token": [4.845277999265818e-06, 0.014095215126872063, 0.18911033868789673, 0.8624528050422668, 0.998696506023407], "prob_old_token": [0.9920992851257324, 0.0033529424108564854, 0.0023648450151085854, 1.7659498553257436e-05, 3.9859835965216917e-07], "l1-model.layers.2.mlp.down_proj.weight": [49838.96875], "l2-model.layers.2.mlp.down_proj.weight": [8.223625183105469], "linf-model.layers.2.mlp.down_proj.weight": [0.00200568325817585], "request": {"prompt": "{} was originally aired on", "subject": "30 Rock", "target_new": {"str": "AMC"}, "old_answer": {"str": "NBC"}, "seed": 42}}, {"loss_per_step": [7.132, 2.554, 0.967, 0.088, 0.037, 0.016, 0.007], "prob_new": [0.488639771938324, 0.49747270345687866, 0.5709596276283264, 0.9186383485794067, 0.9641762971878052, 0.984389066696167, 0.9933477640151978], "prob_old": [0.9960401654243469, 0.42655548453330994, 0.43523427844047546, 0.40436139702796936, 0.3860897719860077, 0.38530099391937256, 0.39331990480422974], "prob_new_token": [6.532753218380094e-07, 0.006122462451457977, 0.14499112963676453, 0.8410372734069824, 0.9309353232383728, 0.9706817865371704, 0.9881773591041565], "prob_old_token": [0.9920992851257324, 0.0033566567581146955, 0.00018365102005191147, 0.00020372524159029126, 8.667766087455675e-05, 3.380806083441712e-05, 1.4043132978258654e-05], "l1-model.layers.2.mlp.down_proj.weight": [58500.9296875], "l2-model.layers.2.mlp.down_proj.weight": [10.159204483032227], "linf-model.layers.2.mlp.down_proj.weight": [0.002994917333126068], "request": {"prompt": "{} was originally aired on", "subject": "30 Rock", "target_new": {"str": "Showtime"}, "old_answer": {"str": "NBC"}, "seed": 42}}, {"loss_per_step": [4.234, 1.053, 0.313, 0.027, 0.001], "prob_new": [0.5486865043640137, 0.6298651695251465, 0.7880380153656006, 0.9740041494369507, 0.9990686774253845], "prob_old": [0.9960401654243469, 0.40293455123901367, 0.10022030025720596, 0.1009102389216423, 0.11557668447494507], "prob_new_token": [4.6865807235008106e-06, 0.05029188469052315, 0.4084196388721466, 0.9340197443962097, 0.9996271133422852], "prob_old_token": [0.9920992851257324, 0.0018444033339619637, 0.001052595442160964, 1.940926449606195e-05, 1.4018717742203535e-08], "l1-model.layers.2.mlp.down_proj.weight": 
[49095.6640625], "l2-model.layers.2.mlp.down_proj.weight": [8.098320007324219], "linf-model.layers.2.mlp.down_proj.weight": [0.0020053070038557053], "request": {"prompt": "{} was originally aired on", "subject": "30 Rock", "target_new": {"str": "Channel 4"}, "old_answer": {"str": "NBC"}, "seed": 42}}, {"loss_per_step": [12.202, 5.808, 1.86, 0.227, 0.059, 0.016, 0.005], "prob_new": [5.021263859816827e-06, 0.0030035239178687334, 0.15574166178703308, 0.7970945835113525, 0.9425626993179321, 0.9842552542686462, 0.9948996901512146], "prob_old": [0.9907889366149902, 0.7408826947212219, 0.73055100440979, 0.7370426058769226, 0.7403501868247986, 0.7404654622077942, 0.7380430698394775], "prob_new_token": [5.021263859816827e-06, 0.0030035239178687334, 0.15574166178703308, 0.7970945835113525, 0.9425626993179321, 0.9842552542686462, 0.9948996901512146], "prob_old_token": [0.9650437831878662, 0.0013558355858549476, 0.0015400429256260395, 0.00023545169096905738, 2.184524419135414e-05, 2.1810553789691767e-06, 2.883827789901261e-07], "l1-model.layers.2.mlp.down_proj.weight": [66910.0625], "l2-model.layers.2.mlp.down_proj.weight": [10.689018249511719], "linf-model.layers.2.mlp.down_proj.weight": [0.0029420889914035797], "request": {"prompt": "{} was originally aired on", "subject": "Supah Ninjas", "target_new": {"str": "Fox"}, "old_answer": {"str": "Nickelodeon"}, "seed": 42}}, {"loss_per_step": [7.267, 2.622, 0.382, 0.128, 0.071, 0.045, 0.033, 0.027, 0.024, 0.021, 0.017, 0.013, 0.01, 0.008], "prob_new": [0.2789243459701538, 0.2768545150756836, 0.7256519794464111, 0.8809560537338257, 0.9314883947372437, 0.9557363986968994, 0.9680253863334656, 0.973493218421936, 0.9762895107269287, 0.979422926902771, 0.9832733869552612, 0.9869007468223572, 0.9897763133049011, 0.9918872714042664], "prob_old": [0.9907889366149902, 0.7426161766052246, 0.7394350171089172, 0.740668535232544, 0.7418531179428101, 0.741972804069519, 0.7404509782791138, 0.7366662621498108, 0.7320669293403625, 0.7290424108505249, 0.7277694940567017, 0.7273197174072266, 0.7269454598426819, 0.7263105511665344], "prob_new_token": [0.009770012460649014, 0.028539882972836494, 0.40778884291648865, 0.8202953934669495, 0.9239071011543274, 0.9542722105979919, 0.9661315679550171, 0.9703468680381775, 0.9718774557113647, 0.9745997190475464, 0.9788676500320435, 0.9831330180168152, 0.9865807890892029, 0.9891682267189026], "prob_old_token": [0.9650437831878662, 0.0011968666221946478, 0.0005164588801562786, 2.92326403723564e-05, 6.007092906656908e-06, 2.693444002943579e-06, 1.983745733014075e-06, 2.097344349749619e-06, 2.4028886400628835e-06, 2.1627554360748036e-06, 1.4849792933091521e-06, 9.004218668451358e-07, 5.46036574178288e-07, 3.5384044849706697e-07], "l1-model.layers.2.mlp.down_proj.weight": [97427.9609375], "l2-model.layers.2.mlp.down_proj.weight": [15.468463897705078], "linf-model.layers.2.mlp.down_proj.weight": [0.006340667139738798], "request": {"prompt": "{} was originally aired on", "subject": "Supah Ninjas", "target_new": {"str": "the History Channel"}, "old_answer": {"str": "Nickelodeon"}, "seed": 42}}, {"loss_per_step": [5.137, 2.474, 0.617, 0.205, 0.109, 0.057, 0.026, 0.013, 0.008], "prob_new": [0.23592489957809448, 0.2731557786464691, 0.6246969699859619, 0.8280572891235352, 0.902998685836792, 0.9470694065093994, 0.9748470783233643, 0.9873565435409546, 0.9920848608016968], "prob_old": [0.9907889366149902, 0.7385056614875793, 0.7217473983764648, 0.7243874073028564, 0.728858232498169, 0.7348291873931885, 0.7392640113830566, 0.7415571212768555, 
0.742282509803772], "prob_new_token": [0.009770012460649014, 0.03344876691699028, 0.41132450103759766, 0.6540637016296387, 0.7373342514038086, 0.8368481993675232, 0.9219091534614563, 0.9625765681266785, 0.9783790111541748], "prob_old_token": [0.9650437831878662, 0.0004234427469782531, 0.0020175082609057426, 0.002368516055867076, 0.0006171196582727134, 0.0001643863506615162, 4.123021426494233e-05, 1.0993438991135918e-05, 4.184617864666507e-06], "l1-model.layers.2.mlp.down_proj.weight": [78843.453125], "l2-model.layers.2.mlp.down_proj.weight": [12.582423210144043], "linf-model.layers.2.mlp.down_proj.weight": [0.003950529266148806], "request": {"prompt": "{} was originally aired on", "subject": "Supah Ninjas", "target_new": {"str": "the WWE Network"}, "old_answer": {"str": "Nickelodeon"}, "seed": 42}}, {"loss_per_step": [5.588, 0.851, 0.013, 0.004], "prob_new": [0.6663423180580139, 0.6923975348472595, 0.987034022808075, 0.9959494471549988], "prob_old": [0.9697312712669373, 0.35215067863464355, 0.25963255763053894, 0.20307889580726624], "prob_new_token": [5.2458638322150364e-08, 0.07802284508943558, 0.9617815613746643, 0.9886069893836975], "prob_old_token": [0.9499397873878479, 0.00020116368250455707, 3.7087927921675146e-05, 2.0021103409817442e-05], "l1-model.layers.2.mlp.down_proj.weight": [42520.1328125], "l2-model.layers.2.mlp.down_proj.weight": [6.940310955047607], "linf-model.layers.2.mlp.down_proj.weight": [0.0015021339058876038], "request": {"prompt": "{} was originally aired on", "subject": "Shake It Up", "target_new": {"str": "Cartoon Network"}, "old_answer": {"str": "Disney Channel"}, "seed": 42}}, {"loss_per_step": [6.843, 2.443, 0.728, 0.199, 0.05, 0.029, 0.019, 0.015, 0.011, 0.008], "prob_new": [0.24508972465991974, 0.30608004331588745, 0.6108736395835876, 0.8340041637420654, 0.9528713822364807, 0.9717005491256714, 0.9811288118362427, 0.985686719417572, 0.9888420104980469, 0.991592288017273], "prob_old": [0.9697312712669373, 0.33948537707328796, 0.33295127749443054, 0.4068399667739868, 0.4095631539821625, 0.36009567975997925, 0.3137389123439789, 0.28868868947029114, 0.27678993344306946, 0.2718816101551056], "prob_new_token": [0.03824641555547714, 0.029921701177954674, 0.19764560461044312, 0.731257438659668, 0.8731555938720703, 0.9238505363464355, 0.9501197338104248, 0.9606884121894836, 0.9687269926071167, 0.9768807888031006], "prob_old_token": [0.9499397873878479, 0.0009855893440544605, 0.008530903607606888, 0.0016969060525298119, 0.00032671369262970984, 0.00024227464746218175, 0.00020165983005426824, 0.00013081409269943833, 7.72866842453368e-05, 4.486428952077404e-05], "l1-model.layers.2.mlp.down_proj.weight": [76257.8671875], "l2-model.layers.2.mlp.down_proj.weight": [12.918900489807129], "linf-model.layers.2.mlp.down_proj.weight": [0.004454388283193111], "request": {"prompt": "{} was originally aired on", "subject": "Shake It Up", "target_new": {"str": "the WWE Network"}, "old_answer": {"str": "Disney Channel"}, "seed": 42}}, {"loss_per_step": [3.288, 3.186, 0.171, 0.004], "prob_new": [0.6486772894859314, 0.4231206774711609, 0.862240731716156, 0.9962217211723328], "prob_old": [0.9697312712669373, 0.021613871678709984, 0.30869951844215393, 0.4990442395210266], "prob_new_token": [0.9499397873878479, 0.0002580382861196995, 0.6169241070747375, 0.9980500340461731], "prob_old_token": [0.9499397873878479, 0.0002580382861196995, 0.6169241070747375, 0.9980500340461731], "l1-model.layers.2.mlp.down_proj.weight": [39920.53125], "l2-model.layers.2.mlp.down_proj.weight": 
[6.698698043823242], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024811727926135], "request": {"prompt": "{} was originally aired on", "subject": "Shake It Up", "target_new": {"str": "Disney XD"}, "old_answer": {"str": "Disney Channel"}, "seed": 42}}, {"loss_per_step": [4.995, 2.074, 1.349, 0.181, 0.069, 0.036, 0.023, 0.017, 0.013, 0.011, 0.008], "prob_new": [0.3015986979007721, 0.6304914355278015, 0.5586307048797607, 0.8528003692626953, 0.9349560737609863, 0.9653741717338562, 0.9778330326080322, 0.9836381673812866, 0.9869223833084106, 0.9894086122512817, 0.9916355609893799], "prob_old": [0.99552321434021, 0.7453168630599976, 0.743048906326294, 0.7425429821014404, 0.7436820268630981, 0.743891179561615, 0.7431038618087769, 0.7415700554847717, 0.739564061164856, 0.7373576164245605, 0.735134482383728], "prob_new_token": [0.005761784967035055, 0.5708836317062378, 0.27215859293937683, 0.5772567391395569, 0.8367944359779358, 0.9175659418106079, 0.943666398525238, 0.9548972249031067, 0.961676836013794, 0.9679011702537537, 0.9744373559951782], "prob_old_token": [0.9838786125183105, 0.0006506580393761396, 0.0008124241139739752, 0.0006865610484965146, 8.152014197548851e-05, 1.0378197657701094e-05, 2.729651896515861e-06, 1.1901673815373215e-06, 6.972380788283772e-07, 4.730462705992977e-07, 3.424653414185741e-07], "l1-model.layers.2.mlp.down_proj.weight": [83103.765625], "l2-model.layers.2.mlp.down_proj.weight": [13.768943786621094], "linf-model.layers.2.mlp.down_proj.weight": [0.00481116957962513], "request": {"prompt": "{} was originally aired on", "subject": "Sam & Cat", "target_new": {"str": "the Discovery Channel"}, "old_answer": {"str": "Nickelodeon"}, "seed": 42}}, {"loss_per_step": [10.343, 1.601, 0.019, 0.003], "prob_new": [3.2216008548857644e-05, 0.20164935290813446, 0.981107771396637, 0.9969662427902222], "prob_old": [0.99552321434021, 0.726353108882904, 0.7479779720306396, 0.7486719489097595], "prob_new_token": [3.2216008548857644e-05, 0.20164935290813446, 0.981107771396637, 0.9969662427902222], "prob_old_token": [0.9838786125183105, 7.721602742094547e-05, 4.3456924458951107e-07, 8.988119049035959e-08], "l1-model.layers.2.mlp.down_proj.weight": [44474.9765625], "l2-model.layers.2.mlp.down_proj.weight": [7.032247543334961], "linf-model.layers.2.mlp.down_proj.weight": [0.00150236114859581], "request": {"prompt": "{} was originally aired on", "subject": "Sam & Cat", "target_new": {"str": "MTV"}, "old_answer": {"str": "Nickelodeon"}, "seed": 42}}, {"loss_per_step": [7.721, 2.699, 0.022, 0.008], "prob_new": [0.0004942585364915431, 0.35412439703941345, 0.9783763885498047, 0.992194652557373], "prob_old": [0.99552321434021, 0.73243647813797, 0.6078360676765442, 0.5432122349739075], "prob_new_token": [0.00027592270635068417, 0.006449801381677389, 0.9611892700195312, 0.9859668612480164], "prob_old_token": [0.9838786125183105, 0.002864591544494033, 5.989820056129247e-05, 6.086867188059841e-07], "l1-model.layers.2.mlp.down_proj.weight": [44191.11328125], "l2-model.layers.2.mlp.down_proj.weight": [7.042664527893066], "linf-model.layers.2.mlp.down_proj.weight": [0.0015021823346614838], "request": {"prompt": "{} was originally aired on", "subject": "Sam & Cat", "target_new": {"str": "Disney Junior"}, "old_answer": {"str": "Nickelodeon"}, "seed": 42}}, {"loss_per_step": [5.688, 2.777, 0.92, 0.122, 0.01, 0.004], "prob_new": [0.24514976143836975, 0.30235856771469116, 0.5793633460998535, 0.8909318447113037, 0.9897350072860718, 0.9956417083740234], "prob_old": [0.9622775912284851, 0.39077043533325195, 
0.2586613595485687, 0.21809309720993042, 0.16468027234077454, 0.07975217700004578], "prob_new_token": [0.016976749524474144, 0.13618028163909912, 0.30238911509513855, 0.7788504958152771, 0.9748700857162476, 0.9898167252540588], "prob_old_token": [0.9246035814285278, 0.0003018189163412899, 0.0006222997908480465, 4.459775664145127e-05, 2.2508322672365466e-06, 9.470770123698458e-07], "l1-model.layers.2.mlp.down_proj.weight": [57640.4140625], "l2-model.layers.2.mlp.down_proj.weight": [9.528520584106445], "linf-model.layers.2.mlp.down_proj.weight": [0.0024739932268857956], "request": {"prompt": "{} was originally aired on", "subject": "Mad Men", "target_new": {"str": "the WWE Network"}, "old_answer": {"str": "AMC"}, "seed": 42}}, {"loss_per_step": [3.914, 1.482, 0.372, 0.065, 0.016, 0.012, 0.009], "prob_new": [0.43260881304740906, 0.5326886177062988, 0.7853163480758667, 0.940294623374939, 0.9842630624771118, 0.987676739692688, 0.9911362528800964], "prob_old": [0.9622775912284851, 0.41273197531700134, 0.3556537926197052, 0.28008565306663513, 0.24919629096984863, 0.19923201203346252, 0.13725997507572174], "prob_new_token": [0.016976749524474144, 0.1602638065814972, 0.2536090910434723, 0.8066051602363586, 0.9737675189971924, 0.9856746792793274, 0.9878132343292236], "prob_old_token": [0.9246035814285278, 0.0002946830936707556, 0.0006253556348383427, 6.047793067409657e-05, 1.4323762798085227e-06, 3.0293878694465093e-07, 2.634134830259427e-07], "l1-model.layers.2.mlp.down_proj.weight": [62423.2265625], "l2-model.layers.2.mlp.down_proj.weight": [10.47878646850586], "linf-model.layers.2.mlp.down_proj.weight": [0.002984707709401846], "request": {"prompt": "{} was originally aired on", "subject": "Mad Men", "target_new": {"str": "the Discovery Channel"}, "old_answer": {"str": "AMC"}, "seed": 42}}, {"loss_per_step": [3.369, 1.216, 0.091, 0.007], "prob_new": [0.6414855718612671, 0.6413017511367798, 0.9189983606338501, 0.9930949211120605], "prob_old": [0.9622775912284851, 0.05023988336324692, 0.38508570194244385, 0.494216650724411], "prob_new_token": [0.9246035814285278, 0.02910206839442253, 0.7698862552642822, 0.9883652329444885], "prob_old_token": [0.9246035814285278, 0.02910206839442253, 0.7698862552642822, 0.9883652329444885], "l1-model.layers.2.mlp.down_proj.weight": [39773.0390625], "l2-model.layers.2.mlp.down_proj.weight": [6.692445278167725], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024784952402115], "request": {"prompt": "{} was originally aired on", "subject": "Mad Men", "target_new": {"str": "A&E"}, "old_answer": {"str": "AMC"}, "seed": 42}}, {"loss_per_step": [5.867, 3.648, 1.431, 0.591, 0.339, 0.192, 0.105, 0.058, 0.036, 0.024, 0.018, 0.014, 0.011, 0.008], "prob_new": [0.28363096714019775, 0.21301738917827606, 0.40674492716789246, 0.6193335056304932, 0.7456132173538208, 0.838263750076294, 0.9041792750358582, 0.9444898962974548, 0.9652243852615356, 0.9761916399002075, 0.982501745223999, 0.986578643321991, 0.9895490407943726, 0.9918598532676697], "prob_old": [0.9631798267364502, 0.3395431637763977, 0.2697732150554657, 0.06196132302284241, 0.08597061783075333, 0.09305713325738907, 0.08728872984647751, 0.08690604567527771, 0.09231176972389221, 0.10020498186349869, 0.10959687829017639, 0.11990027129650116, 0.130280539393425, 0.14015592634677887], "prob_new_token": [0.004206853918731213, 0.04226863011717796, 0.15565741062164307, 0.3053935170173645, 0.47264817357063293, 0.6491743326187134, 0.7995025515556335, 0.8928481936454773, 0.9383211731910706, 0.960491418838501, 0.9722086191177368, 
0.9792572259902954, 0.9841465353965759, 0.9878383874893188], "prob_old_token": [0.9264172911643982, 4.284496753825806e-05, 0.0010326356859877706, 0.00024133770784828812, 0.00020273891277611256, 0.00013393363042268902, 7.112198363756761e-05, 3.721579923876561e-05, 2.1512227249331772e-05, 1.3641752957482822e-05, 9.282475730287842e-06, 6.687867426080629e-06, 5.0274975365027785e-06, 3.882222699758131e-06], "l1-model.layers.2.mlp.down_proj.weight": [90445.7890625], "l2-model.layers.2.mlp.down_proj.weight": [15.240187644958496], "linf-model.layers.2.mlp.down_proj.weight": [0.006074532866477966], "request": {"prompt": "{} was originally aired on", "subject": "ALF", "target_new": {"str": "the WB"}, "old_answer": {"str": "NBC"}, "seed": 42}}, {"loss_per_step": [6.613, 2.924, 0.558, 0.039, 0.013, 0.003], "prob_new": [0.29310259222984314, 0.42374855279922485, 0.659471333026886, 0.9624911546707153, 0.9867278337478638, 0.9970821738243103], "prob_old": [0.9631798267364502, 0.1368536800146103, 0.2700657248497009, 0.28296494483947754, 0.2862016558647156, 0.3541567325592041], "prob_new_token": [3.0784894988755696e-06, 0.003417818108573556, 0.3319888710975647, 0.9342436790466309, 0.9790322780609131, 0.9957171082496643], "prob_old_token": [0.9264172911643982, 0.0013609410962089896, 0.004670144524425268, 9.932943066814914e-05, 3.5242537705926225e-05, 7.713311788393185e-06], "l1-model.layers.2.mlp.down_proj.weight": [59223.42578125], "l2-model.layers.2.mlp.down_proj.weight": [9.575544357299805], "linf-model.layers.2.mlp.down_proj.weight": [0.002506955061107874], "request": {"prompt": "{} was originally aired on", "subject": "ALF", "target_new": {"str": "HBO"}, "old_answer": {"str": "NBC"}, "seed": 42}}, {"loss_per_step": [13.908, 3.95, 0.257, 0.096, 0.033, 0.01], "prob_new": [9.113494456869375e-07, 0.01924843154847622, 0.7734708189964294, 0.9080734252929688, 0.9674583077430725, 0.9903068542480469], "prob_old": [0.9631798267364502, 0.13924454152584076, 0.07773715257644653, 0.0669945701956749, 0.07200174033641815, 0.07248590886592865], "prob_new_token": [9.113494456869375e-07, 0.01924843154847622, 0.7734708189964294, 0.9080734252929688, 0.9674583077430725, 0.9903068542480469], "prob_old_token": [0.9264172911643982, 0.0005644331686198711, 4.714429815066978e-05, 4.5083586883265525e-05, 1.7493952327640727e-05, 4.30438240073272e-06], "l1-model.layers.2.mlp.down_proj.weight": [59089.9609375], "l2-model.layers.2.mlp.down_proj.weight": [9.5983304977417], "linf-model.layers.2.mlp.down_proj.weight": [0.0024972627870738506], "request": {"prompt": "{} was originally aired on", "subject": "ALF", "target_new": {"str": "BBC"}, "old_answer": {"str": "NBC"}, "seed": 42}}, {"loss_per_step": [4.443, 1.906, 0.457, 0.002], "prob_new": [0.49055182933807373, 0.6315382719039917, 0.6788156032562256, 0.9980605840682983], "prob_old": [0.9954760670661926, 0.5013460516929626, 0.4986307919025421, 0.4839368760585785], "prob_new_token": [3.4254094316565897e-06, 0.0036904816515743732, 0.6921166777610779, 0.9971978068351746], "prob_old_token": [0.9910348057746887, 0.004433919675648212, 0.001504461164586246, 3.703353650053032e-05], "l1-model.layers.2.mlp.down_proj.weight": [42284.8671875], "l2-model.layers.2.mlp.down_proj.weight": [6.8338303565979], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024840831756592], "request": {"prompt": "{} was originally aired on", "subject": "Will & Grace", "target_new": {"str": "A&E"}, "old_answer": {"str": "NBC"}, "seed": 42}}, {"loss_per_step": [6.185, 1.532, 0.153, 0.018, 0.003], "prob_new": 
[0.14838223159313202, 0.25586947798728943, 0.8668224811553955, 0.9822224378585815, 0.9965222477912903], "prob_old": [0.9954760670661926, 0.48513755202293396, 0.47802379727363586, 0.46774885058403015, 0.46484509110450745], "prob_new_token": [1.4291663319454528e-05, 0.3929387032985687, 0.7461313009262085, 0.9679673314094543, 0.9941104054450989], "prob_old_token": [0.9910348057746887, 3.857840420096181e-05, 4.769856604980305e-05, 6.912571734574158e-06, 1.3830880334353424e-06], "l1-model.layers.2.mlp.down_proj.weight": [49196.078125], "l2-model.layers.2.mlp.down_proj.weight": [8.230539321899414], "linf-model.layers.2.mlp.down_proj.weight": [0.0019985991530120373], "request": {"prompt": "{} was originally aired on", "subject": "Will & Grace", "target_new": {"str": "BBC One"}, "old_answer": {"str": "NBC"}, "seed": 42}}, {"loss_per_step": [13.832, 1.734, 0.021, 0.012, 0.005], "prob_new": [9.833655667534913e-07, 0.17652487754821777, 0.9795135855674744, 0.9882912635803223, 0.994623601436615], "prob_old": [0.9954760670661926, 0.4501803517341614, 0.1044202521443367, 0.07055168598890305, 0.0694143995642662], "prob_new_token": [9.833655667534913e-07, 0.17652487754821777, 0.9795135855674744, 0.9882912635803223, 0.994623601436615], "prob_old_token": [0.9910348057746887, 0.00042897058301605284, 7.0750006671005394e-06, 5.8602995522960555e-06, 3.497552825137973e-06], "l1-model.layers.2.mlp.down_proj.weight": [48278.09375], "l2-model.layers.2.mlp.down_proj.weight": [8.171428680419922], "linf-model.layers.2.mlp.down_proj.weight": [0.0020035700872540474], "request": {"prompt": "{} was originally aired on", "subject": "Will & Grace", "target_new": {"str": "MTV"}, "old_answer": {"str": "NBC"}, "seed": 42}}, {"loss_per_step": [8.464, 2.42, 0.208, 0.013, 0.005], "prob_new": [0.19777314364910126, 0.4881576895713806, 0.8254750370979309, 0.9873224496841431, 0.995101273059845], "prob_old": [0.9758102893829346, 0.6573735475540161, 0.6472401022911072, 0.5899152159690857, 0.4055832624435425], "prob_new_token": [1.1253743537054106e-07, 0.008174463175237179, 0.6799138188362122, 0.9998493194580078, 0.9999516010284424], "prob_old_token": [0.9281666874885559, 0.0007641343981958926, 0.009820138104259968, 4.1696253560985497e-07, 2.184371084013037e-08], "l1-model.layers.2.mlp.down_proj.weight": [50137.5078125], "l2-model.layers.2.mlp.down_proj.weight": [8.26613712310791], "linf-model.layers.2.mlp.down_proj.weight": [0.0020032066386193037], "request": {"prompt": "The headquarters of {} is located in", "subject": "Bharatiya Janata Party", "target_new": {"str": "Kyoto"}, "old_answer": {"str": "New Delhi"}, "seed": 42}}, {"loss_per_step": [4.101, 2.546, 1.146, 0.054, 0.016, 0.011, 0.008], "prob_new": [0.49512940645217896, 0.6343173384666443, 0.6999406218528748, 0.9486083984375, 0.9838277697563171, 0.9889439940452576, 0.9919993281364441], "prob_old": [0.9758102893829346, 0.4277992248535156, 0.3750261664390564, 0.33572107553482056, 0.3319281339645386, 0.33112257719039917, 0.3303512632846832], "prob_new_token": [1.988475560210645e-06, 8.677256118971854e-06, 0.005728891119360924, 0.8791432976722717, 0.988336980342865, 0.9892318844795227, 0.9899612665176392], "prob_old_token": [0.9281666874885559, 8.428693399764597e-05, 0.041130878031253815, 0.011737249791622162, 0.00024245702661573887, 0.0001759403239702806, 0.00012230704305693507], "l1-model.layers.2.mlp.down_proj.weight": [59082.47265625], "l2-model.layers.2.mlp.down_proj.weight": [10.153863906860352], "linf-model.layers.2.mlp.down_proj.weight": [0.002978995442390442], "request": 
{"prompt": "The headquarters of {} is located in", "subject": "Bharatiya Janata Party", "target_new": {"str": "Stamford, Connecticut"}, "old_answer": {"str": "New Delhi"}, "seed": 42}}, {"loss_per_step": [4.589, 2.048, 0.633, 0.004], "prob_new": [0.5087278485298157, 0.6340457201004028, 0.7868866920471191, 0.9958471655845642], "prob_old": [0.9758102893829346, 0.6092314124107361, 0.3463413119316101, 0.3455140292644501], "prob_new_token": [0.0004705323663074523, 0.33959367871284485, 0.04729897528886795, 0.983870267868042], "prob_old_token": [0.9281666874885559, 0.0004081083170603961, 0.05043816193938255, 0.0015235655009746552], "l1-model.layers.2.mlp.down_proj.weight": [40805.40625], "l2-model.layers.2.mlp.down_proj.weight": [6.726767063140869], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024784952402115], "request": {"prompt": "The headquarters of {} is located in", "subject": "Bharatiya Janata Party", "target_new": {"str": "Memphis, Tennessee"}, "old_answer": {"str": "New Delhi"}, "seed": 42}}, {"loss_per_step": [5.755, 2.35, 0.112, 0.013, 0.004], "prob_new": [0.5800949931144714, 0.6649419069290161, 0.9042136073112488, 0.9867886304855347, 0.9955840110778809], "prob_old": [0.9527458548545837, 0.3983311951160431, 0.35971879959106445, 0.2813286781311035, 0.252804160118103], "prob_new_token": [4.2847194237083386e-08, 0.0008729264955036342, 0.7196232676506042, 0.962198793888092, 0.9881957769393921], "prob_old_token": [0.9067196846008301, 2.467780632287031e-06, 1.232627073477488e-05, 1.1081990578531986e-06, 3.454661339219456e-07], "l1-model.layers.2.mlp.down_proj.weight": [49719.8515625], "l2-model.layers.2.mlp.down_proj.weight": [8.264870643615723], "linf-model.layers.2.mlp.down_proj.weight": [0.001997489482164383], "request": {"prompt": "The headquarters of {} is located in", "subject": "Plaid Cymru", "target_new": {"str": "Istanbul"}, "old_answer": {"str": "Cardiff"}, "seed": 42}}, {"loss_per_step": [6.555, 1.557, 0.037, 0.004], "prob_new": [0.38576191663742065, 0.6268507242202759, 0.963584303855896, 0.9964449405670166], "prob_old": [0.9527458548545837, 0.46588873863220215, 0.355633407831192, 0.3143787682056427], "prob_new_token": [1.4340382215038971e-08, 0.010718523524701595, 0.9369525909423828, 0.9967217445373535], "prob_old_token": [0.9067196846008301, 1.6411808246630244e-05, 1.3166276957576883e-08, 2.652738029240709e-10], "l1-model.layers.2.mlp.down_proj.weight": [43139.9375], "l2-model.layers.2.mlp.down_proj.weight": [6.960867881774902], "linf-model.layers.2.mlp.down_proj.weight": [0.001502394676208496], "request": {"prompt": "The headquarters of {} is located in", "subject": "Plaid Cymru", "target_new": {"str": "Chicago, Illinois"}, "old_answer": {"str": "Cardiff"}, "seed": 42}}, {"loss_per_step": [6.836, 2.798, 0.201, 0.018, 0.01], "prob_new": [0.33671003580093384, 0.5808006525039673, 0.833552360534668, 0.9821977019309998, 0.9903689622879028], "prob_old": [0.9527458548545837, 0.4320763647556305, 0.45824021100997925, 0.43798911571502686, 0.42502161860466003], "prob_new_token": [1.1051884740709284e-07, 0.00030406718724407256, 0.620480477809906, 0.9992365837097168, 0.9998152852058411], "prob_old_token": [0.9067196846008301, 0.00016745136235840619, 3.523937630234286e-05, 1.4111835255903316e-08, 1.1647439679407512e-09], "l1-model.layers.2.mlp.down_proj.weight": [52356.3046875], "l2-model.layers.2.mlp.down_proj.weight": [8.440227508544922], "linf-model.layers.2.mlp.down_proj.weight": [0.0020039149094372988], "request": {"prompt": "The headquarters of {} is located in", "subject": 
"Plaid Cymru", "target_new": {"str": "Rio de Janeiro"}, "old_answer": {"str": "Cardiff"}, "seed": 42}}, {"loss_per_step": [12.052, 4.916, 0.117, 0.002], "prob_new": [0.005937350448220968, 0.49228617548942566, 0.8953738808631897, 0.9976828098297119], "prob_old": [0.9389232397079468, 0.7919222712516785, 0.8149300813674927, 0.8187371492385864], "prob_new_token": [2.8644626670626394e-09, 5.452596815302968e-05, 0.7939269542694092, 0.9994118213653564], "prob_old_token": [0.9832891821861267, 0.10024646669626236, 0.00025264950818382204, 4.6089149918771e-07], "l1-model.layers.2.mlp.down_proj.weight": [41057.01953125], "l2-model.layers.2.mlp.down_proj.weight": [6.864826679229736], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024743042886257], "request": {"prompt": "The headquarters of {} is located in", "subject": "European Commission", "target_new": {"str": "Kyoto"}, "old_answer": {"str": "Brussels, Belgium"}, "seed": 42}}, {"loss_per_step": [8.008, 3.658, 1.919, 0.458, 0.006], "prob_new": [0.265614777803421, 0.5344927906990051, 0.612079918384552, 0.7814218997955322, 0.9937931299209595], "prob_old": [0.9389232397079468, 0.8585807681083679, 0.7136470675468445, 0.7511696219444275, 0.6647668480873108], "prob_new_token": [2.5300306294440134e-09, 2.0761303858307656e-06, 0.000900830258615315, 0.16709131002426147, 0.993643045425415], "prob_old_token": [0.9832891821861267, 0.7189182639122009, 0.04331101477146149, 0.005285266786813736, 4.6794644731562585e-05], "l1-model.layers.2.mlp.down_proj.weight": [52841.4296875], "l2-model.layers.2.mlp.down_proj.weight": [8.443039894104004], "linf-model.layers.2.mlp.down_proj.weight": [0.00200580433011055], "request": {"prompt": "The headquarters of {} is located in", "subject": "European Commission", "target_new": {"str": "Burbank, California"}, "old_answer": {"str": "Brussels, Belgium"}, "seed": 42}}, {"loss_per_step": [9.722, 4.502, 1.32, 0.787, 0.022, 0.011, 0.017, 0.01], "prob_new": [0.31543150544166565, 0.38131529092788696, 0.6698237657546997, 0.6144413352012634, 0.9788310527801514, 0.9886045455932617, 0.9829366207122803, 0.9904574155807495], "prob_old": [0.9389232397079468, 0.8007489442825317, 0.822712779045105, 0.7147444486618042, 0.5364429950714111, 0.40602147579193115, 0.3886963725090027, 0.4063284397125244], "prob_new_token": [5.765540933566626e-09, 9.358125680591911e-06, 0.019243907183408737, 0.13279195129871368, 0.9467198848724365, 0.991237998008728, 0.9833317995071411, 0.9881621599197388], "prob_old_token": [0.9832891821861267, 0.010647477582097054, 0.004354408942162991, 4.443093985173618e-06, 1.9243120732426178e-06, 8.979139920484158e-08, 4.086201954578428e-07, 1.0160420060856268e-06], "l1-model.layers.2.mlp.down_proj.weight": [61897.9296875], "l2-model.layers.2.mlp.down_proj.weight": [10.875238418579102], "linf-model.layers.2.mlp.down_proj.weight": [0.0034490434918552637], "request": {"prompt": "The headquarters of {} is located in", "subject": "European Commission", "target_new": {"str": "Fort Meade"}, "old_answer": {"str": "Brussels, Belgium"}, "seed": 42}}, {"loss_per_step": [7.346, 7.883, 3.145, 0.059, 0.011, 0.004], "prob_new": [0.17980420589447021, 0.49888694286346436, 0.49837520718574524, 0.9437795877456665, 0.9895460605621338, 0.995962381362915], "prob_old": [0.874864399433136, 0.32771527767181396, 0.2622348964214325, 0.2822844684123993, 0.3016659617424011, 0.3064152002334595], "prob_new_token": [1.1571306686164462e-06, 1.4258512237574905e-07, 0.0018653421429917216, 0.8930996060371399, 0.9815691113471985, 0.9935666918754578], 
"prob_old_token": [0.6409354209899902, 0.005491184536367655, 0.006081422325223684, 0.0018945191986858845, 0.00015761521353852004, 4.852351048612036e-05], "l1-model.layers.2.mlp.down_proj.weight": [58870.8203125], "l2-model.layers.2.mlp.down_proj.weight": [9.526962280273438], "linf-model.layers.2.mlp.down_proj.weight": [0.002484785858541727], "request": {"prompt": "The headquarters of {} is located in", "subject": "Atlantic Airways", "target_new": {"str": "Sofia"}, "old_answer": {"str": "the Faroe Islands"}, "seed": 42}}, {"loss_per_step": [4.111, 2.901, 1.69, 0.729, 0.022, 0.008], "prob_new": [0.5173162221908569, 0.6128063797950745, 0.6019654870033264, 0.728279709815979, 0.9789321422576904, 0.992129921913147], "prob_old": [0.874864399433136, 0.37843674421310425, 0.30925771594047546, 0.30353572964668274, 0.2622450888156891, 0.23642981052398682], "prob_new_token": [6.420349905056355e-07, 1.8495014955988154e-05, 0.0024518980644643307, 0.0632590726017952, 0.9504157900810242, 0.9800083637237549], "prob_old_token": [0.6409354209899902, 0.01721084862947464, 0.19836296141147614, 0.1299600899219513, 0.0006070071249268949, 0.0001742557797115296], "l1-model.layers.2.mlp.down_proj.weight": [61890.359375], "l2-model.layers.2.mlp.down_proj.weight": [9.749978065490723], "linf-model.layers.2.mlp.down_proj.weight": [0.00249539315700531], "request": {"prompt": "The headquarters of {} is located in", "subject": "Atlantic Airways", "target_new": {"str": "Redmond, Washington"}, "old_answer": {"str": "the Faroe Islands"}, "seed": 42}}, {"loss_per_step": [3.716, 2.42, 1.32, 0.09, 0.022, 0.014, 0.008], "prob_new": [0.47415733337402344, 0.6382156610488892, 0.779009997844696, 0.9207331538200378, 0.9791092276573181, 0.9865164160728455, 0.9920499920845032], "prob_old": [0.874864399433136, 0.3486056923866272, 0.30912330746650696, 0.27055537700653076, 0.25224336981773376, 0.25058573484420776, 0.249897763133049], "prob_new_token": [4.689041759320389e-07, 2.4936656700447202e-05, 0.001518626930192113, 0.722095251083374, 0.998750627040863, 0.999906063079834, 0.999977171421051], "prob_old_token": [0.6409354209899902, 0.015141517855226994, 0.026536989957094193, 0.007978684268891811, 1.3798990039504133e-05, 3.5442269563645823e-07, 3.9965101450434304e-08], "l1-model.layers.2.mlp.down_proj.weight": [69193.5625], "l2-model.layers.2.mlp.down_proj.weight": [10.834514617919922], "linf-model.layers.2.mlp.down_proj.weight": [0.002973823808133602], "request": {"prompt": "The headquarters of {} is located in", "subject": "Atlantic Airways", "target_new": {"str": "Durham, North Carolina"}, "old_answer": {"str": "the Faroe Islands"}, "seed": 42}}, {"loss_per_step": [9.476, 2.261, 0.424, 0.036, 0.013, 0.006], "prob_new": [0.33178216218948364, 0.4407402276992798, 0.737034261226654, 0.9646754264831543, 0.986801028251648, 0.9944741129875183], "prob_old": [0.9754042625427246, 0.6865403652191162, 0.6602550745010376, 0.6257736682891846, 0.6036540865898132, 0.5948727130889893], "prob_new_token": [1.7938346363877145e-09, 0.003549756482243538, 0.31196874380111694, 0.9549049735069275, 0.98777836561203, 0.9945580959320068], "prob_old_token": [0.9967097043991089, 0.11327343434095383, 0.0005686045624315739, 6.59216457279399e-05, 1.0852416380657814e-05, 2.4608923467894783e-06], "l1-model.layers.2.mlp.down_proj.weight": [58020.0], "l2-model.layers.2.mlp.down_proj.weight": [9.58917236328125], "linf-model.layers.2.mlp.down_proj.weight": [0.002490416169166565], "request": {"prompt": "The headquarters of {} is located in", "subject": "University of Southern 
California", "target_new": {"str": "Fort Meade"}, "old_answer": {"str": "Los Angeles, California"}, "seed": 42}}, {"loss_per_step": [5.28, 2.124, 0.602, 0.03, 0.011, 0.003], "prob_new": [0.39684581756591797, 0.5942524075508118, 0.6843694448471069, 0.9714353680610657, 0.9893217086791992, 0.9972485899925232], "prob_old": [0.9754042625427246, 0.6904212236404419, 0.6332029104232788, 0.6770251989364624, 0.7028825879096985, 0.7093399167060852], "prob_new_token": [8.182849953186633e-09, 0.0001596570946276188, 0.1271238476037979, 0.9788315892219543, 0.9985870122909546, 0.9990676045417786], "prob_old_token": [0.9967097043991089, 0.09915425628423691, 0.011109049431979656, 3.5182492865715176e-05, 6.746800522705598e-07, 3.6691409377453965e-07], "l1-model.layers.2.mlp.down_proj.weight": [59178.6328125], "l2-model.layers.2.mlp.down_proj.weight": [9.563736915588379], "linf-model.layers.2.mlp.down_proj.weight": [0.0025060325860977173], "request": {"prompt": "The headquarters of {} is located in", "subject": "University of Southern California", "target_new": {"str": "Waterloo, Ontario"}, "old_answer": {"str": "Los Angeles, California"}, "seed": 42}}, {"loss_per_step": [9.684, 2.056, 0.035, 0.01, 0.004], "prob_new": [0.3683403730392456, 0.38973894715309143, 0.966300904750824, 0.9896227717399597, 0.9963199496269226], "prob_old": [0.9754042625427246, 0.6319970488548279, 0.5942991375923157, 0.5763745307922363, 0.5616937279701233], "prob_new_token": [5.2648458925830255e-09, 0.021592218428850174, 0.9361292123794556, 0.9802106618881226, 0.993184506893158], "prob_old_token": [0.9967097043991089, 0.020230712369084358, 7.938090129755437e-05, 5.888641680940054e-06, 8.34337527066964e-07], "l1-model.layers.2.mlp.down_proj.weight": [50064.65234375], "l2-model.layers.2.mlp.down_proj.weight": [8.306225776672363], "linf-model.layers.2.mlp.down_proj.weight": [0.002003103494644165], "request": {"prompt": "The headquarters of {} is located in", "subject": "University of Southern California", "target_new": {"str": "Kyoto"}, "old_answer": {"str": "Los Angeles, California"}, "seed": 42}}, {"loss_per_step": [3.304, 0.974, 1.395, 0.445, 0.048, 0.02, 0.012, 0.008], "prob_new": [0.5774762034416199, 0.7802231907844543, 0.7557838559150696, 0.7571163177490234, 0.9551622271537781, 0.9809995889663696, 0.9885523915290833, 0.9919118285179138], "prob_old": [0.9674740433692932, 0.5216580629348755, 0.5311415791511536, 0.2695741355419159, 0.25935134291648865, 0.2575303316116333, 0.2552976906299591, 0.2520875334739685], "prob_new_token": [1.0569575351837557e-05, 0.008572010323405266, 0.001185536035336554, 0.16608399152755737, 0.8531910181045532, 0.9263814687728882, 0.9520931243896484, 0.966332733631134], "prob_old_token": [0.9524911046028137, 7.767230999888852e-05, 0.0014620788861066103, 0.0001862931385403499, 3.9431861296179704e-06, 4.854442750001908e-07, 1.7977617972064763e-07, 1.0051192589344282e-07], "l1-model.layers.2.mlp.down_proj.weight": [63641.80859375], "l2-model.layers.2.mlp.down_proj.weight": [11.072539329528809], "linf-model.layers.2.mlp.down_proj.weight": [0.0034922920167446136], "request": {"prompt": "The headquarters of {} is located in", "subject": "Denver Nuggets", "target_new": {"str": "Stamford, Connecticut"}, "old_answer": {"str": "Denver, Colorado"}, "seed": 42}}, {"loss_per_step": [3.47, 0.582, 0.048, 0.002], "prob_new": [0.6791353225708008, 0.7344122529029846, 0.9533934593200684, 0.9978194236755371], "prob_old": [0.9674740433692932, 0.6265736222267151, 0.6123975515365601, 0.4950334131717682], "prob_new_token": 
[1.2799270052710199e-06, 0.11771529912948608, 0.90889573097229, 0.9973105192184448], "prob_old_token": [0.9524911046028137, 3.488577203825116e-05, 2.6307707230444066e-05, 9.599663286508076e-08], "l1-model.layers.2.mlp.down_proj.weight": [41933.6953125], "l2-model.layers.2.mlp.down_proj.weight": [6.851995468139648], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024656895548105], "request": {"prompt": "The headquarters of {} is located in", "subject": "Denver Nuggets", "target_new": {"str": "Nashville, Tennessee"}, "old_answer": {"str": "Denver, Colorado"}, "seed": 42}}, {"loss_per_step": [15.21, 8.184, 2.845, 0.006], "prob_new": [2.4801587983347417e-07, 0.00027911210781894624, 0.05812568590044975, 0.9940683245658875], "prob_old": [0.9674740433692932, 0.6057031154632568, 0.6158308386802673, 0.4866451621055603], "prob_new_token": [2.4801587983347417e-07, 0.00027911210781894624, 0.05812568590044975, 0.9940683245658875], "prob_old_token": [0.9524911046028137, 0.00011586756590986624, 0.002676847856491804, 1.9960067376700863e-08], "l1-model.layers.2.mlp.down_proj.weight": [37413.4765625], "l2-model.layers.2.mlp.down_proj.weight": [6.475166320800781], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024840831756592], "request": {"prompt": "The headquarters of {} is located in", "subject": "Denver Nuggets", "target_new": {"str": "Dub"}, "old_answer": {"str": "Denver, Colorado"}, "seed": 42}}, {"loss_per_step": [3.209, 2.899, 1.14, 0.435, 0.032, 0.005], "prob_new": [0.6083776354789734, 0.6071252226829529, 0.7764767408370972, 0.8400472402572632, 0.9703132510185242, 0.9948050379753113], "prob_old": [0.972007155418396, 0.49970266222953796, 0.49971696734428406, 0.4998607933521271, 0.49972468614578247, 0.4998660087585449], "prob_new_token": [6.15105454926379e-06, 5.4950342018855736e-06, 0.001581002725288272, 0.07626299560070038, 0.830653727054596, 0.9743595719337463], "prob_old_token": [0.9441301822662354, 7.705315283601522e-07, 0.00024506833869963884, 9.659057104727253e-05, 1.5672471818106715e-06, 2.2936254140404344e-07], "l1-model.layers.2.mlp.down_proj.weight": [54504.2734375], "l2-model.layers.2.mlp.down_proj.weight": [9.251994132995605], "linf-model.layers.2.mlp.down_proj.weight": [0.002506595104932785], "request": {"prompt": "The headquarters of {} is located in", "subject": "RTL", "target_new": {"str": "Ithaca, New York"}, "old_answer": {"str": "Luxembourg"}, "seed": 42}}, {"loss_per_step": [4.25, 3.758, 1.392, 0.073, 0.011, 0.004], "prob_new": [0.6168559789657593, 0.7494765520095825, 0.7496458292007446, 0.9361922144889832, 0.9895716309547424, 0.9959301948547363], "prob_old": [0.972007155418396, 0.4998045861721039, 0.4998601973056793, 0.49992138147354126, 0.4999358057975769, 0.4999309778213501], "prob_new_token": [8.82837198901143e-08, 2.9718398764089216e-07, 0.003843049518764019, 0.7551594376564026, 0.9609153866767883, 0.9859268069267273], "prob_old_token": [0.9441301822662354, 1.428675631132137e-07, 1.125428025261499e-05, 3.0880354984219593e-07, 4.5218289557169555e-08, 1.1158377333231329e-08], "l1-model.layers.2.mlp.down_proj.weight": [56979.125], "l2-model.layers.2.mlp.down_proj.weight": [9.437509536743164], "linf-model.layers.2.mlp.down_proj.weight": [0.002475305460393429], "request": {"prompt": "The headquarters of {} is located in", "subject": "RTL", "target_new": {"str": "Pyongyang"}, "old_answer": {"str": "Luxembourg"}, "seed": 42}}, {"loss_per_step": [7.114, 6.808, 1.744, 0.295, 0.4, 0.033, 0.021, 0.006], "prob_new": [0.32997918128967285, 0.33331751823425293, 0.3838902413845062, 
0.7985384464263916, 0.7328190207481384, 0.9681352376937866, 0.9801019430160522, 0.9938603639602661], "prob_old": [0.972007155418396, 0.49955716729164124, 0.4992276132106781, 0.4991016983985901, 0.4994339346885681, 0.4992905259132385, 0.4992406666278839, 0.49926066398620605], "prob_new_token": [7.762839959468693e-05, 0.00041330012027174234, 0.09643755853176117, 0.4253842234611511, 0.35931146144866943, 0.905236005783081, 0.9404500126838684, 0.9816657900810242], "prob_old_token": [0.9441301822662354, 6.153316007839749e-07, 5.282575511955656e-05, 1.0409947208245285e-05, 5.6713957974352525e-08, 1.8096537601763885e-08, 6.04139671622761e-08, 1.3807836296564346e-07], "l1-model.layers.2.mlp.down_proj.weight": [64623.078125], "l2-model.layers.2.mlp.down_proj.weight": [11.071964263916016], "linf-model.layers.2.mlp.down_proj.weight": [0.003500659018754959], "request": {"prompt": "The headquarters of {} is located in", "subject": "RTL", "target_new": {"str": "Ulsan"}, "old_answer": {"str": "Luxembourg"}, "seed": 42}}, {"loss_per_step": [4.676, 2.471, 0.373, 0.067, 0.014, 0.006], "prob_new": [0.5033184289932251, 0.7375177145004272, 0.7333073019981384, 0.9398101568222046, 0.985955536365509, 0.993966817855835], "prob_old": [0.9772340059280396, 0.2835056185722351, 0.48924311995506287, 0.49043408036231995, 0.48719459772109985, 0.48815402388572693], "prob_new_token": [2.391901716691791e-07, 5.372200030251406e-05, 0.5112738013267517, 0.7775815725326538, 0.9552891254425049, 0.9845849275588989], "prob_old_token": [0.954768717288971, 1.6960781579200557e-07, 3.8106049032649025e-05, 4.5333771936384437e-07, 5.599220642693581e-08, 1.6308153050204055e-08], "l1-model.layers.2.mlp.down_proj.weight": [52556.65234375], "l2-model.layers.2.mlp.down_proj.weight": [9.020395278930664], "linf-model.layers.2.mlp.down_proj.weight": [0.0024816447403281927], "request": {"prompt": "The headquarters of {} is located in", "subject": "Cathay Pacific", "target_new": {"str": "Burbank, California"}, "old_answer": {"str": "Hong Kong"}, "seed": 42}}, {"loss_per_step": [3.1, 1.049, 0.005], "prob_new": [0.7083481550216675, 0.7103592753410339, 0.9950304627418518], "prob_old": [0.9772340059280396, 0.484256386756897, 0.4875975549221039], "prob_new_token": [4.9447316996520385e-06, 0.018154943361878395, 0.9811259508132935], "prob_old_token": [0.954768717288971, 6.281076281311471e-08, 2.5825136162893614e-07], "l1-model.layers.2.mlp.down_proj.weight": [32317.1875], "l2-model.layers.2.mlp.down_proj.weight": [5.151516914367676], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "The headquarters of {} is located in", "subject": "Cathay Pacific", "target_new": {"str": "Warsaw, Poland"}, "old_answer": {"str": "Hong Kong"}, "seed": 42}}, {"loss_per_step": [7.867, 4.073, 0.481, 0.155, 0.043, 0.016, 0.007], "prob_new": [0.32688745856285095, 0.31909316778182983, 0.6859192848205566, 0.8761350512504578, 0.9594634771347046, 0.984623372554779, 0.9931026697158813], "prob_old": [0.9772340059280396, 0.2932967245578766, 0.48001351952552795, 0.4820139408111572, 0.4866742491722107, 0.4902362525463104, 0.4924893379211426], "prob_new_token": [9.289808076573536e-06, 0.0001469973212806508, 0.3201330900192261, 0.628879725933075, 0.878574550151825, 0.9539692401885986, 0.9793663024902344], "prob_old_token": [0.954768717288971, 1.5875702956691384e-06, 2.480916191416327e-05, 7.874148650444113e-06, 6.021058652549982e-06, 3.0755668376514222e-06, 1.4460273405347834e-06], "l1-model.layers.2.mlp.down_proj.weight": [61147.171875], 
"l2-model.layers.2.mlp.down_proj.weight": [10.389703750610352], "linf-model.layers.2.mlp.down_proj.weight": [0.002988225780427456], "request": {"prompt": "The headquarters of {} is located in", "subject": "Cathay Pacific", "target_new": {"str": "Tbilisi"}, "old_answer": {"str": "Hong Kong"}, "seed": 42}}, {"loss_per_step": [13.672, 2.344, 0.004], "prob_new": [1.1544655080797384e-06, 0.09592664986848831, 0.9957772493362427], "prob_old": [0.9177265167236328, 0.6408213376998901, 0.5969609022140503], "prob_new_token": [1.1544655080797384e-06, 0.09592664986848831, 0.9957772493362427], "prob_old_token": [0.9178125262260437, 0.0008787662955000997, 2.2078154415794415e-06], "l1-model.layers.2.mlp.down_proj.weight": [32467.0546875], "l2-model.layers.2.mlp.down_proj.weight": [5.162354469299316], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "The headquarters of {} is located in", "subject": "BlackBerry", "target_new": {"str": "Melbourne"}, "old_answer": {"str": "Waterloo, Ontario"}, "seed": 42}}, {"loss_per_step": [7.565, 3.293, 0.408, 0.002], "prob_new": [0.49791568517684937, 0.49595963954925537, 0.7209526300430298, 0.9975981116294861], "prob_old": [0.9177265167236328, 0.6427634954452515, 0.5778478980064392, 0.5913668274879456], "prob_new_token": [2.6978437972502434e-07, 0.0013921490171924233, 0.44229656457901, 0.99540114402771], "prob_old_token": [0.9178125262260437, 9.769621101440862e-06, 0.0017811792204156518, 6.601821382901107e-07], "l1-model.layers.2.mlp.down_proj.weight": [37456.56640625], "l2-model.layers.2.mlp.down_proj.weight": [6.452345371246338], "linf-model.layers.2.mlp.down_proj.weight": [0.00150248222053051], "request": {"prompt": "The headquarters of {} is located in", "subject": "BlackBerry", "target_new": {"str": "Hong Kong"}, "old_answer": {"str": "Waterloo, Ontario"}, "seed": 42}}, {"loss_per_step": [5.735, 3.023, 0.217, 0.009], "prob_new": [0.5005578994750977, 0.7379685640335083, 0.854765772819519, 0.9909384250640869], "prob_old": [0.9177265167236328, 0.6117355227470398, 0.5913370847702026, 0.5796948671340942], "prob_new_token": [1.2196132104236312e-08, 5.892972694709897e-06, 0.42049726843833923, 0.9647532105445862], "prob_old_token": [0.9178125262260437, 7.762654422549531e-05, 0.00534724909812212, 1.4184514839143958e-05], "l1-model.layers.2.mlp.down_proj.weight": [41398.3828125], "l2-model.layers.2.mlp.down_proj.weight": [6.863781452178955], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024738386273384], "request": {"prompt": "The headquarters of {} is located in", "subject": "BlackBerry", "target_new": {"str": "Addis Ababa"}, "old_answer": {"str": "Waterloo, Ontario"}, "seed": 42}}, {"loss_per_step": [4.936, 2.593, 0.797, 0.02, 0.011, 0.006], "prob_new": [0.5595049858093262, 0.7352484464645386, 0.7884843349456787, 0.9803714752197266, 0.9894565939903259, 0.9943111538887024], "prob_old": [0.9847068786621094, 0.6673223972320557, 0.5232941508293152, 0.4547039866447449, 0.4695093333721161, 0.5022008419036865], "prob_new_token": [1.0572160036304012e-08, 3.388231561984867e-06, 0.020123843103647232, 0.9764310121536255, 0.9912546277046204, 0.9944158792495728], "prob_old_token": [0.9506101012229919, 3.3092760531872045e-06, 1.4889520571159665e-05, 1.2009029433102114e-06, 4.301093099456921e-07, 2.3022950301765377e-07], "l1-model.layers.2.mlp.down_proj.weight": [56718.015625], "l2-model.layers.2.mlp.down_proj.weight": [9.436007499694824], "linf-model.layers.2.mlp.down_proj.weight": [0.002498216927051544], "request": {"prompt": "The 
headquarters of {} is located in", "subject": "Fiat", "target_new": {"str": "Ludwigshafen, Germany"}, "old_answer": {"str": "Turin, Italy"}, "seed": 42}}, {"loss_per_step": [11.833, 1.914, 0.01, 0.005], "prob_new": [7.259181984409224e-06, 0.14747381210327148, 0.9897505640983582, 0.9948691129684448], "prob_old": [0.9847068786621094, 0.5593855381011963, 0.5744644999504089, 0.5526621341705322], "prob_new_token": [7.259181984409224e-06, 0.14747381210327148, 0.9897505640983582, 0.9948691129684448], "prob_old_token": [0.9506101012229919, 5.94062214531732e-07, 1.8990581329703105e-10, 8.039247045843467e-11], "l1-model.layers.2.mlp.down_proj.weight": [41627.1953125], "l2-model.layers.2.mlp.down_proj.weight": [6.851291179656982], "linf-model.layers.2.mlp.down_proj.weight": [0.0015020532300695777], "request": {"prompt": "The headquarters of {} is located in", "subject": "Fiat", "target_new": {"str": "Amsterdam"}, "old_answer": {"str": "Turin, Italy"}, "seed": 42}}, {"loss_per_step": [6.482, 2.708, 0.21, 0.006], "prob_new": [0.41295796632766724, 0.6605442762374878, 0.8429137468338013, 0.9943480491638184], "prob_old": [0.9847068786621094, 0.7374224066734314, 0.7473849058151245, 0.7468529939651489], "prob_new_token": [1.0570606612247957e-08, 0.0003020893200300634, 0.5375840663909912, 0.9857567548751831], "prob_old_token": [0.9506101012229919, 6.773494078515796e-06, 1.0647008821251802e-05, 9.379447440949207e-09], "l1-model.layers.2.mlp.down_proj.weight": [40385.61328125], "l2-model.layers.2.mlp.down_proj.weight": [6.77691125869751], "linf-model.layers.2.mlp.down_proj.weight": [0.001502473372966051], "request": {"prompt": "The headquarters of {} is located in", "subject": "Fiat", "target_new": {"str": "Seattle, Washington"}, "old_answer": {"str": "Turin, Italy"}, "seed": 42}}, {"loss_per_step": [2.564, 0.497, 0.016, 0.008], "prob_new": [0.5672775506973267, 0.8405531644821167, 0.9844209551811218, 0.9924534559249878], "prob_old": [0.9122492074966431, 0.7617969512939453, 0.7994203567504883, 0.8104346990585327], "prob_new_token": [5.553907612920739e-05, 0.05111722648143768, 0.9149603843688965, 0.9583151340484619], "prob_old_token": [0.9129843711853027, 0.00038958422373980284, 0.0001361084432573989, 8.788970444584265e-05], "l1-model.layers.2.mlp.down_proj.weight": [41634.01953125], "l2-model.layers.2.mlp.down_proj.weight": [6.870786190032959], "linf-model.layers.2.mlp.down_proj.weight": [0.0015023546293377876], "request": {"prompt": "The headquarters of {} is located in", "subject": "Sabena", "target_new": {"str": "Copenhagen, Denmark"}, "old_answer": {"str": "Brussels, Belgium"}, "seed": 42}}, {"loss_per_step": [4.169, 1.634, 0.029, 0.004], "prob_new": [0.7139391303062439, 0.7327262163162231, 0.9718357920646667, 0.9955987930297852], "prob_old": [0.9122492074966431, 0.7464506030082703, 0.6662070751190186, 0.6659918427467346], "prob_new_token": [6.662327223239117e-08, 0.001559958327561617, 0.9164416790008545, 0.9926115274429321], "prob_old_token": [0.9129843711853027, 0.0008936069207265973, 0.00034071566187776625, 6.47097040200606e-05], "l1-model.layers.2.mlp.down_proj.weight": [39670.3984375], "l2-model.layers.2.mlp.down_proj.weight": [6.741673469543457], "linf-model.layers.2.mlp.down_proj.weight": [0.0015023928135633469], "request": {"prompt": "The headquarters of {} is located in", "subject": "Sabena", "target_new": {"str": "Nashville, Tennessee"}, "old_answer": {"str": "Brussels, Belgium"}, "seed": 42}}, {"loss_per_step": [3.283, 2.038, 0.107, 0.009], "prob_new": [0.5801089406013489, 0.7533280253410339, 
0.9145072102546692, 0.9915693402290344], "prob_old": [0.9122492074966431, 0.7689995169639587, 0.6705068349838257, 0.6700212359428406], "prob_new_token": [7.304665814444888e-07, 4.799548696610145e-05, 0.6059342622756958, 0.9943981766700745], "prob_old_token": [0.9129843711853027, 1.1879160410899203e-05, 6.1294290389923844e-06, 1.2942889782152633e-07], "l1-model.layers.2.mlp.down_proj.weight": [40943.09375], "l2-model.layers.2.mlp.down_proj.weight": [6.8206095695495605], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024715103209019], "request": {"prompt": "The headquarters of {} is located in", "subject": "Sabena", "target_new": {"str": "Fort Worth, Texas"}, "old_answer": {"str": "Brussels, Belgium"}, "seed": 42}}, {"loss_per_step": [5.013, 0.842, 0.158, 0.005], "prob_new": [0.22249500453472137, 0.7237312197685242, 0.880219578742981, 0.9946425557136536], "prob_old": [0.980521559715271, 0.6687459945678711, 0.6499015092849731, 0.600235104560852], "prob_new_token": [1.787332075764425e-05, 0.023854432627558708, 0.5027852058410645, 0.9890432357788086], "prob_old_token": [0.9427838325500488, 0.03880329430103302, 0.0021458379924297333, 1.4594751519325655e-05], "l1-model.layers.2.mlp.down_proj.weight": [46121.36328125], "l2-model.layers.2.mlp.down_proj.weight": [7.142359256744385], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024757012724876], "request": {"prompt": "The headquarters of {} is located in", "subject": "Notts County F.C.", "target_new": {"str": "Waltham, Massachusetts"}, "old_answer": {"str": "Nottingham"}, "seed": 42}}, {"loss_per_step": [7.535, 1.327, 0.11, 0.031, 0.014, 0.008], "prob_new": [0.31871408224105835, 0.564150333404541, 0.8963024616241455, 0.9690321683883667, 0.985958456993103, 0.9919010400772095], "prob_old": [0.980521559715271, 0.6455686092376709, 0.6404895186424255, 0.639175534248352, 0.6428781747817993, 0.6308671832084656], "prob_new_token": [7.166031537053641e-07, 0.02713545598089695, 0.848987877368927, 0.9752905368804932, 0.9897858500480652, 0.9935446977615356], "prob_old_token": [0.9427838325500488, 0.003777968231588602, 0.00547053012996912, 0.0010270214406773448, 0.0003832179354503751, 0.0001637000241316855], "l1-model.layers.2.mlp.down_proj.weight": [57201.2890625], "l2-model.layers.2.mlp.down_proj.weight": [9.463147163391113], "linf-model.layers.2.mlp.down_proj.weight": [0.0024977363646030426], "request": {"prompt": "The headquarters of {} is located in", "subject": "Notts County F.C.", "target_new": {"str": "Boston, Massachusetts"}, "old_answer": {"str": "Nottingham"}, "seed": 42}}, {"loss_per_step": [8.452, 2.787, 0.0], "prob_new": [0.0307486429810524, 0.49758774042129517, 0.9995514750480652], "prob_old": [0.980521559715271, 0.6632807850837708, 0.6647630929946899], "prob_new_token": [7.410567377519328e-07, 0.0038248556666076183, 0.9992790818214417], "prob_old_token": [0.9427838325500488, 0.005617912393063307, 6.233343810890801e-06], "l1-model.layers.2.mlp.down_proj.weight": [31787.31640625], "l2-model.layers.2.mlp.down_proj.weight": [5.111535549163818], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "The headquarters of {} is located in", "subject": "Notts County F.C.", "target_new": {"str": "Essen"}, "old_answer": {"str": "Nottingham"}, "seed": 42}}, {"loss_per_step": [5.253, 1.873, 0.02, 0.009], "prob_new": [0.5993486642837524, 0.5119084119796753, 0.9805119037628174, 0.9909838438034058], "prob_old": [0.9801103472709656, 0.47549742460250854, 0.5863046050071716, 0.5902745127677917], "prob_new_token": 
[1.7957756881514797e-07, 0.006450736429542303, 0.9814429879188538, 0.9914417862892151], "prob_old_token": [0.9406797885894775, 3.0413926651817746e-05, 5.119544539411436e-07, 2.9359173936427396e-07], "l1-model.layers.2.mlp.down_proj.weight": [39091.15625], "l2-model.layers.2.mlp.down_proj.weight": [6.681891918182373], "linf-model.layers.2.mlp.down_proj.weight": [0.0015021383296698332], "request": {"prompt": "The headquarters of {} is located in", "subject": "APOEL F.C.", "target_new": {"str": "Vienna, Austria"}, "old_answer": {"str": "Nicosia"}, "seed": 42}}, {"loss_per_step": [5.15, 2.424, 0.8, 0.793, 0.052, 0.014, 0.008], "prob_new": [0.49464982748031616, 0.5723990201950073, 0.8190628886222839, 0.7967087626457214, 0.9511327743530273, 0.9858126640319824, 0.9924652576446533], "prob_old": [0.9801103472709656, 0.6537086963653564, 0.5478214621543884, 0.5410577654838562, 0.5626134872436523, 0.5230645537376404, 0.4624045491218567], "prob_new_token": [4.703499030256353e-07, 1.7919472156791016e-05, 0.009035340510308743, 0.010900551453232765, 0.8203809857368469, 0.9670197367668152, 0.9877734780311584], "prob_old_token": [0.9406797885894775, 0.006369241513311863, 0.0012540578609332442, 0.0008906219154596329, 9.804430737858638e-05, 2.023856904997956e-05, 1.0975902114296332e-05], "l1-model.layers.2.mlp.down_proj.weight": [61961.671875], "l2-model.layers.2.mlp.down_proj.weight": [10.362077713012695], "linf-model.layers.2.mlp.down_proj.weight": [0.0029949815943837166], "request": {"prompt": "The headquarters of {} is located in", "subject": "APOEL F.C.", "target_new": {"str": "Redwood Shores, California"}, "old_answer": {"str": "Nicosia"}, "seed": 42}}, {"loss_per_step": [11.898, 4.583, 2.29, 0.184, 0.08, 0.04, 0.026, 0.018, 0.013, 0.009], "prob_new": [6.951825525902677e-06, 0.01807880960404873, 0.4931645691394806, 0.8436281681060791, 0.9252059459686279, 0.9612785577774048, 0.9747980833053589, 0.9822510480880737, 0.987461507320404, 0.9910662174224854], "prob_old": [0.9801103472709656, 0.3508777618408203, 0.638003945350647, 0.5538788437843323, 0.49455398321151733, 0.47459539771080017, 0.46718519926071167, 0.4615980088710785, 0.45561131834983826, 0.44922304153442383], "prob_new_token": [5.523621894099051e-06, 0.0031669954769313335, 0.010511710308492184, 0.7038224935531616, 0.8576235175132751, 0.9254518747329712, 0.9508560299873352, 0.9651132822036743, 0.9752541780471802, 0.9823322892189026], "prob_old_token": [0.9406797885894775, 0.0012775150826200843, 0.0014305559452623129, 0.00022178965446073562, 0.00015095026174094528, 0.00013046101958025247, 0.00014176531112752855, 0.0001603451237315312, 0.00017056900833267719, 0.00017064937856048346], "l1-model.layers.2.mlp.down_proj.weight": [72027.4140625], "l2-model.layers.2.mlp.down_proj.weight": [12.436004638671875], "linf-model.layers.2.mlp.down_proj.weight": [0.004484223201870918], "request": {"prompt": "The headquarters of {} is located in", "subject": "APOEL F.C.", "target_new": {"str": "Baar"}, "old_answer": {"str": "Nicosia"}, "seed": 42}}, {"loss_per_step": [6.79, 5.432, 0.016, 0.004], "prob_new": [0.4999072253704071, 0.4989469647407532, 0.9839324951171875, 0.995754599571228], "prob_old": [0.9122642278671265, 0.5311064124107361, 0.5479637384414673, 0.5129715204238892], "prob_new_token": [1.2645775768760359e-06, 1.919355236168485e-05, 0.9678803086280823, 0.9915232062339783], "prob_old_token": [0.9284078478813171, 8.056513678411648e-08, 1.5408213585033081e-06, 1.2571296110763797e-06], "l1-model.layers.2.mlp.down_proj.weight": [39271.62890625], 
"l2-model.layers.2.mlp.down_proj.weight": [6.691518783569336], "linf-model.layers.2.mlp.down_proj.weight": [0.0015022362349554896], "request": {"prompt": "The headquarters of {} is located in", "subject": "Konami", "target_new": {"str": "Prague"}, "old_answer": {"str": "Tokyo, Japan"}, "seed": 42}}, {"loss_per_step": [3.686, 1.238, 0.019, 0.015, 0.009], "prob_new": [0.5985926985740662, 0.7732065320014954, 0.9818563461303711, 0.9852564930915833, 0.9914730191230774], "prob_old": [0.9122642278671265, 0.5334457159042358, 0.534538984298706, 0.5628714561462402, 0.5750715732574463], "prob_new_token": [1.0215163825932905e-07, 0.0023664478212594986, 0.9196715354919434, 0.9311797618865967, 0.9606794714927673], "prob_old_token": [0.9284078478813171, 6.247948931559222e-06, 1.0862874688655211e-07, 1.3124450504164997e-07, 8.603819168229165e-08], "l1-model.layers.2.mlp.down_proj.weight": [48201.46875], "l2-model.layers.2.mlp.down_proj.weight": [8.177026748657227], "linf-model.layers.2.mlp.down_proj.weight": [0.00200442923232913], "request": {"prompt": "The headquarters of {} is located in", "subject": "Konami", "target_new": {"str": "Stamford, Connecticut"}, "old_answer": {"str": "Tokyo, Japan"}, "seed": 42}}, {"loss_per_step": [14.883, 5.933, 0.003], "prob_new": [3.4374505730738747e-07, 0.002649432746693492, 0.9965070486068726], "prob_old": [0.9122642278671265, 0.5755835175514221, 0.5999753475189209], "prob_new_token": [3.4374505730738747e-07, 0.002649432746693492, 0.9965070486068726], "prob_old_token": [0.9284078478813171, 1.0422418199595995e-06, 1.2355882006431784e-07], "l1-model.layers.2.mlp.down_proj.weight": [31651.541015625], "l2-model.layers.2.mlp.down_proj.weight": [5.092068195343018], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "The headquarters of {} is located in", "subject": "Konami", "target_new": {"str": "Lyon"}, "old_answer": {"str": "Tokyo, Japan"}, "seed": 42}}, {"loss_per_step": [12.387, 1.941, 0.07, 0.044, 0.028, 0.02, 0.015, 0.013, 0.011, 0.009], "prob_new": [4.173597517365124e-06, 0.1435096710920334, 0.9323127269744873, 0.9566184282302856, 0.9722519516944885, 0.9806345105171204, 0.9848425388336182, 0.9871459007263184, 0.989212155342102, 0.9915351867675781], "prob_old": [0.9913133382797241, 0.01247998233884573, 0.0011018456425517797, 0.0009893609676510096, 0.000605189532507211, 0.0003109139215666801, 0.00019137795607093722, 0.00014270491374190897, 0.00010152604227187112, 5.862963371328078e-05], "prob_new_token": [4.173597517365124e-06, 0.1435096710920334, 0.9323127269744873, 0.9566184282302856, 0.9722519516944885, 0.9806345105171204, 0.9848425388336182, 0.9871459007263184, 0.989212155342102, 0.9915351867675781], "prob_old_token": [0.9913133382797241, 0.01247998233884573, 0.0011018456425517797, 0.0009893609676510096, 0.000605189532507211, 0.0003109139215666801, 0.00019137795607093722, 0.00014270491374190897, 0.00010152604227187112, 5.862963371328078e-05], "l1-model.layers.2.mlp.down_proj.weight": [76698.3515625], "l2-model.layers.2.mlp.down_proj.weight": [12.893218040466309], "linf-model.layers.2.mlp.down_proj.weight": [0.004454293288290501], "request": {"prompt": "The original language of work of {} is", "subject": "Alarm f\u00fcr Cobra 11 \u2013 Die Autobahnpolizei", "target_new": {"str": "Japanese"}, "old_answer": {"str": "German"}, "seed": 42}}, {"loss_per_step": [6.888, 5.88, 2.282, 0.611, 0.043, 0.024, 0.017, 0.012, 0.009], "prob_new": [0.4996291995048523, 0.2205505073070526, 0.4972113370895386, 0.647230863571167, 
0.9591075778007507, 0.9768643379211426, 0.9835532307624817, 0.9882466197013855, 0.9911028146743774], "prob_old": [0.9913133382797241, 0.0006489359075203538, 0.06454940140247345, 0.004808046855032444, 0.003325198544189334, 0.0020672280807048082, 0.0012948625953868032, 0.000789016776252538, 0.0004902554792352021], "prob_new_token": [1.0405356078990735e-06, 1.7694834241410717e-05, 0.010588397271931171, 0.29489222168922424, 0.9186598062515259, 0.9544553756713867, 0.9678030610084534, 0.9769879579544067, 0.9825742840766907], "prob_old_token": [0.9913133382797241, 0.0006489359075203538, 0.06454940140247345, 0.004808046855032444, 0.003325198544189334, 0.0020672280807048082, 0.0012948625953868032, 0.000789016776252538, 0.0004902554792352021], "l1-model.layers.2.mlp.down_proj.weight": [66159.90625], "l2-model.layers.2.mlp.down_proj.weight": [11.663825035095215], "linf-model.layers.2.mlp.down_proj.weight": [0.003802258986979723], "request": {"prompt": "The original language of work of {} is", "subject": "Alarm f\u00fcr Cobra 11 \u2013 Die Autobahnpolizei", "target_new": {"str": "Romanian"}, "old_answer": {"str": "German"}, "seed": 42}}, {"loss_per_step": [6.255, 0.565, 0.003], "prob_new": [0.0019214514177292585, 0.5681886672973633, 0.996757447719574], "prob_old": [0.9913133382797241, 0.16412970423698425, 0.00037808786146342754], "prob_new_token": [0.0019214514177292585, 0.5681886672973633, 0.996757447719574], "prob_old_token": [0.9913133382797241, 0.16412970423698425, 0.00037808786146342754], "l1-model.layers.2.mlp.down_proj.weight": [35664.109375], "l2-model.layers.2.mlp.down_proj.weight": [5.440342903137207], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "The original language of work of {} is", "subject": "Alarm f\u00fcr Cobra 11 \u2013 Die Autobahnpolizei", "target_new": {"str": "English"}, "old_answer": {"str": "German"}, "seed": 42}}, {"loss_per_step": [8.538, 2.54, 0.314, 0.114, 0.07, 0.012, 0.004], "prob_new": [0.00019592215539887547, 0.07886365801095963, 0.7303498387336731, 0.892423152923584, 0.9322302937507629, 0.9881636500358582, 0.9957192540168762], "prob_old": [0.9906883239746094, 0.00314220879226923, 0.004243274684995413, 0.00010663008288247511, 0.0013121110387146473, 0.000159671253641136, 3.771864066948183e-05], "prob_new_token": [0.00019592215539887547, 0.07886365801095963, 0.7303498387336731, 0.892423152923584, 0.9322302937507629, 0.9881636500358582, 0.9957192540168762], "prob_old_token": [0.9906883239746094, 0.00314220879226923, 0.004243274684995413, 0.00010663008288247511, 0.0013121110387146473, 0.000159671253641136, 3.771864066948183e-05], "l1-model.layers.2.mlp.down_proj.weight": [62330.140625], "l2-model.layers.2.mlp.down_proj.weight": [10.459770202636719], "linf-model.layers.2.mlp.down_proj.weight": [0.002977978205308318], "request": {"prompt": "The original language of work of {} is", "subject": "The Lives of Others", "target_new": {"str": "Japanese"}, "old_answer": {"str": "German"}, "seed": 42}}, {"loss_per_step": [7.345, 3.187, 0.38, 0.093, 0.012, 0.006], "prob_new": [0.0006459231371991336, 0.04127756506204605, 0.6838073134422302, 0.9107439517974854, 0.9878361821174622, 0.9936150312423706], "prob_old": [0.9906883239746094, 0.0014889407902956009, 2.267044510517735e-05, 6.297853360592853e-06, 1.8855326970879105e-06, 1.8117140143658617e-06], "prob_new_token": [0.0006459231371991336, 0.04127756506204605, 0.6838073134422302, 0.9107439517974854, 0.9878361821174622, 0.9936150312423706], "prob_old_token": [0.9906883239746094, 
0.0014889407902956009, 2.267044510517735e-05, 6.297853360592853e-06, 1.8855326970879105e-06, 1.8117140143658617e-06], "l1-model.layers.2.mlp.down_proj.weight": [55815.30078125], "l2-model.layers.2.mlp.down_proj.weight": [9.355201721191406], "linf-model.layers.2.mlp.down_proj.weight": [0.0024582119658589363], "request": {"prompt": "The original language of work of {} is", "subject": "The Lives of Others", "target_new": {"str": "Russian"}, "old_answer": {"str": "German"}, "seed": 42}}, {"loss_per_step": [10.552, 5.427, 1.984, 0.387, 0.038, 0.013, 0.009], "prob_new": [2.6130079277209006e-05, 0.004397206008434296, 0.13756594061851501, 0.6788840293884277, 0.9627770781517029, 0.9871879816055298, 0.9914395809173584], "prob_old": [0.9906883239746094, 0.0030462651047855616, 0.028691157698631287, 0.010416999459266663, 0.0002873751218430698, 2.550696262915153e-05, 1.0805458259710576e-05], "prob_new_token": [2.6130079277209006e-05, 0.004397206008434296, 0.13756594061851501, 0.6788840293884277, 0.9627770781517029, 0.9871879816055298, 0.9914395809173584], "prob_old_token": [0.9906883239746094, 0.0030462651047855616, 0.028691157698631287, 0.010416999459266663, 0.0002873751218430698, 2.550696262915153e-05, 1.0805458259710576e-05], "l1-model.layers.2.mlp.down_proj.weight": [61595.6328125], "l2-model.layers.2.mlp.down_proj.weight": [10.433598518371582], "linf-model.layers.2.mlp.down_proj.weight": [0.002934104297310114], "request": {"prompt": "The original language of work of {} is", "subject": "The Lives of Others", "target_new": {"str": "Italian"}, "old_answer": {"str": "German"}, "seed": 42}}, {"loss_per_step": [3.992, 1.47, 0.01], "prob_new": [0.5001355409622192, 0.4731333553791046, 0.9903619885444641], "prob_old": [0.9088411331176758, 0.001076370244845748, 2.696478259167634e-05], "prob_new_token": [0.00034114730078727007, 0.05965280532836914, 0.9807624220848083], "prob_old_token": [0.9088411331176758, 0.001076370244845748, 2.696478259167634e-05], "l1-model.layers.2.mlp.down_proj.weight": [33005.0390625], "l2-model.layers.2.mlp.down_proj.weight": [5.223854064941406], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "The original language of work of {} is", "subject": "DuckTales", "target_new": {"str": "Danish"}, "old_answer": {"str": "English"}, "seed": 42}}, {"loss_per_step": [4.103, 0.577, 0.007], "prob_new": [0.01652524434030056, 0.5618573427200317, 0.993308424949646], "prob_old": [0.9088411331176758, 0.005286368075758219, 8.345484820893034e-05], "prob_new_token": [0.01652524434030056, 0.5618573427200317, 0.993308424949646], "prob_old_token": [0.9088411331176758, 0.005286368075758219, 8.345484820893034e-05], "l1-model.layers.2.mlp.down_proj.weight": [34544.75], "l2-model.layers.2.mlp.down_proj.weight": [5.357018947601318], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "The original language of work of {} is", "subject": "DuckTales", "target_new": {"str": "Spanish"}, "old_answer": {"str": "English"}, "seed": 42}}, {"loss_per_step": [7.689, 3.736, 0.032, 0.021, 0.017, 0.013, 0.01], "prob_new": [0.00045795628102496266, 0.023843931034207344, 0.9687596559524536, 0.9793922305107117, 0.9831370711326599, 0.9870622158050537, 0.9904113411903381], "prob_old": [0.9088411331176758, 0.0028112344443798065, 0.0004245591990184039, 0.00014355873281601816, 0.00011649436055449769, 9.351777407573536e-05, 7.392878615064546e-05], "prob_new_token": [0.00045795628102496266, 0.023843931034207344, 0.9687596559524536, 0.9793922305107117, 
0.9831370711326599, 0.9870622158050537, 0.9904113411903381], "prob_old_token": [0.9088411331176758, 0.0028112344443798065, 0.0004245591990184039, 0.00014355873281601816, 0.00011649436055449769, 9.351777407573536e-05, 7.392878615064546e-05], "l1-model.layers.2.mlp.down_proj.weight": [59798.37890625], "l2-model.layers.2.mlp.down_proj.weight": [10.31929874420166], "linf-model.layers.2.mlp.down_proj.weight": [0.00300957472063601], "request": {"prompt": "The original language of work of {} is", "subject": "DuckTales", "target_new": {"str": "Dutch"}, "old_answer": {"str": "English"}, "seed": 42}}, {"loss_per_step": [7.869, 2.478, 0.305, 0.027, 0.022, 0.024, 0.016, 0.007], "prob_new": [0.0003825849271379411, 0.08389782160520554, 0.7367852330207825, 0.9737094640731812, 0.9780400991439819, 0.9761009812355042, 0.9844069480895996, 0.9932997226715088], "prob_old": [0.957378625869751, 0.0043806894682347775, 0.008767531253397465, 0.0014707250520586967, 0.0006138273747637868, 0.00021428859326988459, 9.379984840052202e-05, 5.6924451200757176e-05], "prob_new_token": [0.0003825849271379411, 0.08389782160520554, 0.7367852330207825, 0.9737094640731812, 0.9780400991439819, 0.9761009812355042, 0.9844069480895996, 0.9932997226715088], "prob_old_token": [0.957378625869751, 0.0043806894682347775, 0.008767531253397465, 0.0014707250520586967, 0.0006138273747637868, 0.00021428859326988459, 9.379984840052202e-05, 5.6924451200757176e-05], "l1-model.layers.2.mlp.down_proj.weight": [70821.625], "l2-model.layers.2.mlp.down_proj.weight": [11.592835426330566], "linf-model.layers.2.mlp.down_proj.weight": [0.0034756287932395935], "request": {"prompt": "The original language of work of {} is", "subject": "Batman Begins", "target_new": {"str": "Italian"}, "old_answer": {"str": "English"}, "seed": 42}}, {"loss_per_step": [4.497, 0.942, 0.965, 0.011, 0.012, 0.012, 0.009], "prob_new": [0.011144640855491161, 0.39002326130867004, 0.3810478150844574, 0.9893091320991516, 0.9885332584381104, 0.9885174036026001, 0.9906275272369385], "prob_old": [0.957378625869751, 0.008624760434031487, 0.007363095413893461, 0.00034054770367220044, 0.000268976844381541, 0.00017834713798947632, 0.00010887273674597964], "prob_new_token": [0.011144640855491161, 0.39002326130867004, 0.3810478150844574, 0.9893091320991516, 0.9885332584381104, 0.9885174036026001, 0.9906275272369385], "prob_old_token": [0.957378625869751, 0.008624760434031487, 0.007363095413893461, 0.00034054770367220044, 0.000268976844381541, 0.00017834713798947632, 0.00010887273674597964], "l1-model.layers.2.mlp.down_proj.weight": [59529.6953125], "l2-model.layers.2.mlp.down_proj.weight": [10.158769607543945], "linf-model.layers.2.mlp.down_proj.weight": [0.002848653122782707], "request": {"prompt": "The original language of work of {} is", "subject": "Batman Begins", "target_new": {"str": "Spanish"}, "old_answer": {"str": "English"}, "seed": 42}}, {"loss_per_step": [7.014, 2.136, 0.257, 0.018, 0.019, 0.019, 0.013, 0.007], "prob_new": [0.0008990108035504818, 0.11812587827444077, 0.7730676531791687, 0.9821299910545349, 0.9812064170837402, 0.981406569480896, 0.9874358177185059, 0.9927493333816528], "prob_old": [0.957378625869751, 0.009345974773168564, 0.004510829690843821, 0.0010288918856531382, 0.0008471876499243081, 0.0006873393431305885, 0.0004190168692730367, 0.00021514084073714912], "prob_new_token": [0.0008990108035504818, 0.11812587827444077, 0.7730676531791687, 0.9821299910545349, 0.9812064170837402, 0.981406569480896, 0.9874358177185059, 0.9927493333816528], "prob_old_token": 
[0.957378625869751, 0.009345974773168564, 0.004510829690843821, 0.0010288918856531382, 0.0008471876499243081, 0.0006873393431305885, 0.0004190168692730367, 0.00021514084073714912], "l1-model.layers.2.mlp.down_proj.weight": [69110.640625], "l2-model.layers.2.mlp.down_proj.weight": [11.492542266845703], "linf-model.layers.2.mlp.down_proj.weight": [0.003490174189209938], "request": {"prompt": "The original language of work of {} is", "subject": "Batman Begins", "target_new": {"str": "Japanese"}, "old_answer": {"str": "English"}, "seed": 42}}, {"loss_per_step": [6.927, 1.588, 0.227, 0.147, 0.073, 0.038, 0.022, 0.012, 0.007], "prob_new": [0.0009805005975067616, 0.2044232189655304, 0.7967312335968018, 0.8632208704948425, 0.9298439621925354, 0.9630352854728699, 0.9786706566810608, 0.9878943562507629, 0.993013322353363], "prob_old": [0.9881119132041931, 0.00023756847076583654, 0.0012870688224211335, 0.0015419740229845047, 0.0007176260114647448, 0.00034092873102054, 0.0002019845269387588, 0.00011523459397722036, 6.621886132052168e-05], "prob_new_token": [0.0009805005975067616, 0.2044232189655304, 0.7967312335968018, 0.8632208704948425, 0.9298439621925354, 0.9630352854728699, 0.9786706566810608, 0.9878943562507629, 0.993013322353363], "prob_old_token": [0.9881119132041931, 0.00023756847076583654, 0.0012870688224211335, 0.0015419740229845047, 0.0007176260114647448, 0.00034092873102054, 0.0002019845269387588, 0.00011523459397722036, 6.621886132052168e-05], "l1-model.layers.2.mlp.down_proj.weight": [71523.7265625], "l2-model.layers.2.mlp.down_proj.weight": [12.135808944702148], "linf-model.layers.2.mlp.down_proj.weight": [0.003957749344408512], "request": {"prompt": "The original language of work of {} is", "subject": "Das Experiment", "target_new": {"str": "Dutch"}, "old_answer": {"str": "German"}, "seed": 42}}, {"loss_per_step": [8.411, 3.442, 0.398, 0.037, 0.017, 0.014, 0.012, 0.011, 0.01, 0.01], "prob_new": [0.00022245246509555727, 0.03201525658369064, 0.671610951423645, 0.9638243317604065, 0.9829872846603394, 0.9864068031311035, 0.988318681716919, 0.9893549680709839, 0.9899309873580933, 0.9903867840766907], "prob_old": [0.9881119132041931, 0.00611038226634264, 0.10137362033128738, 0.010905802249908447, 0.0006213618908077478, 0.00018795292999129742, 0.00010994356853188947, 8.107572648441419e-05, 6.522300827782601e-05, 5.424216578830965e-05], "prob_new_token": [0.00022245246509555727, 0.03201525658369064, 0.671610951423645, 0.9638243317604065, 0.9829872846603394, 0.9864068031311035, 0.988318681716919, 0.9893549680709839, 0.9899309873580933, 0.9903867840766907], "prob_old_token": [0.9881119132041931, 0.00611038226634264, 0.10137362033128738, 0.010905802249908447, 0.0006213618908077478, 0.00018795292999129742, 0.00010994356853188947, 8.107572648441419e-05, 6.522300827782601e-05, 5.424216578830965e-05], "l1-model.layers.2.mlp.down_proj.weight": [77026.21875], "l2-model.layers.2.mlp.down_proj.weight": [12.949063301086426], "linf-model.layers.2.mlp.down_proj.weight": [0.004360837861895561], "request": {"prompt": "The original language of work of {} is", "subject": "Das Experiment", "target_new": {"str": "French"}, "old_answer": {"str": "German"}, "seed": 42}}, {"loss_per_step": [11.87, 6.303, 1.381, 0.06, 0.025, 0.015, 0.012, 0.009], "prob_new": [6.9945690484019e-06, 0.0018310154555365443, 0.25143054127693176, 0.9414083957672119, 0.9753003716468811, 0.985501229763031, 0.9885428547859192, 0.9905456304550171], "prob_old": [0.9881119132041931, 0.004586789757013321, 0.1402508020401001, 
0.00015065487241372466, 5.238872836343944e-05, 2.6050482119899243e-05, 1.7602749721845612e-05, 1.3513626072381157e-05], "prob_new_token": [6.9945690484019e-06, 0.0018310154555365443, 0.25143054127693176, 0.9414083957672119, 0.9753003716468811, 0.985501229763031, 0.9885428547859192, 0.9905456304550171], "prob_old_token": [0.9881119132041931, 0.004586789757013321, 0.1402508020401001, 0.00015065487241372466, 5.238872836343944e-05, 2.6050482119899243e-05, 1.7602749721845612e-05, 1.3513626072381157e-05], "l1-model.layers.2.mlp.down_proj.weight": [68098.4296875], "l2-model.layers.2.mlp.down_proj.weight": [11.399087905883789], "linf-model.layers.2.mlp.down_proj.weight": [0.003361966460943222], "request": {"prompt": "The original language of work of {} is", "subject": "Das Experiment", "target_new": {"str": "Swedish"}, "old_answer": {"str": "German"}, "seed": 42}}, {"loss_per_step": [3.992, 0.132, 0.032, 0.001], "prob_new": [0.018462464213371277, 0.8762422800064087, 0.9685043096542358, 0.9989163279533386], "prob_old": [0.9082660675048828, 0.01291388738900423, 7.0486385084223e-05, 1.5998775779735297e-05], "prob_new_token": [0.018462464213371277, 0.8762422800064087, 0.9685043096542358, 0.9989163279533386], "prob_old_token": [0.9082660675048828, 0.01291388738900423, 7.0486385084223e-05, 1.5998775779735297e-05], "l1-model.layers.2.mlp.down_proj.weight": [47848.8828125], "l2-model.layers.2.mlp.down_proj.weight": [7.238510608673096], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024654567241669], "request": {"prompt": "The original language of work of {} is", "subject": "Tangled", "target_new": {"str": "French"}, "old_answer": {"str": "English"}, "seed": 42}}, {"loss_per_step": [6.218, 0.308, 0.002], "prob_new": [0.001992656849324703, 0.7347044348716736, 0.9979912042617798], "prob_old": [0.9082660675048828, 0.05154406279325485, 0.0001277912233490497], "prob_new_token": [0.001992656849324703, 0.7347044348716736, 0.9979912042617798], "prob_old_token": [0.9082660675048828, 0.05154406279325485, 0.0001277912233490497], "l1-model.layers.2.mlp.down_proj.weight": [36141.35546875], "l2-model.layers.2.mlp.down_proj.weight": [5.485115051269531], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "The original language of work of {} is", "subject": "Tangled", "target_new": {"str": "Dutch"}, "old_answer": {"str": "English"}, "seed": 42}}, {"loss_per_step": [6.565, 3.127, 0.644, 0.028, 0.018, 0.012, 0.007], "prob_new": [0.4723135232925415, 0.44957688450813293, 0.6370214819908142, 0.9725436568260193, 0.982358455657959, 0.9882182478904724, 0.9926162958145142], "prob_old": [0.9082660675048828, 0.03199871629476547, 0.11475557833909988, 0.006675932556390762, 0.0035481282975524664, 0.0020043933764100075, 0.0010136428754776716], "prob_new_token": [2.101865675285808e-06, 0.002141987904906273, 0.2768431007862091, 0.945983350276947, 0.9653087258338928, 0.9768282771110535, 0.9854993224143982], "prob_old_token": [0.9082660675048828, 0.03199871629476547, 0.11475557833909988, 0.006675932556390762, 0.0035481282975524664, 0.0020043933764100075, 0.0010136428754776716], "l1-model.layers.2.mlp.down_proj.weight": [61636.71875], "l2-model.layers.2.mlp.down_proj.weight": [10.41757583618164], "linf-model.layers.2.mlp.down_proj.weight": [0.0029809828847646713], "request": {"prompt": "The original language of work of {} is", "subject": "Tangled", "target_new": {"str": "Romanian"}, "old_answer": {"str": "English"}, "seed": 42}}, {"loss_per_step": [11.369, 6.273, 0.959, 0.021, 0.014, 0.01], 
"prob_new": [1.1552589967323001e-05, 0.0018874930683523417, 0.3834383189678192, 0.9790757298469543, 0.9863195419311523, 0.990431547164917], "prob_old": [0.9782238602638245, 0.004535375628620386, 0.005837614182382822, 0.0002705074439290911, 0.0001903722295537591, 0.00021084045874886215], "prob_new_token": [1.1552589967323001e-05, 0.0018874930683523417, 0.3834383189678192, 0.9790757298469543, 0.9863195419311523, 0.990431547164917], "prob_old_token": [0.9782238602638245, 0.004535375628620386, 0.005837614182382822, 0.0002705074439290911, 0.0001903722295537591, 0.00021084045874886215], "l1-model.layers.2.mlp.down_proj.weight": [52176.0234375], "l2-model.layers.2.mlp.down_proj.weight": [9.111824989318848], "linf-model.layers.2.mlp.down_proj.weight": [0.002498573623597622], "request": {"prompt": "The original language of work of {} is", "subject": "Rise of the Guardians", "target_new": {"str": "Swedish"}, "old_answer": {"str": "English"}, "seed": 42}}, {"loss_per_step": [6.465, 1.935, 0.083, 0.025, 0.013, 0.008], "prob_new": [0.4998515248298645, 0.5102972984313965, 0.9237473011016846, 0.9752393960952759, 0.9869378805160522, 0.9917197227478027], "prob_old": [0.9782238602638245, 0.0028067512903362513, 0.005739710759371519, 0.003197896061465144, 0.00201052357442677, 0.0011214404366910458], "prob_new_token": [2.4232685973402113e-06, 0.020858092233538628, 0.8475514054298401, 0.9505210518836975, 0.9739113450050354, 0.9834733605384827], "prob_old_token": [0.9782238602638245, 0.0028067512903362513, 0.005739710759371519, 0.003197896061465144, 0.00201052357442677, 0.0011214404366910458], "l1-model.layers.2.mlp.down_proj.weight": [52748.96484375], "l2-model.layers.2.mlp.down_proj.weight": [9.151199340820312], "linf-model.layers.2.mlp.down_proj.weight": [0.002490169368684292], "request": {"prompt": "The original language of work of {} is", "subject": "Rise of the Guardians", "target_new": {"str": "Danish"}, "old_answer": {"str": "English"}, "seed": 42}}, {"loss_per_step": [9.498, 3.727, 0.494, 0.013, 0.007], "prob_new": [7.496570469811559e-05, 0.024072950705885887, 0.6104381680488586, 0.9868483543395996, 0.9927573800086975], "prob_old": [0.9782238602638245, 0.0014656444545835257, 0.008004497736692429, 0.0019562223460525274, 0.00011693435226334259], "prob_new_token": [7.496570469811559e-05, 0.024072950705885887, 0.6104381680488586, 0.9868483543395996, 0.9927573800086975], "prob_old_token": [0.9782238602638245, 0.0014656444545835257, 0.008004497736692429, 0.0019562223460525274, 0.00011693435226334259], "l1-model.layers.2.mlp.down_proj.weight": [47363.34765625], "l2-model.layers.2.mlp.down_proj.weight": [8.065478324890137], "linf-model.layers.2.mlp.down_proj.weight": [0.0019933022558689117], "request": {"prompt": "The original language of work of {} is", "subject": "Rise of the Guardians", "target_new": {"str": "Japanese"}, "old_answer": {"str": "English"}, "seed": 42}}, {"loss_per_step": [11.468, 6.524, 1.125, 0.035, 0.008], "prob_new": [1.0462108548381366e-05, 0.0014673800906166434, 0.3245554566383362, 0.9653672575950623, 0.991875171661377], "prob_old": [0.9858538508415222, 0.0013771739322692156, 0.0012252898886799812, 0.0012610196135938168, 0.0010284199379384518], "prob_new_token": [1.0462108548381366e-05, 0.0014673800906166434, 0.3245554566383362, 0.9653672575950623, 0.991875171661377], "prob_old_token": [0.9858538508415222, 0.0013771739322692156, 0.0012252898886799812, 0.0012610196135938168, 0.0010284199379384518], "l1-model.layers.2.mlp.down_proj.weight": [44380.4140625], 
"l2-model.layers.2.mlp.down_proj.weight": [7.8471808433532715], "linf-model.layers.2.mlp.down_proj.weight": [0.002003414323553443], "request": {"prompt": "The original language of work of {} is", "subject": "Harry Potter and the Philosopher's Stone", "target_new": {"str": "Russian"}, "old_answer": {"str": "English"}, "seed": 42}}, {"loss_per_step": [11.619, 5.751, 2.988, 0.005], "prob_new": [8.989096386358142e-06, 0.003178109647706151, 0.05038393661379814, 0.9954596161842346], "prob_old": [0.9858538508415222, 0.0006498927832581103, 0.0008193061221390963, 2.6043997422675602e-05], "prob_new_token": [8.989096386358142e-06, 0.003178109647706151, 0.05038393661379814, 0.9954596161842346], "prob_old_token": [0.9858538508415222, 0.0006498927832581103, 0.0008193061221390963, 2.6043997422675602e-05], "l1-model.layers.2.mlp.down_proj.weight": [36639.46875], "l2-model.layers.2.mlp.down_proj.weight": [6.445700645446777], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024826861917973], "request": {"prompt": "The original language of work of {} is", "subject": "Harry Potter and the Philosopher's Stone", "target_new": {"str": "Swedish"}, "old_answer": {"str": "English"}, "seed": 42}}, {"loss_per_step": [9.632, 2.076, 0.16, 0.006], "prob_new": [6.557615415658802e-05, 0.12541526556015015, 0.8519291281700134, 0.994005024433136], "prob_old": [0.9858538508415222, 0.0019871096592396498, 0.0028291356284171343, 0.0002990897046402097], "prob_new_token": [6.557615415658802e-05, 0.12541526556015015, 0.8519291281700134, 0.994005024433136], "prob_old_token": [0.9858538508415222, 0.0019871096592396498, 0.0028291356284171343, 0.0002990897046402097], "l1-model.layers.2.mlp.down_proj.weight": [40014.98046875], "l2-model.layers.2.mlp.down_proj.weight": [6.775219917297363], "linf-model.layers.2.mlp.down_proj.weight": [0.001502479426562786], "request": {"prompt": "The original language of work of {} is", "subject": "Harry Potter and the Philosopher's Stone", "target_new": {"str": "Dutch"}, "old_answer": {"str": "English"}, "seed": 42}}, {"loss_per_step": [4.264, 1.123, 0.017, 0.033, 0.037, 0.021, 0.011, 0.007], "prob_new": [0.014062520116567612, 0.3251616656780243, 0.9828562140464783, 0.9671340584754944, 0.964019775390625, 0.9794759154319763, 0.9890174269676208, 0.9929743409156799], "prob_old": [0.9667536616325378, 0.0010491779539734125, 0.0005520970444194973, 0.00111941818613559, 0.001105702482163906, 0.0004586096329148859, 0.0001817220327211544, 8.52337252581492e-05], "prob_new_token": [0.014062520116567612, 0.3251616656780243, 0.9828562140464783, 0.9671340584754944, 0.964019775390625, 0.9794759154319763, 0.9890174269676208, 0.9929743409156799], "prob_old_token": [0.9667536616325378, 0.0010491779539734125, 0.0005520970444194973, 0.00111941818613559, 0.001105702482163906, 0.0004586096329148859, 0.0001817220327211544, 8.52337252581492e-05], "l1-model.layers.2.mlp.down_proj.weight": [65770.546875], "l2-model.layers.2.mlp.down_proj.weight": [11.24901008605957], "linf-model.layers.2.mlp.down_proj.weight": [0.0034829042851924896], "request": {"prompt": "The original language of work of {} is", "subject": "Scooby-Doo", "target_new": {"str": "Spanish"}, "old_answer": {"str": "English"}, "seed": 42}}, {"loss_per_step": [5.947, 6.88, 0.244, 0.009], "prob_new": [0.002613286953419447, 0.0010280964197590947, 0.7832756042480469, 0.991040050983429], "prob_old": [0.9667536616325378, 0.00023500413226429373, 0.0025696370285004377, 0.0006023307214491069], "prob_new_token": [0.002613286953419447, 0.0010280964197590947, 0.7832756042480469, 
0.991040050983429], "prob_old_token": [0.9667536616325378, 0.00023500413226429373, 0.0025696370285004377, 0.0006023307214491069], "l1-model.layers.2.mlp.down_proj.weight": [38707.6171875], "l2-model.layers.2.mlp.down_proj.weight": [6.654924392700195], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024757012724876], "request": {"prompt": "The original language of work of {} is", "subject": "Scooby-Doo", "target_new": {"str": "Japanese"}, "old_answer": {"str": "English"}, "seed": 42}}, {"loss_per_step": [10.752, 7.623, 0.717, 0.023, 0.014, 0.012, 0.009], "prob_new": [2.1409912733361125e-05, 0.0004889743868261576, 0.48824450373649597, 0.9769866466522217, 0.9856978058815002, 0.9878952503204346, 0.990845263004303], "prob_old": [0.9667536616325378, 7.066723628668115e-05, 0.0006333914934657514, 0.00030344174592755735, 0.00018604013894218951, 0.0001110655939555727, 6.064843182684854e-05], "prob_new_token": [2.1409912733361125e-05, 0.0004889743868261576, 0.48824450373649597, 0.9769866466522217, 0.9856978058815002, 0.9878952503204346, 0.990845263004303], "prob_old_token": [0.9667536616325378, 7.066723628668115e-05, 0.0006333914934657514, 0.00030344174592755735, 0.00018604013894218951, 0.0001110655939555727, 6.064843182684854e-05], "l1-model.layers.2.mlp.down_proj.weight": [58582.796875], "l2-model.layers.2.mlp.down_proj.weight": [10.185287475585938], "linf-model.layers.2.mlp.down_proj.weight": [0.0029621869325637817], "request": {"prompt": "The original language of work of {} is", "subject": "Scooby-Doo", "target_new": {"str": "Swedish"}, "old_answer": {"str": "English"}, "seed": 42}}, {"loss_per_step": [8.521, 3.995, 0.619, 0.037, 0.005], "prob_new": [0.00019933209114242345, 0.018411895260214806, 0.5382163524627686, 0.9637404680252075, 0.995162308216095], "prob_old": [0.973515510559082, 0.010001888498663902, 0.08780963718891144, 0.011788268573582172, 0.0008933061035349965], "prob_new_token": [0.00019933209114242345, 0.018411895260214806, 0.5382163524627686, 0.9637404680252075, 0.995162308216095], "prob_old_token": [0.973515510559082, 0.010001888498663902, 0.08780963718891144, 0.011788268573582172, 0.0008933061035349965], "l1-model.layers.2.mlp.down_proj.weight": [50179.84765625], "l2-model.layers.2.mlp.down_proj.weight": [8.25790786743164], "linf-model.layers.2.mlp.down_proj.weight": [0.0020045624114573], "request": {"prompt": "The original language of work of {} is", "subject": "Practical Magic", "target_new": {"str": "Japanese"}, "old_answer": {"str": "English"}, "seed": 42}}, {"loss_per_step": [5.083, 2.04, 0.392, 0.04, 0.012, 0.005], "prob_new": [0.006198660004884005, 0.13000236451625824, 0.6759034991264343, 0.9610450863838196, 0.9876797199249268, 0.994872510433197], "prob_old": [0.973515510559082, 0.004316855221986771, 0.00869947113096714, 0.00607475358992815, 0.002838718006387353, 0.0014977243263274431], "prob_new_token": [0.006198660004884005, 0.13000236451625824, 0.6759034991264343, 0.9610450863838196, 0.9876797199249268, 0.994872510433197], "prob_old_token": [0.973515510559082, 0.004316855221986771, 0.00869947113096714, 0.00607475358992815, 0.002838718006387353, 0.0014977243263274431], "l1-model.layers.2.mlp.down_proj.weight": [54315.09375], "l2-model.layers.2.mlp.down_proj.weight": [9.272266387939453], "linf-model.layers.2.mlp.down_proj.weight": [0.0024694371968507767], "request": {"prompt": "The original language of work of {} is", "subject": "Practical Magic", "target_new": {"str": "French"}, "old_answer": {"str": "English"}, "seed": 42}}, {"loss_per_step": [9.798, 5.907, 0.581, 
0.016, 0.132, 0.005], "prob_new": [5.558771954383701e-05, 0.0027200954500585794, 0.5592322945594788, 0.9839282035827637, 0.8761534690856934, 0.9952691197395325], "prob_old": [0.973515510559082, 0.007102956995368004, 0.1266625076532364, 0.00016019273607525975, 5.0756898417603225e-05, 2.924070031440351e-05], "prob_new_token": [5.558771954383701e-05, 0.0027200954500585794, 0.5592322945594788, 0.9839282035827637, 0.8761534690856934, 0.9952691197395325], "prob_old_token": [0.973515510559082, 0.007102956995368004, 0.1266625076532364, 0.00016019273607525975, 5.0756898417603225e-05, 2.924070031440351e-05], "l1-model.layers.2.mlp.down_proj.weight": [53780.8046875], "l2-model.layers.2.mlp.down_proj.weight": [9.247415542602539], "linf-model.layers.2.mlp.down_proj.weight": [0.00247817300260067], "request": {"prompt": "The original language of work of {} is", "subject": "Practical Magic", "target_new": {"str": "Swedish"}, "old_answer": {"str": "English"}, "seed": 42}}, {"loss_per_step": [7.438, 3.575, 0.534, 0.039, 0.013, 0.01], "prob_new": [0.0005882002296857536, 0.028014929965138435, 0.58652263879776, 0.9616691470146179, 0.9874376654624939, 0.9900637865066528], "prob_old": [0.9321433305740356, 0.0003572486457414925, 0.01894690841436386, 0.0003028205537702888, 2.900246727222111e-05, 2.246683834528085e-05], "prob_new_token": [0.0005882002296857536, 0.028014929965138435, 0.58652263879776, 0.9616691470146179, 0.9874376654624939, 0.9900637865066528], "prob_old_token": [0.9321433305740356, 0.0003572486457414925, 0.01894690841436386, 0.0003028205537702888, 2.900246727222111e-05, 2.246683834528085e-05], "l1-model.layers.2.mlp.down_proj.weight": [56247.2109375], "l2-model.layers.2.mlp.down_proj.weight": [9.42196273803711], "linf-model.layers.2.mlp.down_proj.weight": [0.0025038509629666805], "request": {"prompt": "The original language of work of {} is", "subject": "Machete", "target_new": {"str": "Japanese"}, "old_answer": {"str": "Spanish"}, "seed": 42}}, {"loss_per_step": [3.177, 3.813, 0.029, 0.043, 0.009], "prob_new": [0.041726790368556976, 0.02208414487540722, 0.971422016620636, 0.9578387141227722, 0.9915112853050232], "prob_old": [0.9321433305740356, 7.084723620209843e-06, 0.00017797011241782457, 7.898189505795017e-05, 4.150475433561951e-05], "prob_new_token": [0.041726790368556976, 0.02208414487540722, 0.971422016620636, 0.9578387141227722, 0.9915112853050232], "prob_old_token": [0.9321433305740356, 7.084723620209843e-06, 0.00017797011241782457, 7.898189505795017e-05, 4.150475433561951e-05], "l1-model.layers.2.mlp.down_proj.weight": [47222.4921875], "l2-model.layers.2.mlp.down_proj.weight": [8.164649963378906], "linf-model.layers.2.mlp.down_proj.weight": [0.002005521673709154], "request": {"prompt": "The original language of work of {} is", "subject": "Machete", "target_new": {"str": "English"}, "old_answer": {"str": "Spanish"}, "seed": 42}}, {"loss_per_step": [8.699, 0.999, 0.01, 0.005], "prob_new": [0.00016674748621881008, 0.36836081743240356, 0.9896662831306458, 0.9953392744064331], "prob_old": [0.9321433305740356, 0.0016716505633667111, 0.00011468291631899774, 2.160802250728011e-05], "prob_new_token": [0.00016674748621881008, 0.36836081743240356, 0.9896662831306458, 0.9953392744064331], "prob_old_token": [0.9321433305740356, 0.0016716505633667111, 0.00011468291631899774, 2.160802250728011e-05], "l1-model.layers.2.mlp.down_proj.weight": [42665.08203125], "l2-model.layers.2.mlp.down_proj.weight": [6.954034805297852], "linf-model.layers.2.mlp.down_proj.weight": [0.001501208171248436], "request": 
{"prompt": "The original language of work of {} is", "subject": "Machete", "target_new": {"str": "Dutch"}, "old_answer": {"str": "Spanish"}, "seed": 42}}, {"loss_per_step": [5.651, 0.646, 0.021, 0.016, 0.011, 0.008], "prob_new": [0.003514168318361044, 0.5241202116012573, 0.9797053933143616, 0.9845622777938843, 0.988707423210144, 0.9922822117805481], "prob_old": [0.9453067183494568, 0.006814303807914257, 0.001470622606575489, 0.0013814186677336693, 0.0007940915529616177, 0.00041267566848546267], "prob_new_token": [0.003514168318361044, 0.5241202116012573, 0.9797053933143616, 0.9845622777938843, 0.988707423210144, 0.9922822117805481], "prob_old_token": [0.9453067183494568, 0.006814303807914257, 0.001470622606575489, 0.0013814186677336693, 0.0007940915529616177, 0.00041267566848546267], "l1-model.layers.2.mlp.down_proj.weight": [62319.2265625], "l2-model.layers.2.mlp.down_proj.weight": [9.761333465576172], "linf-model.layers.2.mlp.down_proj.weight": [0.0025040225591510534], "request": {"prompt": "The original language of work of {} is", "subject": "Stargate Universe", "target_new": {"str": "German"}, "old_answer": {"str": "English"}, "seed": 42}}, {"loss_per_step": [7.327, 3.336, 0.049, 0.031, 0.022, 0.016, 0.011, 0.008], "prob_new": [0.0006574663566425443, 0.035591524094343185, 0.9522547125816345, 0.9692381024360657, 0.978054940700531, 0.9839068651199341, 0.9887620806694031, 0.9923388957977295], "prob_old": [0.9453067183494568, 0.050873514264822006, 0.005700442008674145, 0.003461513202637434, 0.0032803842332214117, 0.0028772407677024603, 0.0021473150700330734, 0.0014529329491779208], "prob_new_token": [0.0006574663566425443, 0.035591524094343185, 0.9522547125816345, 0.9692381024360657, 0.978054940700531, 0.9839068651199341, 0.9887620806694031, 0.9923388957977295], "prob_old_token": [0.9453067183494568, 0.050873514264822006, 0.005700442008674145, 0.003461513202637434, 0.0032803842332214117, 0.0028772407677024603, 0.0021473150700330734, 0.0014529329491779208], "l1-model.layers.2.mlp.down_proj.weight": [68446.7578125], "l2-model.layers.2.mlp.down_proj.weight": [11.462158203125], "linf-model.layers.2.mlp.down_proj.weight": [0.0035133399069309235], "request": {"prompt": "The original language of work of {} is", "subject": "Stargate Universe", "target_new": {"str": "Dutch"}, "old_answer": {"str": "English"}, "seed": 42}}, {"loss_per_step": [5.152, 2.298, 0.124, 0.04, 0.026, 0.02, 0.014, 0.009], "prob_new": [0.005785590503364801, 0.10042892396450043, 0.8838068842887878, 0.9610418081283569, 0.9744629859924316, 0.980674147605896, 0.9860795736312866, 0.9906659722328186], "prob_old": [0.9453067183494568, 0.02184816263616085, 0.006398494821041822, 0.0006620718049816787, 0.000199723886908032, 9.657932969275862e-05, 5.4059713875176385e-05, 3.2929419830907136e-05], "prob_new_token": [0.005785590503364801, 0.10042892396450043, 0.8838068842887878, 0.9610418081283569, 0.9744629859924316, 0.980674147605896, 0.9860795736312866, 0.9906659722328186], "prob_old_token": [0.9453067183494568, 0.02184816263616085, 0.006398494821041822, 0.0006620718049816787, 0.000199723886908032, 9.657932969275862e-05, 5.4059713875176385e-05, 3.2929419830907136e-05], "l1-model.layers.2.mlp.down_proj.weight": [69296.03125], "l2-model.layers.2.mlp.down_proj.weight": [11.490378379821777], "linf-model.layers.2.mlp.down_proj.weight": [0.003508088644593954], "request": {"prompt": "The original language of work of {} is", "subject": "Stargate Universe", "target_new": {"str": "Spanish"}, "old_answer": {"str": "English"}, "seed": 42}}, 
{"loss_per_step": [15.81, 8.216, 0.743, 0.055, 0.038, 0.03, 0.022, 0.016, 0.012, 0.01], "prob_new": [1.3601727744116943e-07, 0.00027022536960430443, 0.4758164584636688, 0.9464817047119141, 0.9627331495285034, 0.9707873463630676, 0.9786549210548401, 0.9844832420349121, 0.9880908131599426, 0.9904165267944336], "prob_old": [0.9684644937515259, 0.0006133405840955675, 6.043134635547176e-05, 3.4734175642370246e-06, 5.140942903381074e-06, 7.166930117818993e-06, 7.66069751989562e-06, 6.997231139393989e-06, 5.99552595303976e-06, 4.986608473700471e-06], "prob_new_token": [1.3601727744116943e-07, 0.00027022536960430443, 0.4758164584636688, 0.9464817047119141, 0.9627331495285034, 0.9707873463630676, 0.9786549210548401, 0.9844832420349121, 0.9880908131599426, 0.9904165267944336], "prob_old_token": [0.9684644937515259, 0.0006133405840955675, 6.043134635547176e-05, 3.4734175642370246e-06, 5.140942903381074e-06, 7.166930117818993e-06, 7.66069751989562e-06, 6.997231139393989e-06, 5.99552595303976e-06, 4.986608473700471e-06], "l1-model.layers.2.mlp.down_proj.weight": [73405.9375], "l2-model.layers.2.mlp.down_proj.weight": [12.671494483947754], "linf-model.layers.2.mlp.down_proj.weight": [0.004304129630327225], "request": {"prompt": "The original language of work of {} is", "subject": "From Up on Poppy Hill", "target_new": {"str": "Turkish"}, "old_answer": {"str": "Japanese"}, "seed": 42}}, {"loss_per_step": [9.333, 1.808, 0.201, 0.003], "prob_new": [8.844886178849265e-05, 0.16392791271209717, 0.817571222782135, 0.9968603849411011], "prob_old": [0.9684644937515259, 0.0002771001309156418, 1.3720663446292747e-05, 1.7290240350575914e-07], "prob_new_token": [8.844886178849265e-05, 0.16392791271209717, 0.817571222782135, 0.9968603849411011], "prob_old_token": [0.9684644937515259, 0.0002771001309156418, 1.3720663446292747e-05, 1.7290240350575914e-07], "l1-model.layers.2.mlp.down_proj.weight": [41113.03125], "l2-model.layers.2.mlp.down_proj.weight": [6.805360794067383], "linf-model.layers.2.mlp.down_proj.weight": [0.001502467319369316], "request": {"prompt": "The original language of work of {} is", "subject": "From Up on Poppy Hill", "target_new": {"str": "Spanish"}, "old_answer": {"str": "Japanese"}, "seed": 42}}, {"loss_per_step": [8.124, 2.547, 0.227, 0.026, 0.007], "prob_new": [0.00029619300039485097, 0.07829726487398148, 0.7972972393035889, 0.9747019410133362, 0.9930910468101501], "prob_old": [0.9684644937515259, 8.445759158348665e-05, 5.638146831188351e-05, 1.6873662389116362e-05, 4.801207523996709e-06], "prob_new_token": [0.00029619300039485097, 0.07829726487398148, 0.7972972393035889, 0.9747019410133362, 0.9930910468101501], "prob_old_token": [0.9684644937515259, 8.445759158348665e-05, 5.638146831188351e-05, 1.6873662389116362e-05, 4.801207523996709e-06], "l1-model.layers.2.mlp.down_proj.weight": [48224.6953125], "l2-model.layers.2.mlp.down_proj.weight": [8.134597778320312], "linf-model.layers.2.mlp.down_proj.weight": [0.002003176137804985], "request": {"prompt": "The original language of work of {} is", "subject": "From Up on Poppy Hill", "target_new": {"str": "Italian"}, "old_answer": {"str": "Japanese"}, "seed": 42}}, {"loss_per_step": [7.635, 1.786, 0.015, 0.003], "prob_new": [0.0004834583669435233, 0.16763420403003693, 0.9850364923477173, 0.99725341796875], "prob_old": [0.9166412353515625, 0.001292748493142426, 5.50506629224401e-05, 2.5591782105038874e-05], "prob_new_token": [0.0004834583669435233, 0.16763420403003693, 0.9850364923477173, 0.99725341796875], "prob_old_token": [0.9166412353515625, 
0.001292748493142426, 5.50506629224401e-05, 2.5591782105038874e-05], "l1-model.layers.2.mlp.down_proj.weight": [43400.6953125], "l2-model.layers.2.mlp.down_proj.weight": [7.013294219970703], "linf-model.layers.2.mlp.down_proj.weight": [0.0015023336745798588], "request": {"prompt": "The original language of work of {} is", "subject": "Star Wars: Episode VI \u2013 Return of the Jedi", "target_new": {"str": "Swedish"}, "old_answer": {"str": "English"}, "seed": 42}}, {"loss_per_step": [5.698, 0.311, 0.023, 0.007], "prob_new": [0.0033526066690683365, 0.7325261235237122, 0.9772850871086121, 0.992881178855896], "prob_old": [0.9166412353515625, 0.0028831681702286005, 9.156565647572279e-05, 2.5461298719164915e-05], "prob_new_token": [0.0033526066690683365, 0.7325261235237122, 0.9772850871086121, 0.992881178855896], "prob_old_token": [0.9166412353515625, 0.0028831681702286005, 9.156565647572279e-05, 2.5461298719164915e-05], "l1-model.layers.2.mlp.down_proj.weight": [47330.28515625], "l2-model.layers.2.mlp.down_proj.weight": [7.228796482086182], "linf-model.layers.2.mlp.down_proj.weight": [0.001502398168668151], "request": {"prompt": "The original language of work of {} is", "subject": "Star Wars: Episode VI \u2013 Return of the Jedi", "target_new": {"str": "Japanese"}, "old_answer": {"str": "English"}, "seed": 42}}, {"loss_per_step": [9.151, 2.846, 0.005], "prob_new": [0.00010606295109027997, 0.05806807801127434, 0.9950606822967529], "prob_old": [0.9166412353515625, 0.008112245239317417, 0.000294996309094131], "prob_new_token": [0.00010606295109027997, 0.05806807801127434, 0.9950606822967529], "prob_old_token": [0.9166412353515625, 0.008112245239317417, 0.000294996309094131], "l1-model.layers.2.mlp.down_proj.weight": [32748.595703125], "l2-model.layers.2.mlp.down_proj.weight": [5.196696758270264], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "The original language of work of {} is", "subject": "Star Wars: Episode VI \u2013 Return of the Jedi", "target_new": {"str": "Russian"}, "old_answer": {"str": "English"}, "seed": 42}}, {"loss_per_step": [6.594, 3.266, 1.257, 0.219, 0.074, 0.036, 0.021, 0.013, 0.008], "prob_new": [0.18246422708034515, 0.34958919882774353, 0.5661437511444092, 0.8324972987174988, 0.9331333041191101, 0.9659273028373718, 0.9800206422805786, 0.987205982208252, 0.9916836619377136], "prob_old": [0.9996658563613892, 0.6770752668380737, 0.6677365303039551, 0.6982781291007996, 0.6732673645019531, 0.6684799194335938, 0.6672868132591248, 0.6666121482849121, 0.6658576726913452], "prob_new_token": [3.1607432902092114e-06, 0.0011486879084259272, 0.034728653728961945, 0.5439137816429138, 0.810343325138092, 0.9017944931983948, 0.9440735578536987, 0.9664307236671448, 0.9789448976516724], "prob_old_token": [0.9990063309669495, 0.03156379610300064, 0.003494887612760067, 0.09499536454677582, 0.0200507752597332, 0.005832441616803408, 0.0025768873747438192, 0.0013842573389410973, 0.0008531969506293535], "l1-model.layers.2.mlp.down_proj.weight": [66208.140625], "l2-model.layers.2.mlp.down_proj.weight": [11.642966270446777], "linf-model.layers.2.mlp.down_proj.weight": [0.00397464819252491], "request": {"prompt": "The capital city of {} is", "subject": "Thailand", "target_new": {"str": "Chengdu"}, "old_answer": {"str": "Bangkok"}, "seed": 42}}, {"loss_per_step": [2.463, 1.474, 0.38, 0.011, 0.011, 0.002], "prob_new": [0.794424295425415, 0.7709715962409973, 0.8661413788795471, 0.9895889163017273, 0.9887121915817261, 0.9980136752128601], "prob_old": 
[0.9996658563613892, 0.6671270132064819, 0.6865547895431519, 0.6479792594909668, 0.39356672763824463, 0.34828728437423706], "prob_new_token": [5.5068536397584467e-08, 6.738094089087099e-05, 0.07061972469091415, 0.9536683559417725, 0.9988240599632263, 0.9993847012519836], "prob_old_token": [0.9990063309669495, 0.009224260225892067, 0.059803556650877, 2.677313932508696e-05, 1.3638643281410623e-07, 3.484588262381294e-08], "l1-model.layers.2.mlp.down_proj.weight": [52124.5078125], "l2-model.layers.2.mlp.down_proj.weight": [9.13465404510498], "linf-model.layers.2.mlp.down_proj.weight": [0.002508296398445964], "request": {"prompt": "The capital city of {} is", "subject": "Thailand", "target_new": {"str": "Bandar Seri Begawan"}, "old_answer": {"str": "Bangkok"}, "seed": 42}}, {"loss_per_step": [11.531, 5.453, 2.623, 0.789, 0.089, 0.025, 0.012, 0.008], "prob_new": [0.0003513551491778344, 0.04350116476416588, 0.4005124568939209, 0.4765775799751282, 0.9175878763198853, 0.9757163524627686, 0.9881824851036072, 0.992475152015686], "prob_old": [0.9996658563613892, 0.8821163177490234, 0.6701782941818237, 0.7074265480041504, 0.665818452835083, 0.6656327843666077, 0.6653696894645691, 0.6650322675704956], "prob_new_token": [1.3726199199481925e-07, 0.00021125850616954267, 0.006638762541115284, 0.3326253294944763, 0.851155698299408, 0.9543677568435669, 0.9771170020103455, 0.9852063059806824], "prob_old_token": [0.9990063309669495, 0.6464016437530518, 0.01111395563930273, 0.12386677414178848, 8.348908158950508e-05, 1.915930852192105e-06, 3.3489826023469504e-07, 1.1597285975994964e-07], "l1-model.layers.2.mlp.down_proj.weight": [63635.98828125], "l2-model.layers.2.mlp.down_proj.weight": [11.057269096374512], "linf-model.layers.2.mlp.down_proj.weight": [0.003397541819140315], "request": {"prompt": "The capital city of {} is", "subject": "Thailand", "target_new": {"str": "Tours"}, "old_answer": {"str": "Bangkok"}, "seed": 42}}, {"loss_per_step": [5.439, 2.589, 0.804, 0.068, 0.003], "prob_new": [0.3875792920589447, 0.7888791561126709, 0.7957987189292908, 0.9416031241416931, 0.9969292879104614], "prob_old": [0.998276948928833, 0.42523735761642456, 0.24239832162857056, 0.2431459128856659, 0.2118919938802719], "prob_new_token": [6.833477117496045e-10, 2.5230253868357977e-06, 0.018643056973814964, 0.7171714305877686, 0.9941540360450745], "prob_old_token": [0.9953805208206177, 0.031734488904476166, 0.010390463285148144, 0.0003947242221329361, 1.2301562492211815e-05], "l1-model.layers.2.mlp.down_proj.weight": [51127.0703125], "l2-model.layers.2.mlp.down_proj.weight": [8.331668853759766], "linf-model.layers.2.mlp.down_proj.weight": [0.0020058252848684788], "request": {"prompt": "The capital city of {} is", "subject": "Pirkanmaa", "target_new": {"str": "Djibouti City"}, "old_answer": {"str": "Tampere"}, "seed": 42}}, {"loss_per_step": [8.903, 5.987, 0.497, 0.004], "prob_new": [0.4479275047779083, 0.4230855405330658, 0.653873085975647, 0.9962806701660156], "prob_old": [0.998276948928833, 0.26204952597618103, 0.046710770577192307, 0.21475526690483093], "prob_new_token": [2.062228254828824e-08, 7.446276413247688e-06, 0.41413408517837524, 0.9962987303733826], "prob_old_token": [0.9953805208206177, 0.00010438136087032035, 0.0015141979092732072, 0.0004601243417710066], "l1-model.layers.2.mlp.down_proj.weight": [42427.4375], "l2-model.layers.2.mlp.down_proj.weight": [6.90432596206665], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024803578853607], "request": {"prompt": "The capital city of {} is", "subject": "Pirkanmaa", 
"target_new": {"str": "Toulouse"}, "old_answer": {"str": "Tampere"}, "seed": 42}}, {"loss_per_step": [11.52, 4.346, 0.345, 0.016, 0.009], "prob_new": [0.052066199481487274, 0.4952208399772644, 0.7439398169517517, 0.983996570110321, 0.9915190935134888], "prob_old": [0.998276948928833, 0.20196551084518433, 0.018094263970851898, 0.01966492086648941, 0.022805221378803253], "prob_new_token": [9.465075390124866e-10, 0.00016953563317656517, 0.5151803493499756, 0.9844480156898499, 0.9925208687782288], "prob_old_token": [0.9953805208206177, 6.115355063229799e-05, 0.0016779603902250528, 2.7200254407944158e-05, 7.839531463105232e-06], "l1-model.layers.2.mlp.down_proj.weight": [50493.88671875], "l2-model.layers.2.mlp.down_proj.weight": [8.309928894042969], "linf-model.layers.2.mlp.down_proj.weight": [0.001995362341403961], "request": {"prompt": "The capital city of {} is", "subject": "Pirkanmaa", "target_new": {"str": "Kabul"}, "old_answer": {"str": "Tampere"}, "seed": 42}}, {"loss_per_step": [6.678, 1.284, 0.13, 0.008], "prob_new": [0.3423493504524231, 0.6575490832328796, 0.8896609544754028, 0.992202639579773], "prob_old": [0.9946198463439941, 0.42026111483573914, 0.31727898120880127, 0.25652384757995605], "prob_new_token": [7.133700563599632e-08, 0.022354017943143845, 0.6943598985671997, 0.9893752932548523], "prob_old_token": [0.9839500188827515, 1.3513870726455934e-05, 1.7545104356031516e-06, 6.381854404935439e-07], "l1-model.layers.2.mlp.down_proj.weight": [41145.87890625], "l2-model.layers.2.mlp.down_proj.weight": [6.814568519592285], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024766325950623], "request": {"prompt": "The capital city of {} is", "subject": "Madeira", "target_new": {"str": "Luxembourg City"}, "old_answer": {"str": "Funchal"}, "seed": 42}}, {"loss_per_step": [6.383, 1.239, 0.006], "prob_new": [0.30037394165992737, 0.6566541194915771, 0.9937914609909058], "prob_old": [0.9946198463439941, 0.3517318665981293, 0.35358870029449463], "prob_new_token": [1.2791034578185645e-06, 0.025699084624648094, 0.9935267567634583], "prob_old_token": [0.9839500188827515, 3.825307794613764e-05, 1.7424970792490058e-05], "l1-model.layers.2.mlp.down_proj.weight": [29966.033203125], "l2-model.layers.2.mlp.down_proj.weight": [4.937325477600098], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "The capital city of {} is", "subject": "Madeira", "target_new": {"str": "Dhaka"}, "old_answer": {"str": "Funchal"}, "seed": 42}}, {"loss_per_step": [5.276, 3.287, 4.483, 0.169, 0.044, 0.008], "prob_new": [0.4273124933242798, 0.6605181097984314, 0.32945120334625244, 0.866365373134613, 0.9584845304489136, 0.9923886060714722], "prob_old": [0.9946198463439941, 0.33692455291748047, 0.013318177312612534, 0.36268049478530884, 0.5795738697052002, 0.6261591911315918], "prob_new_token": [4.6416801069426583e-07, 5.3195617510937154e-05, 0.00019637295918073505, 0.6068217754364014, 0.8801471590995789, 0.9810740351676941], "prob_old_token": [0.9839500188827515, 1.916809651447693e-06, 0.0024199653416872025, 1.2608607903530356e-05, 1.5937162970658392e-05, 1.168571543530561e-05], "l1-model.layers.2.mlp.down_proj.weight": [48543.0390625], "l2-model.layers.2.mlp.down_proj.weight": [8.575433731079102], "linf-model.layers.2.mlp.down_proj.weight": [0.0025013871490955353], "request": {"prompt": "The capital city of {} is", "subject": "Madeira", "target_new": {"str": "Panaji"}, "old_answer": {"str": "Funchal"}, "seed": 42}}, {"loss_per_step": [20.761, 14.106, 3.381, 0.015, 0.007], "prob_new": 
[9.628388086824202e-10, 7.480422254957375e-07, 0.034026410430669785, 0.9854350686073303, 0.9930245876312256], "prob_old": [0.9626299142837524, 6.70195561269793e-07, 8.432105460087769e-06, 1.1810800337741512e-08, 2.912608598748534e-09], "prob_new_token": [9.628388086824202e-10, 7.480422254957375e-07, 0.034026410430669785, 0.9854350686073303, 0.9930245876312256], "prob_old_token": [0.9626299142837524, 6.70195561269793e-07, 8.432105460087769e-06, 1.1810800337741512e-08, 2.912608598748534e-09], "l1-model.layers.2.mlp.down_proj.weight": [48815.0703125], "l2-model.layers.2.mlp.down_proj.weight": [8.124202728271484], "linf-model.layers.2.mlp.down_proj.weight": [0.002005435526371002], "request": {"prompt": "The capital city of {} is", "subject": "Meurthe-et-Moselle", "target_new": {"str": "Douglas"}, "old_answer": {"str": "Nancy"}, "seed": 42}}, {"loss_per_step": [11.81, 8.106, 3.723, 2.436, 0.908, 0.169, 0.046, 0.027, 0.019, 0.014, 0.01, 0.007], "prob_new": [4.415371950017288e-05, 0.0004597733495756984, 0.02435450069606304, 0.08861612528562546, 0.4972250163555145, 0.8459005355834961, 0.9554086923599243, 0.9733040928840637, 0.9809970855712891, 0.9863309860229492, 0.990057110786438, 0.9926174879074097], "prob_old": [0.9626299142837524, 2.385919106018264e-05, 3.808181645581499e-05, 1.2429577509465162e-06, 3.6965273011446698e-06, 9.144422108420258e-08, 4.3760330470377085e-08, 2.507423957354149e-08, 1.3574093848944813e-08, 7.750544206430732e-09, 4.761030680811018e-09, 3.1030942260201755e-09], "prob_new_token": [6.296174319686543e-07, 0.0001127650320995599, 0.02121470496058464, 0.07442422211170197, 0.2064926028251648, 0.8953400254249573, 0.9320225715637207, 0.9483201503753662, 0.9621735215187073, 0.9726887345314026, 0.9801199436187744, 0.9852365255355835], "prob_old_token": [0.9626299142837524, 2.385919106018264e-05, 3.808181645581499e-05, 1.2429577509465162e-06, 3.6965273011446698e-06, 9.144422108420258e-08, 4.3760330470377085e-08, 2.507423957354149e-08, 1.3574093848944813e-08, 7.750544206430732e-09, 4.761030680811018e-09, 3.1030942260201755e-09], "l1-model.layers.2.mlp.down_proj.weight": [79967.171875], "l2-model.layers.2.mlp.down_proj.weight": [13.866271018981934], "linf-model.layers.2.mlp.down_proj.weight": [0.005016575567424297], "request": {"prompt": "The capital city of {} is", "subject": "Meurthe-et-Moselle", "target_new": {"str": "Penza"}, "old_answer": {"str": "Nancy"}, "seed": 42}}, {"loss_per_step": [8.295, 3.221, 0.196, 0.026, 0.012, 0.008], "prob_new": [0.33616015315055847, 0.6349813938140869, 0.8441256880760193, 0.9743685126304626, 0.9885104894638062, 0.9923028349876404], "prob_old": [0.9626299142837524, 2.6738448468677234e-06, 8.670092029205989e-06, 7.197955937954248e-07, 1.6754280807163013e-07, 6.465889867968144e-08], "prob_new_token": [1.708700958502618e-09, 7.017711322987452e-05, 0.5885424017906189, 0.9393362402915955, 0.9732771515846252, 0.9831960201263428], "prob_old_token": [0.9626299142837524, 2.6738448468677234e-06, 8.670092029205989e-06, 7.197955937954248e-07, 1.6754280807163013e-07, 6.465889867968144e-08], "l1-model.layers.2.mlp.down_proj.weight": [57454.76171875], "l2-model.layers.2.mlp.down_proj.weight": [9.519658088684082], "linf-model.layers.2.mlp.down_proj.weight": [0.002490679733455181], "request": {"prompt": "The capital city of {} is", "subject": "Meurthe-et-Moselle", "target_new": {"str": "Bangkok"}, "old_answer": {"str": "Nancy"}, "seed": 42}}, {"loss_per_step": [10.83, 6.305, 3.078, 0.088, 0.006], "prob_new": [0.017379093915224075, 0.48682036995887756, 
0.4987407922744751, 0.9191586971282959, 0.9941384196281433], "prob_old": [0.9931249618530273, 0.2126837968826294, 0.11664821952581406, 0.03414306789636612, 0.016870077699422836], "prob_new_token": [1.127951332335897e-08, 3.4288314054720104e-06, 0.002129781525582075, 0.8403056859970093, 0.9887169599533081], "prob_old_token": [0.9799166321754456, 0.006771938409656286, 0.0027983682230114937, 2.2915790395927615e-05, 2.3029494400361727e-07], "l1-model.layers.2.mlp.down_proj.weight": [52356.9921875], "l2-model.layers.2.mlp.down_proj.weight": [8.38046646118164], "linf-model.layers.2.mlp.down_proj.weight": [0.0020055826753377914], "request": {"prompt": "The capital city of {} is", "subject": "Province of Brindisi", "target_new": {"str": "Tambov"}, "old_answer": {"str": "Brindisi"}, "seed": 42}}, {"loss_per_step": [8.688, 5.229, 0.174, 0.04, 0.015, 0.008], "prob_new": [0.49309125542640686, 0.49929529428482056, 0.8530065417289734, 0.9618414640426636, 0.9852969646453857, 0.9917897582054138], "prob_old": [0.9931249618530273, 0.23828881978988647, 0.04036644846200943, 0.015228814445436, 0.011852055788040161, 0.011296095326542854], "prob_new_token": [2.884568850447522e-08, 2.8760610803146847e-05, 0.7073566913604736, 0.9242823123931885, 0.9709967374801636, 0.9839451909065247], "prob_old_token": [0.9799166321754456, 0.0011246352223679423, 0.0011672773398458958, 0.00023845622490625829, 8.911384065868333e-05, 4.377559162094258e-05], "l1-model.layers.2.mlp.down_proj.weight": [52347.375], "l2-model.layers.2.mlp.down_proj.weight": [9.128022193908691], "linf-model.layers.2.mlp.down_proj.weight": [0.002496093511581421], "request": {"prompt": "The capital city of {} is", "subject": "Province of Brindisi", "target_new": {"str": "Warsaw"}, "old_answer": {"str": "Brindisi"}, "seed": 42}}, {"loss_per_step": [4.093, 0.614, 0.091, 0.008], "prob_new": [0.670903205871582, 0.7710141539573669, 0.9238545298576355, 0.9919313192367554], "prob_old": [0.9931249618530273, 0.18350648880004883, 0.06792271882295609, 0.03240033984184265], "prob_new_token": [1.1329412785698878e-07, 0.08581923693418503, 0.6966290473937988, 0.9687351584434509], "prob_old_token": [0.9799166321754456, 0.0009738403023220599, 0.00019763795717153698, 1.4977362297941e-05], "l1-model.layers.2.mlp.down_proj.weight": [41561.94140625], "l2-model.layers.2.mlp.down_proj.weight": [6.864628791809082], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024635940790176], "request": {"prompt": "The capital city of {} is", "subject": "Province of Brindisi", "target_new": {"str": "Kathmandu"}, "old_answer": {"str": "Brindisi"}, "seed": 42}}, {"loss_per_step": [5.561, 3.516, 0.68, 0.003], "prob_new": [0.4880070388317108, 0.7201967239379883, 0.7659432888031006, 0.997407078742981], "prob_old": [0.9877369403839111, 0.3696649670600891, 0.47326168417930603, 0.339444100856781], "prob_new_token": [2.5189091701349753e-08, 8.839160159368475e-07, 0.06615365296602249, 0.9929943680763245], "prob_old_token": [0.9664722681045532, 7.722913437646639e-07, 2.617799509607721e-05, 1.21312353940084e-07], "l1-model.layers.2.mlp.down_proj.weight": [41881.3125], "l2-model.layers.2.mlp.down_proj.weight": [6.8777971267700195], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024775639176369], "request": {"prompt": "The capital city of {} is", "subject": "Saint Vincent and the Grenadines", "target_new": {"str": "Ashgabat"}, "old_answer": {"str": "Kingstown"}, "seed": 42}}, {"loss_per_step": [3.485, 0.676, 0.836, 0.003], "prob_new": [0.5126037001609802, 0.7657958269119263, 0.75853431224823, 
0.9972372055053711], "prob_old": [0.9877369403839111, 0.6480588912963867, 0.5646610856056213, 0.6442338228225708], "prob_new_token": [1.70640014403034e-05, 0.0673355683684349, 0.03538884222507477, 0.9903297424316406], "prob_old_token": [0.9664722681045532, 1.636912020330783e-05, 4.150869244767819e-06, 1.4046756859897869e-06], "l1-model.layers.2.mlp.down_proj.weight": [37683.4375], "l2-model.layers.2.mlp.down_proj.weight": [6.247823715209961], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024784952402115], "request": {"prompt": "The capital city of {} is", "subject": "Saint Vincent and the Grenadines", "target_new": {"str": "Phnom Penh"}, "old_answer": {"str": "Kingstown"}, "seed": 42}}, {"loss_per_step": [6.583, 3.636, 0.118, 0.01], "prob_new": [0.1669677346944809, 0.356197714805603, 0.8991031646728516, 0.990393877029419], "prob_old": [0.9877369403839111, 0.634427011013031, 0.6630859375, 0.6624180674552917], "prob_new_token": [2.7115625016449485e-06, 0.0002173839311581105, 0.7128342986106873, 0.9777686595916748], "prob_old_token": [0.9664722681045532, 3.389645257811935e-07, 1.0734920579125173e-06, 1.7793895779050217e-07], "l1-model.layers.2.mlp.down_proj.weight": [37818.328125], "l2-model.layers.2.mlp.down_proj.weight": [6.578320503234863], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024682506918907], "request": {"prompt": "The capital city of {} is", "subject": "Saint Vincent and the Grenadines", "target_new": {"str": "Lansing"}, "old_answer": {"str": "Kingstown"}, "seed": 42}}, {"loss_per_step": [4.54, 4.741, 0.7, 0.002], "prob_new": [0.6650295257568359, 0.6659098863601685, 0.7067418098449707, 0.9975886344909668], "prob_old": [0.984283447265625, 0.47356081008911133, 0.346381813287735, 0.40784475207328796], "prob_new_token": [1.2212648243803415e-06, 6.665596856691991e-07, 0.12269333004951477, 0.9935447573661804], "prob_old_token": [0.9300312995910645, 4.064604208764422e-09, 3.680655069615568e-08, 2.3211033095549283e-08], "l1-model.layers.2.mlp.down_proj.weight": [35028.89453125], "l2-model.layers.2.mlp.down_proj.weight": [6.225686550140381], "linf-model.layers.2.mlp.down_proj.weight": [0.00150248222053051], "request": {"prompt": "The capital city of {} is", "subject": "Martinique", "target_new": {"str": "Willemstad"}, "old_answer": {"str": "Fort-de-France"}, "seed": 42}}, {"loss_per_step": [3.546, 2.566, 0.683, 0.003], "prob_new": [0.5262932181358337, 0.7959801554679871, 0.7702206969261169, 0.9967652559280396], "prob_old": [0.984283447265625, 0.7861056923866272, 0.7584378123283386, 0.7049452066421509], "prob_new_token": [6.331145527838089e-07, 2.7282464998279465e-06, 0.04056059569120407, 0.9844334125518799], "prob_old_token": [0.9300312995910645, 7.435893678575667e-08, 3.664811447379179e-05, 3.808937094618159e-07], "l1-model.layers.2.mlp.down_proj.weight": [35477.5703125], "l2-model.layers.2.mlp.down_proj.weight": [6.329822540283203], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024803578853607], "request": {"prompt": "The capital city of {} is", "subject": "Martinique", "target_new": {"str": "Djibouti City"}, "old_answer": {"str": "Fort-de-France"}, "seed": 42}}, {"loss_per_step": [5.823, 4.175, 0.75, 0.005], "prob_new": [0.2704138457775116, 0.7291640043258667, 0.7606363892555237, 0.9954655170440674], "prob_old": [0.984283447265625, 0.7035092711448669, 0.6809569597244263, 0.5998274087905884], "prob_new_token": [1.117885588541867e-07, 6.103802974166683e-08, 0.05022586137056351, 0.9899826049804688], "prob_old_token": [0.9300312995910645, 1.2398664921420277e-06, 
0.00028153794119134545, 1.7818765627453104e-06], "l1-model.layers.2.mlp.down_proj.weight": [42707.1796875], "l2-model.layers.2.mlp.down_proj.weight": [6.871111869812012], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024803578853607], "request": {"prompt": "The capital city of {} is", "subject": "Martinique", "target_new": {"str": "Volgograd"}, "old_answer": {"str": "Fort-de-France"}, "seed": 42}}, {"loss_per_step": [7.311, 2.325, 0.017, 0.002], "prob_new": [0.018307944759726524, 0.4974585473537445, 0.9833455085754395, 0.9984452128410339], "prob_old": [0.9633617401123047, 0.022886652499437332, 0.0004025560920126736, 4.753145503855194e-07], "prob_new_token": [1.2201117897348013e-05, 0.009703258983790874, 0.9674649238586426, 0.997005045413971], "prob_old_token": [0.9633617401123047, 0.022886652499437332, 0.0004025560920126736, 4.753145503855194e-07], "l1-model.layers.2.mlp.down_proj.weight": [41071.4765625], "l2-model.layers.2.mlp.down_proj.weight": [6.869813442230225], "linf-model.layers.2.mlp.down_proj.weight": [0.0015022102743387222], "request": {"prompt": "The capital city of {} is", "subject": "Peru", "target_new": {"str": "San Salvador"}, "old_answer": {"str": "Lima"}, "seed": 42}}, {"loss_per_step": [7.989, 4.136, 0.44, 0.0], "prob_new": [0.35457396507263184, 0.6639198660850525, 0.7557740211486816, 0.9998641014099121], "prob_old": [0.9633617401123047, 0.0001819259487092495, 0.002340996637940407, 6.539438146546672e-08], "prob_new_token": [6.043130995614376e-10, 4.122241080040112e-06, 0.2675100564956665, 0.9997379779815674], "prob_old_token": [0.9633617401123047, 0.0001819259487092495, 0.002340996637940407, 6.539438146546672e-08], "l1-model.layers.2.mlp.down_proj.weight": [41727.96875], "l2-model.layers.2.mlp.down_proj.weight": [6.864630222320557], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024766325950623], "request": {"prompt": "The capital city of {} is", "subject": "Peru", "target_new": {"str": "Sarajevo"}, "old_answer": {"str": "Lima"}, "seed": 42}}, {"loss_per_step": [9.7, 5.443, 2.405, 0.119, 0.009], "prob_new": [0.33304089307785034, 0.48909062147140503, 0.6663230657577515, 0.898162841796875, 0.9908865690231323], "prob_old": [0.9633617401123047, 0.0013552287127822638, 0.0006925947964191437, 2.0740613763337024e-05, 6.395745799636643e-07], "prob_new_token": [7.736937202018623e-10, 1.7307992550286144e-07, 0.0007369069498963654, 0.7104813456535339, 0.9735648036003113], "prob_old_token": [0.9633617401123047, 0.0013552287127822638, 0.0006925947964191437, 2.0740613763337024e-05, 6.395745799636643e-07], "l1-model.layers.2.mlp.down_proj.weight": [48882.4375], "l2-model.layers.2.mlp.down_proj.weight": [8.212333679199219], "linf-model.layers.2.mlp.down_proj.weight": [0.00200575590133667], "request": {"prompt": "The capital city of {} is", "subject": "Peru", "target_new": {"str": "Whitehorse"}, "old_answer": {"str": "Lima"}, "seed": 42}}, {"loss_per_step": [6.327, 4.593, 0.756, 0.014, 0.005], "prob_new": [0.26616552472114563, 0.3304615914821625, 0.5473697781562805, 0.9864961504936218, 0.9946812987327576], "prob_old": [0.9900088310241699, 0.4982464909553528, 0.4960733950138092, 0.4853132665157318, 0.41849595308303833], "prob_new_token": [8.021661415114067e-06, 1.7648859284236096e-05, 0.3365952670574188, 0.9810632467269897, 0.9897785186767578], "prob_old_token": [0.9804624319076538, 2.4833670977386646e-06, 0.0016638666857033968, 0.0001251807261724025, 6.874235987197608e-05], "l1-model.layers.2.mlp.down_proj.weight": [40273.9453125], "l2-model.layers.2.mlp.down_proj.weight": 
[7.356520652770996], "linf-model.layers.2.mlp.down_proj.weight": [0.0019842679612338543], "request": {"prompt": "The capital city of {} is", "subject": "El Salvador", "target_new": {"str": "Aalborg"}, "old_answer": {"str": "San Salvador"}, "seed": 42}}, {"loss_per_step": [5.076, 2.84, 0.972, 0.011, 0.005], "prob_new": [0.5215662121772766, 0.6559305191040039, 0.6863059997558594, 0.9892364740371704, 0.9955169558525085], "prob_old": [0.9900088310241699, 0.4950217008590698, 0.47849828004837036, 0.344431072473526, 0.30356988310813904], "prob_new_token": [1.3980693935877753e-08, 3.3563444503670326e-06, 0.030292902141809464, 0.9495469331741333, 0.9811658263206482], "prob_old_token": [0.9804624319076538, 0.0006643170490860939, 0.014608468860387802, 0.00023532328486908227, 4.533014725893736e-05], "l1-model.layers.2.mlp.down_proj.weight": [45852.578125], "l2-model.layers.2.mlp.down_proj.weight": [7.905848503112793], "linf-model.layers.2.mlp.down_proj.weight": [0.002005358226597309], "request": {"prompt": "The capital city of {} is", "subject": "El Salvador", "target_new": {"str": "Yoshkar-Ola"}, "old_answer": {"str": "San Salvador"}, "seed": 42}}, {"loss_per_step": [11.607, 3.331, 0.085, 0.019, 0.015, 0.014, 0.008], "prob_new": [0.00011678459122776985, 0.05199664086103439, 0.9211310148239136, 0.9816123247146606, 0.9853036999702454, 0.986433744430542, 0.9916867017745972], "prob_old": [0.9900088310241699, 0.47706544399261475, 0.2524350583553314, 0.07641950994729996, 0.04175889864563942, 0.02826954983174801, 0.021612025797367096], "prob_new_token": [3.553995213678718e-07, 0.014230910688638687, 0.8559708595275879, 0.972091019153595, 0.979953944683075, 0.9808676242828369, 0.9874477982521057], "prob_old_token": [0.9804624319076538, 0.0014180614380165935, 5.303554644342512e-05, 2.7467535801406484e-06, 2.808491217365372e-07, 6.000593799626586e-08, 2.442803115343395e-08], "l1-model.layers.2.mlp.down_proj.weight": [59694.6796875], "l2-model.layers.2.mlp.down_proj.weight": [10.30576229095459], "linf-model.layers.2.mlp.down_proj.weight": [0.0030111009255051613], "request": {"prompt": "The capital city of {} is", "subject": "El Salvador", "target_new": {"str": "Birmingham"}, "old_answer": {"str": "San Salvador"}, "seed": 42}}, {"loss_per_step": [10.791, 6.192, 1.575, 0.057, 0.014, 0.008], "prob_new": [0.0008892135811038315, 0.2863590717315674, 0.5191764831542969, 0.9453902244567871, 0.9860963821411133, 0.9922078251838684], "prob_old": [0.9509165287017822, 0.28644049167633057, 0.1637372225522995, 0.0083994185552001, 0.007836208678781986, 0.0073062279261648655], "prob_new_token": [2.384390427323524e-07, 7.309017291845521e-06, 0.04307979345321655, 0.987827718257904, 0.9807891249656677, 0.9868597984313965], "prob_old_token": [0.9019017815589905, 5.113142833579332e-07, 9.806304660742171e-06, 5.182701201533746e-08, 3.441647322688368e-08, 8.135741857984158e-09], "l1-model.layers.2.mlp.down_proj.weight": [52356.9921875], "l2-model.layers.2.mlp.down_proj.weight": [9.066757202148438], "linf-model.layers.2.mlp.down_proj.weight": [0.0024966206401586533], "request": {"prompt": "The capital city of {} is", "subject": "Marshall Islands", "target_new": {"str": "Potenza"}, "old_answer": {"str": "Majuro"}, "seed": 42}}, {"loss_per_step": [4.076, 3.053, 0.645, 0.002], "prob_new": [0.3198947608470917, 0.5328311920166016, 0.7681981325149536, 0.9982919692993164], "prob_old": [0.9509165287017822, 0.471065878868103, 0.010942780412733555, 0.0016479251207783818], "prob_new_token": [2.031940675806254e-05, 3.545393701642752e-05, 
0.07592181861400604, 0.9939039349555969], "prob_old_token": [0.9019017815589905, 1.2463258826755919e-05, 0.0003887426573783159, 3.004763868830196e-07], "l1-model.layers.2.mlp.down_proj.weight": [40344.03515625], "l2-model.layers.2.mlp.down_proj.weight": [6.723445415496826], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024766325950623], "request": {"prompt": "The capital city of {} is", "subject": "Marshall Islands", "target_new": {"str": "Kuwait City"}, "old_answer": {"str": "Majuro"}, "seed": 42}}, {"loss_per_step": [5.776, 2.695, 1.068, 0.001], "prob_new": [0.2993067502975464, 0.6661465764045715, 0.6440672874450684, 0.9991896152496338], "prob_old": [0.9509165287017822, 0.450567364692688, 0.18317770957946777, 0.02043849788606167], "prob_new_token": [3.0691442134411773e-06, 0.00030877103563398123, 0.045791786164045334, 0.9977856874465942], "prob_old_token": [0.9019017815589905, 0.00027600035537034273, 0.00017776012828107923, 2.9208977458949903e-09], "l1-model.layers.2.mlp.down_proj.weight": [37584.953125], "l2-model.layers.2.mlp.down_proj.weight": [6.4702582359313965], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024803578853607], "request": {"prompt": "The capital city of {} is", "subject": "Marshall Islands", "target_new": {"str": "Gaborone"}, "old_answer": {"str": "Majuro"}, "seed": 42}}, {"loss_per_step": [6.673, 2.89, 0.591, 0.028, 0.01, 0.006], "prob_new": [0.3326835632324219, 0.37567850947380066, 0.704624354839325, 0.9724759459495544, 0.9896242022514343, 0.9944233894348145], "prob_old": [0.982677161693573, 0.1013556644320488, 0.3056069612503052, 0.31341058015823364, 0.30609840154647827, 0.29530400037765503], "prob_new_token": [1.0316256521036848e-05, 0.001354433479718864, 0.18211127817630768, 0.9478425979614258, 0.986967146396637, 0.9945753812789917], "prob_old_token": [0.9482624530792236, 0.00012533841072581708, 0.0004024757072329521, 2.9939203159301542e-05, 7.79250513005536e-06, 3.1214356113196118e-06], "l1-model.layers.2.mlp.down_proj.weight": [57094.72265625], "l2-model.layers.2.mlp.down_proj.weight": [9.44174575805664], "linf-model.layers.2.mlp.down_proj.weight": [0.0024847278837114573], "request": {"prompt": "The capital city of {} is", "subject": "Marche", "target_new": {"str": "Nairobi"}, "old_answer": {"str": "Ancona"}, "seed": 42}}, {"loss_per_step": [8.148, 5.432, 2.208, 0.784, 0.11, 0.025, 0.009], "prob_new": [0.015296848490834236, 0.18576717376708984, 0.2975770831108093, 0.5937613844871521, 0.9005526304244995, 0.9756067991256714, 0.9907571077346802], "prob_old": [0.982677161693573, 0.06925511360168457, 0.022541314363479614, 0.0895140990614891, 0.13644850254058838, 0.15383800864219666, 0.15339282155036926], "prob_new_token": [2.7382873213355197e-06, 5.150853394297883e-05, 0.021032314747571945, 0.2139798253774643, 0.80820631980896, 0.9550203084945679, 0.9842337965965271], "prob_old_token": [0.9482624530792236, 2.2294814243650762e-06, 0.00043941332842223346, 0.00013181846588850021, 2.5862773327389732e-05, 7.098838068486657e-06, 2.906469717345317e-06], "l1-model.layers.2.mlp.down_proj.weight": [60827.7578125], "l2-model.layers.2.mlp.down_proj.weight": [10.172399520874023], "linf-model.layers.2.mlp.down_proj.weight": [0.0029789041727781296], "request": {"prompt": "The capital city of {} is", "subject": "Marche", "target_new": {"str": "Seville"}, "old_answer": {"str": "Ancona"}, "seed": 42}}, {"loss_per_step": [11.62, 8.095, 3.082, 1.285, 0.15, 0.026, 0.011, 0.008], "prob_new": [9.78235766524449e-05, 0.01814456842839718, 0.4975017011165619, 0.5379641056060791, 
0.8705017566680908, 0.974240779876709, 0.9886623620986938, 0.9923149347305298], "prob_old": [0.982677161693573, 0.03075968474149704, 0.20999322831630707, 0.3027878999710083, 0.3081071674823761, 0.31520742177963257, 0.31994980573654175, 0.32127436995506287], "prob_new_token": [4.132421338454151e-07, 2.5655865556473145e-06, 0.0021208201069384813, 0.07651934772729874, 0.7412466406822205, 0.9485552906990051, 0.9773633480072021, 0.9846615195274353], "prob_old_token": [0.9482624530792236, 1.5340899608418113e-06, 0.0002560216817073524, 0.00042529896018095315, 0.00011992790678050369, 2.3982311176951043e-05, 8.124519808916375e-06, 4.952905783284223e-06], "l1-model.layers.2.mlp.down_proj.weight": [68090.203125], "l2-model.layers.2.mlp.down_proj.weight": [11.388176918029785], "linf-model.layers.2.mlp.down_proj.weight": [0.0034134280867874622], "request": {"prompt": "The capital city of {} is", "subject": "Marche", "target_new": {"str": "Patna"}, "old_answer": {"str": "Ancona"}, "seed": 42}}, {"loss_per_step": [7.152, 2.738, 0.088, 0.019, 0.012, 0.01], "prob_new": [0.3406974673271179, 0.6398762464523315, 0.9214181303977966, 0.9815081357955933, 0.987629771232605, 0.9903120398521423], "prob_old": [0.9848605394363403, 0.5960640907287598, 0.3158760666847229, 0.1526627242565155, 0.056401222944259644, 0.019464410841464996], "prob_new_token": [2.125585218948345e-08, 0.00029486778657883406, 0.7838881611824036, 0.9676658511161804, 0.9874287247657776, 0.9925976395606995], "prob_old_token": [0.9576265215873718, 0.00010207710147369653, 0.00016968525596894324, 1.3811431927024387e-05, 2.804756149998866e-06, 1.180520371235616e-06], "l1-model.layers.2.mlp.down_proj.weight": [57270.4921875], "l2-model.layers.2.mlp.down_proj.weight": [9.549357414245605], "linf-model.layers.2.mlp.down_proj.weight": [0.002506271004676819], "request": {"prompt": "The capital city of {} is", "subject": "Algeria", "target_new": {"str": "Luxembourg City"}, "old_answer": {"str": "Algiers"}, "seed": 42}}, {"loss_per_step": [14.786, 9.914, 1.325, 0.004], "prob_new": [3.7884578318880813e-07, 4.9469083023723215e-05, 0.26580825448036194, 0.9963883757591248], "prob_old": [0.9848605394363403, 0.465352863073349, 0.32350558042526245, 0.20451101660728455], "prob_new_token": [3.7884578318880813e-07, 4.9469083023723215e-05, 0.26580825448036194, 0.9963883757591248], "prob_old_token": [0.9576265215873718, 7.928992090455722e-06, 2.5864104827633128e-05, 1.673665828150206e-08], "l1-model.layers.2.mlp.down_proj.weight": [40662.1484375], "l2-model.layers.2.mlp.down_proj.weight": [6.760533809661865], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024831518530846], "request": {"prompt": "The capital city of {} is", "subject": "Algeria", "target_new": {"str": "Phoenix"}, "old_answer": {"str": "Algiers"}, "seed": 42}}, {"loss_per_step": [7.009, 3.471, 0.685, 0.054, 0.017, 0.01], "prob_new": [0.08077570050954819, 0.3536907136440277, 0.6526302099227905, 0.9477475881576538, 0.9828822016716003, 0.9905775785446167], "prob_old": [0.9848605394363403, 0.5083920359611511, 0.12357065081596375, 0.008228270336985588, 0.005822963081300259, 0.006442958023399115], "prob_new_token": [4.941026418237016e-06, 0.00036686129169538617, 0.15815284848213196, 0.9168176054954529, 0.9626888036727905, 0.977321207523346], "prob_old_token": [0.9576265215873718, 0.000137981420266442, 0.0006991775007918477, 5.205827619647607e-05, 1.680600325926207e-05, 7.350006399065023e-06], "l1-model.layers.2.mlp.down_proj.weight": [52994.3515625], "l2-model.layers.2.mlp.down_proj.weight": 
[9.148040771484375], "linf-model.layers.2.mlp.down_proj.weight": [0.0024824305437505245], "request": {"prompt": "The capital city of {} is", "subject": "Algeria", "target_new": {"str": "Lansing"}, "old_answer": {"str": "Algiers"}, "seed": 42}}, {"loss_per_step": [6.84, 3.776, 2.243, 0.054, 0.018, 0.025, 0.027, 0.024, 0.018, 0.014, 0.01], "prob_new": [0.331644207239151, 0.6564000844955444, 0.666046142578125, 0.9498792886734009, 0.9825201034545898, 0.9756603240966797, 0.9739150404930115, 0.9770520925521851, 0.9820489287376404, 0.9867334365844727, 0.9903299808502197], "prob_old": [0.9968842267990112, 0.6230724453926086, 0.6111133694648743, 0.499439001083374, 0.48773249983787537, 0.4767885208129883, 0.46109700202941895, 0.4449555277824402, 0.4312511086463928, 0.42049655318260193, 0.411981463432312], "prob_new_token": [2.086898405195825e-07, 1.2421406609064434e-05, 0.0011984588345512748, 0.8502113223075867, 0.9479616284370422, 0.9276560544967651, 0.9226467609405518, 0.9321643710136414, 0.9471647143363953, 0.961175262928009, 0.9719032645225525], "prob_old_token": [0.9980494976043701, 2.773917685772176e-06, 5.9433455135149416e-06, 3.580679219794547e-08, 5.630725219418764e-09, 1.1364532426227925e-08, 1.3505166762683984e-08, 1.222806655931663e-08, 9.673417622479974e-09, 7.242070942936607e-09, 5.38478683864696e-09], "l1-model.layers.2.mlp.down_proj.weight": [75548.15625], "l2-model.layers.2.mlp.down_proj.weight": [13.079431533813477], "linf-model.layers.2.mlp.down_proj.weight": [0.004952100105583668], "request": {"prompt": "The capital city of {} is", "subject": "Odisha", "target_new": {"str": "Hanoi"}, "old_answer": {"str": "Bhubaneswar"}, "seed": 42}}, {"loss_per_step": [12.759, 8.502, 2.634, 0.692, 0.014, 0.004], "prob_new": [0.0005550913047045469, 0.2404576539993286, 0.5020487308502197, 0.6222394108772278, 0.9861165285110474, 0.9963101148605347], "prob_old": [0.9968842267990112, 0.6631311178207397, 0.6484426856040955, 0.6792009472846985, 0.7001528739929199, 0.6973259449005127], "prob_new_token": [7.444958427527126e-09, 8.5692306583951e-08, 0.00515847560018301, 0.2529566287994385, 0.9808226823806763, 0.9980276226997375], "prob_old_token": [0.9980494976043701, 6.093853865962728e-09, 8.435120690819531e-08, 2.8574959287652746e-06, 1.5047486101593677e-07, 1.2639661761681964e-08], "l1-model.layers.2.mlp.down_proj.weight": [53418.88671875], "l2-model.layers.2.mlp.down_proj.weight": [9.154808044433594], "linf-model.layers.2.mlp.down_proj.weight": [0.0024671838618814945], "request": {"prompt": "The capital city of {} is", "subject": "Odisha", "target_new": {"str": "Potenza"}, "old_answer": {"str": "Bhubaneswar"}, "seed": 42}}, {"loss_per_step": [4.645, 1.917, 0.007], "prob_new": [0.4493829905986786, 0.7858139276504517, 0.9929443597793579], "prob_old": [0.9968842267990112, 0.7342881560325623, 0.6609437465667725], "prob_new_token": [5.174894734949476e-09, 7.402402843581513e-05, 0.9688597321510315], "prob_old_token": [0.9980494976043701, 0.0001858011237345636, 3.537869815772865e-06], "l1-model.layers.2.mlp.down_proj.weight": [33912.109375], "l2-model.layers.2.mlp.down_proj.weight": [5.299815654754639], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "The capital city of {} is", "subject": "Odisha", "target_new": {"str": "Abu Dhabi"}, "old_answer": {"str": "Bhubaneswar"}, "seed": 42}}, {"loss_per_step": [11.259, 7.588, 4.009, 0.008], "prob_new": [5.245344073045999e-05, 0.22983507812023163, 0.4988636374473572, 0.9916435480117798], "prob_old": [0.9839443564414978, 
0.6582077145576477, 0.6529779434204102, 0.544574499130249], "prob_new_token": [1.6098865671665408e-06, 5.585958433584892e-07, 0.00033061596332117915, 0.9832907915115356], "prob_old_token": [0.9522981643676758, 6.424802023730081e-09, 2.7111266831525427e-07, 3.8593741535919435e-09], "l1-model.layers.2.mlp.down_proj.weight": [38961.0390625], "l2-model.layers.2.mlp.down_proj.weight": [6.611954689025879], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024840831756592], "request": {"prompt": "The capital city of {} is", "subject": "Paran\u00e1", "target_new": {"str": "Imphal"}, "old_answer": {"str": "Curitiba"}, "seed": 42}}, {"loss_per_step": [5.011, 3.399, 1.009, 0.163, 0.015, 0.005], "prob_new": [0.49632441997528076, 0.49683666229248047, 0.6486208438873291, 0.8611776232719421, 0.9850854277610779, 0.9946925044059753], "prob_old": [0.9839443564414978, 0.6467334628105164, 0.6223399043083191, 0.4881678521633148, 0.38087958097457886, 0.2708394527435303], "prob_new_token": [4.0298532439919654e-06, 0.00018215979798696935, 0.030970681458711624, 0.6545630693435669, 0.957735538482666, 0.9831222295761108], "prob_old_token": [0.9522981643676758, 6.774970415790449e-07, 2.154223011530121e-06, 3.2799616747070104e-06, 3.494303655315889e-07, 1.309827979412148e-07], "l1-model.layers.2.mlp.down_proj.weight": [54639.6328125], "l2-model.layers.2.mlp.down_proj.weight": [9.286968231201172], "linf-model.layers.2.mlp.down_proj.weight": [0.0024931970983743668], "request": {"prompt": "The capital city of {} is", "subject": "Paran\u00e1", "target_new": {"str": "Ljubljana"}, "old_answer": {"str": "Curitiba"}, "seed": 42}}, {"loss_per_step": [8.79, 5.004, 0.392, 0.014, 0.004], "prob_new": [0.013981891795992851, 0.2537202835083008, 0.6844475269317627, 0.9857926368713379, 0.996177077293396], "prob_old": [0.9839443564414978, 0.5708404779434204, 0.48578977584838867, 0.466732919216156, 0.4573734700679779], "prob_new_token": [8.289040351883159e-07, 8.872080798028037e-05, 0.5741517543792725, 0.9965563416481018, 0.9992027878761292], "prob_old_token": [0.9522981643676758, 5.346626252844544e-08, 4.599694136686594e-07, 1.0351077328607516e-08, 1.270969107736164e-09], "l1-model.layers.2.mlp.down_proj.weight": [46108.20703125], "l2-model.layers.2.mlp.down_proj.weight": [7.977010250091553], "linf-model.layers.2.mlp.down_proj.weight": [0.0019818488508462906], "request": {"prompt": "The capital city of {} is", "subject": "Paran\u00e1", "target_new": {"str": "Potenza"}, "old_answer": {"str": "Curitiba"}, "seed": 42}}, {"loss_per_step": [4.676, 2.47, 0.334, 0.351, 0.107, 0.002], "prob_new": [0.5091021060943604, 0.6079021692276001, 0.7878841757774353, 0.7687660455703735, 0.9081746935844421, 0.9982501268386841], "prob_old": [0.990842878818512, 0.06801262497901917, 0.03651030734181404, 0.0001003197903628461, 3.7260169847286306e-06, 2.3533516468887683e-06], "prob_new_token": [1.5303156715162913e-06, 0.0007346601341851056, 0.36937031149864197, 0.93418288230896, 0.99713134765625, 0.9965901374816895], "prob_old_token": [0.990842878818512, 0.06801262497901917, 0.03651030734181404, 0.0001003197903628461, 3.7260169847286306e-06, 2.3533516468887683e-06], "l1-model.layers.2.mlp.down_proj.weight": [52548.43359375], "l2-model.layers.2.mlp.down_proj.weight": [8.9314603805542], "linf-model.layers.2.mlp.down_proj.weight": [0.002510857069864869], "request": {"prompt": "The primary language of {} is", "subject": "Christian Wulff", "target_new": {"str": "Gujarati"}, "old_answer": {"str": "German"}, "seed": 42}}, {"loss_per_step": [5.529, 1.676, 0.796, 
0.171, 0.028, 0.012, 0.008], "prob_new": [0.3469081521034241, 0.49959343671798706, 0.6972383260726929, 0.8660025596618652, 0.9727616310119629, 0.9883842468261719, 0.9920083284378052], "prob_old": [0.990842878818512, 0.11637034267187119, 0.0003121757763437927, 0.0010784503538161516, 3.127539821434766e-05, 8.325813723786268e-06, 5.199895895202644e-06], "prob_new_token": [1.203977376462717e-06, 0.013493333011865616, 0.09189607948064804, 0.5980284810066223, 0.9182974696159363, 0.9651648998260498, 0.976037859916687], "prob_old_token": [0.990842878818512, 0.11637034267187119, 0.0003121757763437927, 0.0010784503538161516, 3.127539821434766e-05, 8.325813723786268e-06, 5.199895895202644e-06], "l1-model.layers.2.mlp.down_proj.weight": [60631.1640625], "l2-model.layers.2.mlp.down_proj.weight": [10.194007873535156], "linf-model.layers.2.mlp.down_proj.weight": [0.0029474105685949326], "request": {"prompt": "The primary language of {} is", "subject": "Christian Wulff", "target_new": {"str": "Cantonese"}, "old_answer": {"str": "German"}, "seed": 42}}, {"loss_per_step": [13.091, 6.581, 1.649, 0.298, 0.014, 0.009], "prob_new": [2.064335831164499e-06, 0.0013860148610547185, 0.192234605550766, 0.7421371340751648, 0.9863116145133972, 0.9914414882659912], "prob_old": [0.990842878818512, 0.00045548792695626616, 0.020151497796177864, 0.0015435321256518364, 0.00012500213051680475, 3.730932803591713e-05], "prob_new_token": [2.064335831164499e-06, 0.0013860148610547185, 0.192234605550766, 0.7421371340751648, 0.9863116145133972, 0.9914414882659912], "prob_old_token": [0.990842878818512, 0.00045548792695626616, 0.020151497796177864, 0.0015435321256518364, 0.00012500213051680475, 3.730932803591713e-05], "l1-model.layers.2.mlp.down_proj.weight": [50685.28125], "l2-model.layers.2.mlp.down_proj.weight": [8.812912940979004], "linf-model.layers.2.mlp.down_proj.weight": [0.0024416991509497166], "request": {"prompt": "The primary language of {} is", "subject": "Christian Wulff", "target_new": {"str": "Japanese"}, "old_answer": {"str": "German"}, "seed": 42}}, {"loss_per_step": [11.297, 0.542, 0.069, 0.007], "prob_new": [1.241232439497253e-05, 0.5816609263420105, 0.933436930179596, 0.993056058883667], "prob_old": [0.9625893235206604, 0.004444988910108805, 0.0013862964697182178, 5.2663734095403925e-05], "prob_new_token": [1.241232439497253e-05, 0.5816609263420105, 0.933436930179596, 0.993056058883667], "prob_old_token": [0.9625893235206604, 0.004444988910108805, 0.0013862964697182178, 5.2663734095403925e-05], "l1-model.layers.2.mlp.down_proj.weight": [44755.375], "l2-model.layers.2.mlp.down_proj.weight": [7.071872711181641], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024536987766623], "request": {"prompt": "The primary language of {} is", "subject": "Linda Lovelace", "target_new": {"str": "Japanese"}, "old_answer": {"str": "English"}, "seed": 42}}, {"loss_per_step": [7.232, 0.873, 0.002], "prob_new": [0.4972023665904999, 0.5861549973487854, 0.9980605840682983], "prob_old": [0.9625893235206604, 0.038465309888124466, 5.64616966585163e-05], "prob_new_token": [5.261017008706403e-07, 0.174790158867836, 0.9977867007255554], "prob_old_token": [0.9625893235206604, 0.038465309888124466, 5.64616966585163e-05], "l1-model.layers.2.mlp.down_proj.weight": [32818.5078125], "l2-model.layers.2.mlp.down_proj.weight": [5.2087836265563965], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "The primary language of {} is", "subject": "Linda Lovelace", "target_new": {"str": "Bengali"}, "old_answer": 
{"str": "English"}, "seed": 42}}, {"loss_per_step": [13.35, 0.243, 0.004], "prob_new": [1.5926823380141286e-06, 0.7841020822525024, 0.9958200454711914], "prob_old": [0.9625893235206604, 0.004762777592986822, 0.0012819238472729921], "prob_new_token": [1.5926823380141286e-06, 0.7841020822525024, 0.9958200454711914], "prob_old_token": [0.9625893235206604, 0.004762777592986822, 0.0012819238472729921], "l1-model.layers.2.mlp.down_proj.weight": [35638.359375], "l2-model.layers.2.mlp.down_proj.weight": [5.440178871154785], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "The primary language of {} is", "subject": "Linda Lovelace", "target_new": {"str": "Turkish"}, "old_answer": {"str": "English"}, "seed": 42}}, {"loss_per_step": [4.466, 1.6, 0.371, 0.143, 0.032, 0.016, 0.01], "prob_new": [0.5887671709060669, 0.7414253950119019, 0.8020504713058472, 0.8803386688232422, 0.9698880314826965, 0.9843022227287292, 0.990623950958252], "prob_old": [0.9968079328536987, 0.05238883197307587, 0.0003305173013359308, 0.00035187427420169115, 0.0001366892538499087, 6.794919318053871e-05, 3.837346957880072e-05], "prob_new_token": [4.9054509787538336e-08, 0.001722919987514615, 0.2318652868270874, 0.630794107913971, 0.8972888588905334, 0.950264036655426, 0.9693477749824524], "prob_old_token": [0.9968079328536987, 0.05238883197307587, 0.0003305173013359308, 0.00035187427420169115, 0.0001366892538499087, 6.794919318053871e-05, 3.837346957880072e-05], "l1-model.layers.2.mlp.down_proj.weight": [63380.39453125], "l2-model.layers.2.mlp.down_proj.weight": [10.515156745910645], "linf-model.layers.2.mlp.down_proj.weight": [0.003011852502822876], "request": {"prompt": "The primary language of {} is", "subject": "Romy Schneider", "target_new": {"str": "Icelandic"}, "old_answer": {"str": "German"}, "seed": 42}}, {"loss_per_step": [12.337, 3.705, 0.312, 0.086, 0.044, 0.03, 0.024, 0.021, 0.018, 0.015, 0.013, 0.011, 0.009], "prob_new": [4.38690176451928e-06, 0.024609675630927086, 0.7321299314498901, 0.9173165559768677, 0.9574149250984192, 0.9702615141868591, 0.9759359955787659, 0.9794812798500061, 0.9823070764541626, 0.984800398349762, 0.9870299100875854, 0.9889874458312988, 0.9906712770462036], "prob_old": [0.9968079328536987, 0.016828851774334908, 0.0006237028283067048, 0.00014080260007176548, 4.00649914809037e-05, 1.6695075828465633e-05, 9.192065590468701e-06, 5.9229864746157546e-06, 4.144959348195698e-06, 3.0251221687649377e-06, 2.261806457681814e-06, 1.7211643807968358e-06, 1.3296385077410378e-06], "prob_new_token": [4.38690176451928e-06, 0.024609675630927086, 0.7321299314498901, 0.9173165559768677, 0.9574149250984192, 0.9702615141868591, 0.9759359955787659, 0.9794812798500061, 0.9823070764541626, 0.984800398349762, 0.9870299100875854, 0.9889874458312988, 0.9906712770462036], "prob_old_token": [0.9968079328536987, 0.016828851774334908, 0.0006237028283067048, 0.00014080260007176548, 4.00649914809037e-05, 1.6695075828465633e-05, 9.192065590468701e-06, 5.9229864746157546e-06, 4.144959348195698e-06, 3.0251221687649377e-06, 2.261806457681814e-06, 1.7211643807968358e-06, 1.3296385077410378e-06], "l1-model.layers.2.mlp.down_proj.weight": [89316.609375], "l2-model.layers.2.mlp.down_proj.weight": [14.759588241577148], "linf-model.layers.2.mlp.down_proj.weight": [0.005772982258349657], "request": {"prompt": "The primary language of {} is", "subject": "Romy Schneider", "target_new": {"str": "Russian"}, "old_answer": {"str": "German"}, "seed": 42}}, {"loss_per_step": [6.144, 1.82, 1.096, 0.032, 
0.017, 0.013, 0.011, 0.01], "prob_new": [0.4353514611721039, 0.6661059260368347, 0.6758365631103516, 0.9692127108573914, 0.9829655885696411, 0.9867417812347412, 0.9887429475784302, 0.9903568029403687], "prob_old": [0.9968079328536987, 0.431763619184494, 4.8926340241450816e-05, 9.500487067271024e-05, 6.155041774036363e-05, 5.264178980723955e-05, 4.823183553526178e-05, 4.402704871608876e-05], "prob_new_token": [3.22539221997431e-08, 0.004282630048692226, 0.03769955784082413, 0.9109609723091125, 0.9520611763000488, 0.9639713764190674, 0.9703718423843384, 0.9747393727302551], "prob_old_token": [0.9968079328536987, 0.431763619184494, 4.8926340241450816e-05, 9.500487067271024e-05, 6.155041774036363e-05, 5.264178980723955e-05, 4.823183553526178e-05, 4.402704871608876e-05], "l1-model.layers.2.mlp.down_proj.weight": [64878.15234375], "l2-model.layers.2.mlp.down_proj.weight": [11.102097511291504], "linf-model.layers.2.mlp.down_proj.weight": [0.0035034306347370148], "request": {"prompt": "The primary language of {} is", "subject": "Romy Schneider", "target_new": {"str": "Latvian"}, "old_answer": {"str": "German"}, "seed": 42}}, {"loss_per_step": [7.73, 2.247, 0.027, 0.005], "prob_new": [0.6589964628219604, 0.6262866854667664, 0.9739801287651062, 0.9953470230102539], "prob_old": [0.9994279146194458, 0.004092323128134012, 0.001027872902341187, 2.6873058232013136e-05], "prob_new_token": [8.681248775399553e-11, 0.0013470028061419725, 0.9391286969184875, 0.9945092797279358], "prob_old_token": [0.9994279146194458, 0.004092323128134012, 0.001027872902341187, 2.6873058232013136e-05], "l1-model.layers.2.mlp.down_proj.weight": [41180.89453125], "l2-model.layers.2.mlp.down_proj.weight": [6.8355488777160645], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024319291114807], "request": {"prompt": "The primary language of {} is", "subject": "Jean-Marc Ayrault", "target_new": {"str": "Mongolian"}, "old_answer": {"str": "French"}, "seed": 42}}, {"loss_per_step": [10.566, 1.254, 0.015, 0.003], "prob_new": [0.4999499022960663, 0.5399826169013977, 0.9853236079216003, 0.997057318687439], "prob_old": [0.9994279146194458, 0.0028783029410988092, 0.00013093091547489166, 1.0558634130575228e-05], "prob_new_token": [6.643527949989902e-10, 0.0814923420548439, 0.9709698557853699, 0.9943704009056091], "prob_old_token": [0.9994279146194458, 0.0028783029410988092, 0.00013093091547489166, 1.0558634130575228e-05], "l1-model.layers.2.mlp.down_proj.weight": [42796.84375], "l2-model.layers.2.mlp.down_proj.weight": [6.938559532165527], "linf-model.layers.2.mlp.down_proj.weight": [0.001502013299614191], "request": {"prompt": "The primary language of {} is", "subject": "Jean-Marc Ayrault", "target_new": {"str": "Finnish"}, "old_answer": {"str": "French"}, "seed": 42}}, {"loss_per_step": [12.39, 3.918, 1.163, 0.009], "prob_new": [0.06628713756799698, 0.46280479431152344, 0.546554446220398, 0.9906668663024902], "prob_old": [0.9994279146194458, 0.027824904769659042, 0.007869882509112358, 0.00012846892059314996], "prob_new_token": [1.3062034787125754e-10, 0.00042756047332659364, 0.09814164787530899, 0.9827948212623596], "prob_old_token": [0.9994279146194458, 0.027824904769659042, 0.007869882509112358, 0.00012846892059314996], "l1-model.layers.2.mlp.down_proj.weight": [42822.421875], "l2-model.layers.2.mlp.down_proj.weight": [6.858340740203857], "linf-model.layers.2.mlp.down_proj.weight": [0.001502482919022441], "request": {"prompt": "The primary language of {} is", "subject": "Jean-Marc Ayrault", "target_new": {"str": "Chechen"}, 
"old_answer": {"str": "French"}, "seed": 42}}, {"loss_per_step": [17.679, 5.571, 0.975, 0.031, 0.012, 0.007], "prob_new": [2.0986867355077266e-08, 0.0038047730922698975, 0.3773067891597748, 0.9697801470756531, 0.9880098104476929, 0.9928221702575684], "prob_old": [0.9841768741607666, 0.0006615116726607084, 0.002679520985111594, 0.0012618256732821465, 0.0008387630223296583, 0.0005611738888546824], "prob_new_token": [2.0986867355077266e-08, 0.0038047730922698975, 0.3773067891597748, 0.9697801470756531, 0.9880098104476929, 0.9928221702575684], "prob_old_token": [0.9841768741607666, 0.0006615116726607084, 0.002679520985111594, 0.0012618256732821465, 0.0008387630223296583, 0.0005611738888546824], "l1-model.layers.2.mlp.down_proj.weight": [59688.921875], "l2-model.layers.2.mlp.down_proj.weight": [9.609328269958496], "linf-model.layers.2.mlp.down_proj.weight": [0.002490493468940258], "request": {"prompt": "The primary language of {} is", "subject": "Gus Van Sant", "target_new": {"str": "Latin"}, "old_answer": {"str": "English"}, "seed": 42}}, {"loss_per_step": [4.289, 1.458, 0.097, 0.037, 0.013, 0.007], "prob_new": [0.6049395799636841, 0.6451210975646973, 0.915477454662323, 0.9647707939147949, 0.987287700176239, 0.9935223460197449], "prob_old": [0.9841768741607666, 0.0004238485998939723, 0.0001568459701957181, 0.00011978091788478196, 4.503610034589656e-05, 2.4202929125749506e-05], "prob_new_token": [3.1704794309916906e-06, 0.013657772913575172, 0.7467871308326721, 0.8944284319877625, 0.9619202613830566, 0.9806093573570251], "prob_old_token": [0.9841768741607666, 0.0004238485998939723, 0.0001568459701957181, 0.00011978091788478196, 4.503610034589656e-05, 2.4202929125749506e-05], "l1-model.layers.2.mlp.down_proj.weight": [53388.23828125], "l2-model.layers.2.mlp.down_proj.weight": [9.259163856506348], "linf-model.layers.2.mlp.down_proj.weight": [0.002493438543751836], "request": {"prompt": "The primary language of {} is", "subject": "Gus Van Sant", "target_new": {"str": "Cantonese"}, "old_answer": {"str": "English"}, "seed": 42}}, {"loss_per_step": [7.642, 1.841, 0.074, 0.015, 0.011, 0.009], "prob_new": [0.499781996011734, 0.512204647064209, 0.9312156438827515, 0.9848853349685669, 0.9893018007278442, 0.9914876818656921], "prob_old": [0.9841768741607666, 0.007521047256886959, 0.0004984657280147076, 8.409799920627847e-05, 4.3865235056728125e-05, 3.1632385798729956e-05], "prob_new_token": [2.30450041271979e-07, 0.025174520909786224, 0.8636334538459778, 0.9705068469047546, 0.9794428944587708, 0.9838113784790039], "prob_old_token": [0.9841768741607666, 0.007521047256886959, 0.0004984657280147076, 8.409799920627847e-05, 4.3865235056728125e-05, 3.1632385798729956e-05], "l1-model.layers.2.mlp.down_proj.weight": [58502.4765625], "l2-model.layers.2.mlp.down_proj.weight": [9.566370964050293], "linf-model.layers.2.mlp.down_proj.weight": [0.002495022490620613], "request": {"prompt": "The primary language of {} is", "subject": "Gus Van Sant", "target_new": {"str": "Danish"}, "old_answer": {"str": "English"}, "seed": 42}}, {"loss_per_step": [5.127, 0.872, 0.022, 0.013, 0.01], "prob_new": [0.005935580004006624, 0.41790762543678284, 0.9781461954116821, 0.9873719215393066, 0.9903483390808105], "prob_old": [0.9442384839057922, 0.0005761125357821584, 1.544834412925411e-05, 3.663817551569082e-06, 2.704186954360921e-06], "prob_new_token": [0.005935580004006624, 0.41790762543678284, 0.9781461954116821, 0.9873719215393066, 0.9903483390808105], "prob_old_token": [0.9442384839057922, 0.0005761125357821584, 
1.544834412925411e-05, 3.663817551569082e-06, 2.704186954360921e-06], "l1-model.layers.2.mlp.down_proj.weight": [51155.828125], "l2-model.layers.2.mlp.down_proj.weight": [8.354645729064941], "linf-model.layers.2.mlp.down_proj.weight": [0.0020052753388881683], "request": {"prompt": "The primary language of {} is", "subject": "Vladim\u00edr \u0160pidla", "target_new": {"str": "English"}, "old_answer": {"str": "Czech"}, "seed": 42}}, {"loss_per_step": [7.333, 3.186, 0.277, 0.013, 0.01, 0.008], "prob_new": [0.4991454482078552, 0.4899778962135315, 0.7872613668441772, 0.9869863986968994, 0.9900238513946533, 0.9917625784873962], "prob_old": [0.9442384839057922, 0.002553860656917095, 0.0001902014628285542, 8.212965667553362e-07, 5.413693884293025e-07, 4.5090160938343615e-07], "prob_new_token": [4.279681604657526e-07, 0.0017451647436246276, 0.5749542117118835, 0.9741441011428833, 0.9802290797233582, 0.9837103486061096], "prob_old_token": [0.9442384839057922, 0.002553860656917095, 0.0001902014628285542, 8.212965667553362e-07, 5.413693884293025e-07, 4.5090160938343615e-07], "l1-model.layers.2.mlp.down_proj.weight": [59589.265625], "l2-model.layers.2.mlp.down_proj.weight": [9.635597229003906], "linf-model.layers.2.mlp.down_proj.weight": [0.002504706149920821], "request": {"prompt": "The primary language of {} is", "subject": "Vladim\u00edr \u0160pidla", "target_new": {"str": "Arabic"}, "old_answer": {"str": "Czech"}, "seed": 42}}, {"loss_per_step": [5.765, 1.8, 0.074, 0.015, 0.005], "prob_new": [0.6659108400344849, 0.6568604707717896, 0.9322635531425476, 0.9853588938713074, 0.9948071241378784], "prob_old": [0.9442384839057922, 0.0018103730399161577, 0.0002228780067525804, 1.5019758393464144e-05, 2.7053040412283735e-06], "prob_new_token": [3.0909859560779296e-08, 0.004669670481234789, 0.8147097229957581, 0.9689227938652039, 0.9912351369857788], "prob_old_token": [0.9442384839057922, 0.0018103730399161577, 0.0002228780067525804, 1.5019758393464144e-05, 2.7053040412283735e-06], "l1-model.layers.2.mlp.down_proj.weight": [53225.46875], "l2-model.layers.2.mlp.down_proj.weight": [8.478957176208496], "linf-model.layers.2.mlp.down_proj.weight": [0.00200478732585907], "request": {"prompt": "The primary language of {} is", "subject": "Vladim\u00edr \u0160pidla", "target_new": {"str": "Mongolian"}, "old_answer": {"str": "Czech"}, "seed": 42}}, {"loss_per_step": [7.532, 0.885, 0.003], "prob_new": [0.49976709485054016, 0.5850610733032227, 0.9965163469314575], "prob_old": [0.9728601574897766, 0.008142411708831787, 2.4028096959227696e-05], "prob_new_token": [2.873083246868191e-07, 0.17037858068943024, 0.9930789470672607], "prob_old_token": [0.9728601574897766, 0.008142411708831787, 2.4028096959227696e-05], "l1-model.layers.2.mlp.down_proj.weight": [31453.361328125], "l2-model.layers.2.mlp.down_proj.weight": [5.083130836486816], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "The primary language of {} is", "subject": "Rosalind Franklin", "target_new": {"str": "Hungarian"}, "old_answer": {"str": "English"}, "seed": 42}}, {"loss_per_step": [6.13, 2.467, 1.538, 0.001], "prob_new": [0.6640305519104004, 0.6662780046463013, 0.6689504981040955, 0.998759388923645], "prob_old": [0.9728601574897766, 0.0013631881447508931, 0.0021807069424539804, 2.334199251663449e-07], "prob_new_token": [1.0392700033889923e-08, 0.0006125894142314792, 0.00995345413684845, 0.9986864924430847], "prob_old_token": [0.9728601574897766, 0.0013631881447508931, 0.0021807069424539804, 2.334199251663449e-07], 
"l1-model.layers.2.mlp.down_proj.weight": [36149.0703125], "l2-model.layers.2.mlp.down_proj.weight": [6.344595909118652], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024831518530846], "request": {"prompt": "The primary language of {} is", "subject": "Rosalind Franklin", "target_new": {"str": "Kurdish"}, "old_answer": {"str": "English"}, "seed": 42}}, {"loss_per_step": [14.16, 5.491, 0.229, 0.002], "prob_new": [7.083477271407901e-07, 0.004125511273741722, 0.7952011227607727, 0.9977645874023438], "prob_old": [0.9728601574897766, 7.055908645270392e-05, 5.662710918841185e-06, 6.56893746509013e-07], "prob_new_token": [7.083477271407901e-07, 0.004125511273741722, 0.7952011227607727, 0.9977645874023438], "prob_old_token": [0.9728601574897766, 7.055908645270392e-05, 5.662710918841185e-06, 6.56893746509013e-07], "l1-model.layers.2.mlp.down_proj.weight": [37347.32421875], "l2-model.layers.2.mlp.down_proj.weight": [6.56914758682251], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024780295789242], "request": {"prompt": "The primary language of {} is", "subject": "Rosalind Franklin", "target_new": {"str": "Turkish"}, "old_answer": {"str": "English"}, "seed": 42}}, {"loss_per_step": [11.158, 1.936, 0.006], "prob_new": [1.4254394045565277e-05, 0.14425989985466003, 0.9942096471786499], "prob_old": [0.9951997995376587, 0.012739800848066807, 0.0015312967589125037], "prob_new_token": [1.4254394045565277e-05, 0.14425989985466003, 0.9942096471786499], "prob_old_token": [0.9951997995376587, 0.012739800848066807, 0.0015312967589125037], "l1-model.layers.2.mlp.down_proj.weight": [33440.40234375], "l2-model.layers.2.mlp.down_proj.weight": [5.257400989532471], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "The primary language of {} is", "subject": "Benjamin Harrison", "target_new": {"str": "French"}, "old_answer": {"str": "English"}, "seed": 42}}, {"loss_per_step": [7.151, 3.029, 0.164, 0.06, 0.022, 0.009], "prob_new": [0.45474013686180115, 0.4710536003112793, 0.8591518402099609, 0.9435222148895264, 0.9788128137588501, 0.9908033609390259], "prob_old": [0.9951997995376587, 0.0020847045816481113, 0.004581722896546125, 0.009132344275712967, 0.0025764142628759146, 0.0011644115438684821], "prob_new_token": [6.765685611753725e-07, 0.002486937679350376, 0.7255734205245972, 0.8879212141036987, 0.9587205052375793, 0.9829526543617249], "prob_old_token": [0.9951997995376587, 0.0020847045816481113, 0.004581722896546125, 0.009132344275712967, 0.0025764142628759146, 0.0011644115438684821], "l1-model.layers.2.mlp.down_proj.weight": [53440.703125], "l2-model.layers.2.mlp.down_proj.weight": [9.239863395690918], "linf-model.layers.2.mlp.down_proj.weight": [0.0024960748851299286], "request": {"prompt": "The primary language of {} is", "subject": "Benjamin Harrison", "target_new": {"str": "Hindi"}, "old_answer": {"str": "English"}, "seed": 42}}, {"loss_per_step": [10.458, 0.696, 0.011, 0.023, 0.023, 0.014, 0.007], "prob_new": [2.871122705982998e-05, 0.49863457679748535, 0.989272952079773, 0.9775202870368958, 0.9774982929229736, 0.9864258766174316, 0.993240475654602], "prob_old": [0.9951997995376587, 0.0026092578191310167, 0.00019230719772167504, 0.0002810293226502836, 0.00018026711768470705, 0.00010815412679221481, 7.145266135921702e-05], "prob_new_token": [2.871122705982998e-05, 0.49863457679748535, 0.989272952079773, 0.9775202870368958, 0.9774982929229736, 0.9864258766174316, 0.993240475654602], "prob_old_token": [0.9951997995376587, 0.0026092578191310167, 
0.00019230719772167504, 0.0002810293226502836, 0.00018026711768470705, 0.00010815412679221481, 7.145266135921702e-05], "l1-model.layers.2.mlp.down_proj.weight": [66412.4921875], "l2-model.layers.2.mlp.down_proj.weight": [10.732258796691895], "linf-model.layers.2.mlp.down_proj.weight": [0.0030060075223445892], "request": {"prompt": "The primary language of {} is", "subject": "Benjamin Harrison", "target_new": {"str": "German"}, "old_answer": {"str": "English"}, "seed": 42}}, {"loss_per_step": [13.863, 10.713, 2.146, 0.219, 0.101, 0.058, 0.036, 0.023, 0.015, 0.01, 0.007], "prob_new": [9.535842764307745e-07, 2.224498166469857e-05, 0.1169891282916069, 0.8031078577041626, 0.904285192489624, 0.943629801273346, 0.9644030332565308, 0.9769245982170105, 0.9847893714904785, 0.989795982837677, 0.9929492473602295], "prob_old": [0.9978189468383789, 0.5214390158653259, 0.12306299060583115, 0.006234634667634964, 0.0014071870828047395, 0.0005179109866730869, 0.0002599840227048844, 0.00016805851191747934, 0.00012552370026241988, 9.998882887884974e-05, 8.192353561753407e-05], "prob_new_token": [9.535842764307745e-07, 2.224498166469857e-05, 0.1169891282916069, 0.8031078577041626, 0.904285192489624, 0.943629801273346, 0.9644030332565308, 0.9769245982170105, 0.9847893714904785, 0.989795982837677, 0.9929492473602295], "prob_old_token": [0.9978189468383789, 0.5214390158653259, 0.12306299060583115, 0.006234634667634964, 0.0014071870828047395, 0.0005179109866730869, 0.0002599840227048844, 0.00016805851191747934, 0.00012552370026241988, 9.998882887884974e-05, 8.192353561753407e-05], "l1-model.layers.2.mlp.down_proj.weight": [79094.0078125], "l2-model.layers.2.mlp.down_proj.weight": [13.414920806884766], "linf-model.layers.2.mlp.down_proj.weight": [0.004905599635094404], "request": {"prompt": "The primary language of {} is", "subject": "St\u00e9phane Audran", "target_new": {"str": "Polish"}, "old_answer": {"str": "French"}, "seed": 42}}, {"loss_per_step": [12.902, 9.135, 3.845, 0.998, 0.232, 0.064, 0.026, 0.014, 0.01], "prob_new": [2.492432940925937e-06, 0.00010783803736558184, 0.02139626070857048, 0.36869096755981445, 0.7928256392478943, 0.9382243156433105, 0.9746807217597961, 0.9858418107032776, 0.9902040958404541], "prob_old": [0.9978189468383789, 0.6488350033760071, 0.042539168149232864, 0.056991543620824814, 0.01758807711303234, 0.0031907677184790373, 0.0005732062854804099, 0.0001336171117145568, 4.4454609451349825e-05], "prob_new_token": [2.492432940925937e-06, 0.00010783803736558184, 0.02139626070857048, 0.36869096755981445, 0.7928256392478943, 0.9382243156433105, 0.9746807217597961, 0.9858418107032776, 0.9902040958404541], "prob_old_token": [0.9978189468383789, 0.6488350033760071, 0.042539168149232864, 0.056991543620824814, 0.01758807711303234, 0.0031907677184790373, 0.0005732062854804099, 0.0001336171117145568, 4.4454609451349825e-05], "l1-model.layers.2.mlp.down_proj.weight": [74261.203125], "l2-model.layers.2.mlp.down_proj.weight": [12.384961128234863], "linf-model.layers.2.mlp.down_proj.weight": [0.003861472010612488], "request": {"prompt": "The primary language of {} is", "subject": "St\u00e9phane Audran", "target_new": {"str": "Russian"}, "old_answer": {"str": "French"}, "seed": 42}}, {"loss_per_step": [5.685, 1.684, 0.868, 0.053, 0.007], "prob_new": [0.4878215789794922, 0.6074974536895752, 0.6656854152679443, 0.9496309161186218, 0.9934310913085938], "prob_old": [0.9978189468383789, 0.5370989441871643, 0.006799730006605387, 0.005375734064728022, 4.0585771785117686e-05], "prob_new_token": 
[8.422757957760041e-08, 0.007822106592357159, 0.08063826709985733, 0.8842048645019531, 0.9936028122901917], "prob_old_token": [0.9978189468383789, 0.5370989441871643, 0.006799730006605387, 0.005375734064728022, 4.0585771785117686e-05], "l1-model.layers.2.mlp.down_proj.weight": [50743.5703125], "l2-model.layers.2.mlp.down_proj.weight": [8.272985458374023], "linf-model.layers.2.mlp.down_proj.weight": [0.0020057260990142822], "request": {"prompt": "The primary language of {} is", "subject": "St\u00e9phane Audran", "target_new": {"str": "Belarusian"}, "old_answer": {"str": "French"}, "seed": 42}}, {"loss_per_step": [11.457, 4.321, 0.053, 0.021, 0.025, 0.019, 0.012, 0.007], "prob_new": [0.0005035444046370685, 0.08675745874643326, 0.9500359296798706, 0.9794549942016602, 0.9755475521087646, 0.9816664457321167, 0.9885026216506958, 0.9927545785903931], "prob_old": [0.9980972409248352, 0.41999927163124084, 0.0371384397149086, 0.00296391942538321, 0.0018860320560634136, 0.0011174397077411413, 0.0006134537397883832, 0.0003318868693895638], "prob_new_token": [1.1095500696001181e-07, 0.001024165889248252, 0.9015213251113892, 0.9603419899940491, 0.9535424113273621, 0.9662240147590637, 0.9802222847938538, 0.9890215992927551], "prob_old_token": [0.9980972409248352, 0.41999927163124084, 0.0371384397149086, 0.00296391942538321, 0.0018860320560634136, 0.0011174397077411413, 0.0006134537397883832, 0.0003318868693895638], "l1-model.layers.2.mlp.down_proj.weight": [68741.515625], "l2-model.layers.2.mlp.down_proj.weight": [11.489177703857422], "linf-model.layers.2.mlp.down_proj.weight": [0.003494677133858204], "request": {"prompt": "The primary language of {} is", "subject": "Pierre Corneille", "target_new": {"str": "Romani"}, "old_answer": {"str": "French"}, "seed": 42}}, {"loss_per_step": [7.344, 3.58, 1.445, 0.858, 0.177, 0.014, 0.003], "prob_new": [0.49314913153648376, 0.510010838508606, 0.5671465992927551, 0.7518173456192017, 0.8727980852127075, 0.9868324995040894, 0.9972380995750427], "prob_old": [0.9980972409248352, 0.761706531047821, 0.02981848269701004, 0.03644709661602974, 0.0028587388806045055, 4.196961526758969e-05, 2.598721948743332e-06], "prob_new_token": [7.111033983875359e-10, 1.3581701750808861e-05, 0.011651602573692799, 0.03319258242845535, 0.4938199818134308, 0.9481433033943176, 0.9895232319831848], "prob_old_token": [0.9980972409248352, 0.761706531047821, 0.02981848269701004, 0.03644709661602974, 0.0028587388806045055, 4.196961526758969e-05, 2.598721948743332e-06], "l1-model.layers.2.mlp.down_proj.weight": [64145.921875], "l2-model.layers.2.mlp.down_proj.weight": [10.568422317504883], "linf-model.layers.2.mlp.down_proj.weight": [0.002969331108033657], "request": {"prompt": "The primary language of {} is", "subject": "Pierre Corneille", "target_new": {"str": "Uyghur"}, "old_answer": {"str": "French"}, "seed": 42}}, {"loss_per_step": [13.037, 0.796, 0.012, 0.022, 0.01], "prob_new": [2.1771897991129663e-06, 0.4509994089603424, 0.9880378842353821, 0.9779914617538452, 0.9902032017707825], "prob_old": [0.9980972409248352, 0.3182584047317505, 3.954878047807142e-05, 3.990400000475347e-05, 1.2343316484475508e-05], "prob_new_token": [2.1771897991129663e-06, 0.4509994089603424, 0.9880378842353821, 0.9779914617538452, 0.9902032017707825], "prob_old_token": [0.9980972409248352, 0.3182584047317505, 3.954878047807142e-05, 3.990400000475347e-05, 1.2343316484475508e-05], "l1-model.layers.2.mlp.down_proj.weight": [51916.37109375], "l2-model.layers.2.mlp.down_proj.weight": [8.402379989624023], 
"linf-model.layers.2.mlp.down_proj.weight": [0.0020052080508321524], "request": {"prompt": "The primary language of {} is", "subject": "Pierre Corneille", "target_new": {"str": "Italian"}, "old_answer": {"str": "French"}, "seed": 42}}, {"loss_per_step": [7.988, 0.507, 0.021, 0.007], "prob_new": [0.0003393803781364113, 0.6025713682174683, 0.9793566465377808, 0.9928308725357056], "prob_old": [0.9955835938453674, 0.2626616656780243, 0.011846250854432583, 0.002594816731289029], "prob_new_token": [0.0003393803781364113, 0.6025713682174683, 0.9793566465377808, 0.9928308725357056], "prob_old_token": [0.9955835938453674, 0.2626616656780243, 0.011846250854432583, 0.002594816731289029], "l1-model.layers.2.mlp.down_proj.weight": [47224.6875], "l2-model.layers.2.mlp.down_proj.weight": [7.21719217300415], "linf-model.layers.2.mlp.down_proj.weight": [0.0015021376311779022], "request": {"prompt": "The primary language of {} is", "subject": "Mauricio Funes", "target_new": {"str": "English"}, "old_answer": {"str": "Spanish"}, "seed": 42}}, {"loss_per_step": [4.177, 1.57, 0.352, 0.061, 0.016, 0.008], "prob_new": [0.5350614190101624, 0.7240621447563171, 0.7827199697494507, 0.9445658922195435, 0.9847362041473389, 0.9920355081558228], "prob_old": [0.9955835938453674, 0.15558262169361115, 0.0009047062485478818, 0.0002794964821077883, 0.00010800478048622608, 4.707300467998721e-05], "prob_new_token": [3.770396403979248e-07, 0.002091064350679517, 0.2871837019920349, 0.7991840243339539, 0.9421499371528625, 0.9697626829147339], "prob_old_token": [0.9955835938453674, 0.15558262169361115, 0.0009047062485478818, 0.0002794964821077883, 0.00010800478048622608, 4.707300467998721e-05], "l1-model.layers.2.mlp.down_proj.weight": [55510.1328125], "l2-model.layers.2.mlp.down_proj.weight": [9.357473373413086], "linf-model.layers.2.mlp.down_proj.weight": [0.002495754975825548], "request": {"prompt": "The primary language of {} is", "subject": "Mauricio Funes", "target_new": {"str": "Icelandic"}, "old_answer": {"str": "Spanish"}, "seed": 42}}, {"loss_per_step": [3.816, 1.762, 0.413, 0.237, 0.051, 0.013, 0.008], "prob_new": [0.49598899483680725, 0.6061846613883972, 0.7545616030693054, 0.8601813316345215, 0.9548892974853516, 0.9874769449234009, 0.9921908378601074], "prob_old": [0.9955835938453674, 0.387282133102417, 0.09286204725503922, 0.015209749341011047, 0.0029214401729404926, 0.0007706396863795817, 0.000236143619986251], "prob_new_token": [4.273048489267239e-06, 0.002530408790335059, 0.23406533896923065, 0.30717721581459045, 0.7769235372543335, 0.9396973848342896, 0.9637382626533508], "prob_old_token": [0.9955835938453674, 0.387282133102417, 0.09286204725503922, 0.015209749341011047, 0.0029214401729404926, 0.0007706396863795817, 0.000236143619986251], "l1-model.layers.2.mlp.down_proj.weight": [59167.16015625], "l2-model.layers.2.mlp.down_proj.weight": [10.171133041381836], "linf-model.layers.2.mlp.down_proj.weight": [0.002978544682264328], "request": {"prompt": "The primary language of {} is", "subject": "Mauricio Funes", "target_new": {"str": "Azerbaijani"}, "old_answer": {"str": "Spanish"}, "seed": 42}}, {"loss_per_step": [8.398, 0.806, 0.041, 0.044, 0.047, 0.039, 0.021, 0.011, 0.006], "prob_new": [0.49998658895492554, 0.5995892882347107, 0.9602009654045105, 0.9578145146369934, 0.9547781944274902, 0.9627650380134583, 0.9791041612625122, 0.9893602132797241, 0.9940816760063171], "prob_old": [0.9674507975578308, 0.00010211760672973469, 6.672733434243128e-05, 6.385312008205801e-05, 5.37530577275902e-05, 3.645861579570919e-05, 
1.8059585272567347e-05, 8.080698535195552e-06, 3.840892986772815e-06], "prob_new_token": [5.075977327351211e-08, 0.19936266541481018, 0.9204963445663452, 0.9157829284667969, 0.90972900390625, 0.9256792664527893, 0.9583148956298828, 0.978793203830719, 0.9882149696350098], "prob_old_token": [0.9674507975578308, 0.00010211760672973469, 6.672733434243128e-05, 6.385312008205801e-05, 5.37530577275902e-05, 3.645861579570919e-05, 1.8059585272567347e-05, 8.080698535195552e-06, 3.840892986772815e-06], "l1-model.layers.2.mlp.down_proj.weight": [75378.828125], "l2-model.layers.2.mlp.down_proj.weight": [12.313876152038574], "linf-model.layers.2.mlp.down_proj.weight": [0.003991266712546349], "request": {"prompt": "The primary language of {} is", "subject": "Melissa Benoist", "target_new": {"str": "Ukrainian"}, "old_answer": {"str": "English"}, "seed": 42}}, {"loss_per_step": [15.051, 3.467, 0.063, 0.039, 0.031, 0.025, 0.022, 0.02, 0.019, 0.019, 0.018, 0.017, 0.014, 0.011, 0.009], "prob_new": [2.905808571540547e-07, 0.031223926693201065, 0.9390919804573059, 0.9621765613555908, 0.9697245359420776, 0.9749436974525452, 0.9785223603248596, 0.9805213212966919, 0.9812700748443604, 0.9813850522041321, 0.9817826747894287, 0.983288586139679, 0.9858551621437073, 0.9886935353279114, 0.9911508560180664], "prob_old": [0.9674507975578308, 1.2667255759879481e-05, 0.0012562454212456942, 0.002166872378438711, 0.0020396574400365353, 0.0015610336558893323, 0.0010817785514518619, 0.0007372988620772958, 0.0005170448566786945, 0.0003783715364988893, 0.0002891681215260178, 0.00022956158500164747, 0.00018683544476516545, 0.00015381273988168687, 0.00012723378313239664], "prob_new_token": [2.905808571540547e-07, 0.031223926693201065, 0.9390919804573059, 0.9621765613555908, 0.9697245359420776, 0.9749436974525452, 0.9785223603248596, 0.9805213212966919, 0.9812700748443604, 0.9813850522041321, 0.9817826747894287, 0.983288586139679, 0.9858551621437073, 0.9886935353279114, 0.9911508560180664], "prob_old_token": [0.9674507975578308, 1.2667255759879481e-05, 0.0012562454212456942, 0.002166872378438711, 0.0020396574400365353, 0.0015610336558893323, 0.0010817785514518619, 0.0007372988620772958, 0.0005170448566786945, 0.0003783715364988893, 0.0002891681215260178, 0.00022956158500164747, 0.00018683544476516545, 0.00015381273988168687, 0.00012723378313239664], "l1-model.layers.2.mlp.down_proj.weight": [93126.0703125], "l2-model.layers.2.mlp.down_proj.weight": [15.548073768615723], "linf-model.layers.2.mlp.down_proj.weight": [0.0070222895592451096], "request": {"prompt": "The primary language of {} is", "subject": "Melissa Benoist", "target_new": {"str": "Polish"}, "old_answer": {"str": "English"}, "seed": 42}}, {"loss_per_step": [8.859, 0.453, 0.02, 0.009], "prob_new": [0.4999883770942688, 0.7019231915473938, 0.9804118871688843, 0.9909913539886475], "prob_old": [0.9674507975578308, 0.003189932554960251, 0.00014502645353786647, 2.6656200134311803e-05], "prob_new_token": [2.0208528184184615e-08, 0.40397506952285767, 0.9608443975448608, 0.9820150136947632], "prob_old_token": [0.9674507975578308, 0.003189932554960251, 0.00014502645353786647, 2.6656200134311803e-05], "l1-model.layers.2.mlp.down_proj.weight": [43476.3359375], "l2-model.layers.2.mlp.down_proj.weight": [6.968486785888672], "linf-model.layers.2.mlp.down_proj.weight": [0.0015023872256278992], "request": {"prompt": "The primary language of {} is", "subject": "Melissa Benoist", "target_new": {"str": "Bulgarian"}, "old_answer": {"str": "English"}, "seed": 42}}, {"loss_per_step": [5.735, 
1.529, 0.152, 0.001], "prob_new": [0.47555288672447205, 0.6544796824455261, 0.8772265315055847, 0.9989606142044067], "prob_old": [0.992088794708252, 0.08804696798324585, 0.00011412093590479344, 2.927447440015385e-06], "prob_new_token": [7.907333809953343e-08, 0.010676155798137188, 0.6388716101646423, 0.9980704188346863], "prob_old_token": [0.992088794708252, 0.08804696798324585, 0.00011412093590479344, 2.927447440015385e-06], "l1-model.layers.2.mlp.down_proj.weight": [41965.359375], "l2-model.layers.2.mlp.down_proj.weight": [6.843615531921387], "linf-model.layers.2.mlp.down_proj.weight": [0.001502474769949913], "request": {"prompt": "The primary language of {} is", "subject": "Herbert Hoover", "target_new": {"str": "Yiddish"}, "old_answer": {"str": "English"}, "seed": 42}}, {"loss_per_step": [7.501, 2.732, 0.052, 0.017, 0.006], "prob_new": [0.2153482586145401, 0.1958879828453064, 0.9505686163902283, 0.9836753606796265, 0.9944297671318054], "prob_old": [0.992088794708252, 0.3039255738258362, 0.0005351541913114488, 7.193544297479093e-05, 1.57257636601571e-05], "prob_new_token": [7.094767511262035e-07, 0.011134430766105652, 0.9033862352371216, 0.9685659408569336, 0.9895823001861572], "prob_old_token": [0.992088794708252, 0.3039255738258362, 0.0005351541913114488, 7.193544297479093e-05, 1.57257636601571e-05], "l1-model.layers.2.mlp.down_proj.weight": [49435.36328125], "l2-model.layers.2.mlp.down_proj.weight": [8.288253784179688], "linf-model.layers.2.mlp.down_proj.weight": [0.0020035691559314728], "request": {"prompt": "The primary language of {} is", "subject": "Herbert Hoover", "target_new": {"str": "Hindi"}, "old_answer": {"str": "English"}, "seed": 42}}, {"loss_per_step": [6.539, 0.126, 0.031, 0.01, 0.005], "prob_new": [0.0014465184649452567, 0.8814710974693298, 0.9695156812667847, 0.9897078275680542, 0.9946327805519104], "prob_old": [0.992088794708252, 0.004862832371145487, 0.0006840041023679078, 0.00023513058840762824, 0.00010740451398305595], "prob_new_token": [0.0014465184649452567, 0.8814710974693298, 0.9695156812667847, 0.9897078275680542, 0.9946327805519104], "prob_old_token": [0.992088794708252, 0.004862832371145487, 0.0006840041023679078, 0.00023513058840762824, 0.00010740451398305595], "l1-model.layers.2.mlp.down_proj.weight": [57905.015625], "l2-model.layers.2.mlp.down_proj.weight": [8.723962783813477], "linf-model.layers.2.mlp.down_proj.weight": [0.0020023626275360584], "request": {"prompt": "The primary language of {} is", "subject": "Herbert Hoover", "target_new": {"str": "German"}, "old_answer": {"str": "English"}, "seed": 42}}, {"loss_per_step": [13.432, 1.594, 4.416, 0.009], "prob_new": [1.4678341813123552e-06, 0.20304468274116516, 0.012079119682312012, 0.9913472533226013], "prob_old": [0.9665186405181885, 0.0016904951771721244, 3.897973510902375e-05, 0.0003430262440815568], "prob_new_token": [1.4678341813123552e-06, 0.20304468274116516, 0.012079119682312012, 0.9913472533226013], "prob_old_token": [0.9665186405181885, 0.0016904951771721244, 3.897973510902375e-05, 0.0003430262440815568], "l1-model.layers.2.mlp.down_proj.weight": [41026.703125], "l2-model.layers.2.mlp.down_proj.weight": [6.695867538452148], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024840831756592], "request": {"prompt": "The primary language of {} is", "subject": "Andrew Cuomo", "target_new": {"str": "Portuguese"}, "old_answer": {"str": "English"}, "seed": 42}}, {"loss_per_step": [13.35, 0.33, 3.239, 0.023, 0.013, 0.008], "prob_new": [1.5923467344691744e-06, 0.7191306352615356, 0.03921351954340935, 
0.9774607419967651, 0.9867995977401733, 0.9920756816864014], "prob_old": [0.9665186405181885, 0.003080483991652727, 3.0887098546372727e-05, 7.1634967753198e-05, 4.9111225962406024e-05, 5.984663948765956e-05], "prob_new_token": [1.5923467344691744e-06, 0.7191306352615356, 0.03921351954340935, 0.9774607419967651, 0.9867995977401733, 0.9920756816864014], "prob_old_token": [0.9665186405181885, 0.003080483991652727, 3.0887098546372727e-05, 7.1634967753198e-05, 4.9111225962406024e-05, 5.984663948765956e-05], "l1-model.layers.2.mlp.down_proj.weight": [55342.5390625], "l2-model.layers.2.mlp.down_proj.weight": [9.237300872802734], "linf-model.layers.2.mlp.down_proj.weight": [0.0024979710578918457], "request": {"prompt": "The primary language of {} is", "subject": "Andrew Cuomo", "target_new": {"str": "Dutch"}, "old_answer": {"str": "English"}, "seed": 42}}, {"loss_per_step": [8.818, 0.749, 0.0], "prob_new": [0.4958382546901703, 0.6020997762680054, 0.9997780323028564], "prob_old": [0.9665186405181885, 0.006753383670002222, 2.4133987608365715e-07], "prob_new_token": [2.2087416340355048e-08, 0.22907084226608276, 0.9997697472572327], "prob_old_token": [0.9665186405181885, 0.006753383670002222, 2.4133987608365715e-07], "l1-model.layers.2.mlp.down_proj.weight": [32033.078125], "l2-model.layers.2.mlp.down_proj.weight": [5.132455348968506], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "The primary language of {} is", "subject": "Andrew Cuomo", "target_new": {"str": "Urdu"}, "old_answer": {"str": "English"}, "seed": 42}}, {"loss_per_step": [4.621, 2.635, 0.905, 6.841, 0.248, 0.17, 0.043, 0.035, 0.028, 0.015, 0.01], "prob_new": [0.6457703113555908, 0.6418989300727844, 0.6858050227165222, 0.0017245747148990631, 0.8232806921005249, 0.865645170211792, 0.958938479423523, 0.9664222598075867, 0.9730007648468018, 0.9854409694671631, 0.9904162287712097], "prob_old": [0.9906373023986816, 0.009081726893782616, 0.026090258732438087, 0.00022840723977424204, 0.025619477033615112, 0.01136439386755228, 0.001847440842539072, 0.000923758780118078, 0.0004926151596009731, 0.00021018255210947245, 0.00011086843733210117], "prob_new_token": [1.0176809155382216e-06, 0.00039852276677265763, 0.06680701673030853, 0.0016832125838845968, 0.4792814254760742, 0.6040733456611633, 0.8845773339271545, 0.9088914394378662, 0.9357625246047974, 0.9633223414421082, 0.9765372276306152], "prob_old_token": [0.9906373023986816, 0.009081726893782616, 0.026090258732438087, 0.00022840723977424204, 0.025619477033615112, 0.01136439386755228, 0.001847440842539072, 0.000923758780118078, 0.0004926151596009731, 0.00021018255210947245, 0.00011086843733210117], "l1-model.layers.2.mlp.down_proj.weight": [67616.6796875], "l2-model.layers.2.mlp.down_proj.weight": [12.225850105285645], "linf-model.layers.2.mlp.down_proj.weight": [0.004801759496331215], "request": {"prompt": "{} is situated within the continent of", "subject": "Tunisia", "target_new": {"str": "Oceania"}, "old_answer": {"str": "Africa"}, "seed": 42}}, {"loss_per_step": [9.619, 2.747, 0.768, 0.099, 0.026, 0.016, 0.011, 0.01], "prob_new": [0.0018492210656404495, 0.2408631592988968, 0.5640913248062134, 0.9091987609863281, 0.9739423990249634, 0.9846110939979553, 0.9886751174926758, 0.9905385971069336], "prob_old": [0.9906373023986816, 0.03770509362220764, 0.03413904085755348, 0.00012129679817007855, 5.659670932800509e-06, 2.9768050353595754e-06, 2.2019150947016897e-06, 1.7188268657264416e-06], "prob_new_token": [1.1954115279877442e-06, 0.00869214441627264, 
0.2432982474565506, 0.8330487608909607, 0.962724506855011, 0.9784846901893616, 0.9844139814376831, 0.9873043894767761], "prob_old_token": [0.9906373023986816, 0.03770509362220764, 0.03413904085755348, 0.00012129679817007855, 5.659670932800509e-06, 2.9768050353595754e-06, 2.2019150947016897e-06, 1.7188268657264416e-06], "l1-model.layers.2.mlp.down_proj.weight": [68542.640625], "l2-model.layers.2.mlp.down_proj.weight": [11.430493354797363], "linf-model.layers.2.mlp.down_proj.weight": [0.003414156846702099], "request": {"prompt": "{} is situated within the continent of", "subject": "Tunisia", "target_new": {"str": "South America"}, "old_answer": {"str": "Africa"}, "seed": 42}}, {"loss_per_step": [6.269, 4.291, 0.216, 0.053, 0.084, 0.012, 0.008], "prob_new": [0.0018949591321870685, 0.013689303770661354, 0.8057428598403931, 0.9488183856010437, 0.9189839363098145, 0.9876969456672668, 0.9924232959747314], "prob_old": [0.9906373023986816, 0.00010928072151727974, 0.0002367325796512887, 0.00017492828192189336, 5.476516889757477e-05, 6.968326488276944e-05, 8.286361116915941e-05], "prob_new_token": [0.0018949591321870685, 0.013689303770661354, 0.8057428598403931, 0.9488183856010437, 0.9189839363098145, 0.9876969456672668, 0.9924232959747314], "prob_old_token": [0.9906373023986816, 0.00010928072151727974, 0.0002367325796512887, 0.00017492828192189336, 5.476516889757477e-05, 6.968326488276944e-05, 8.286361116915941e-05], "l1-model.layers.2.mlp.down_proj.weight": [60869.1796875], "l2-model.layers.2.mlp.down_proj.weight": [10.301820755004883], "linf-model.layers.2.mlp.down_proj.weight": [0.0029575626831501722], "request": {"prompt": "{} is situated within the continent of", "subject": "Tunisia", "target_new": {"str": "Europe"}, "old_answer": {"str": "Africa"}, "seed": 42}}, {"loss_per_step": [2.094, 0.048, 0.001], "prob_new": [0.5051564574241638, 0.9543313384056091, 0.9992095232009888], "prob_old": [0.9579048156738281, 2.1865233065909706e-05, 1.948142056562574e-07], "prob_new_token": [0.015255582518875599, 0.9164170026779175, 0.9996065497398376], "prob_old_token": [0.9579048156738281, 2.1865233065909706e-05, 1.948142056562574e-07], "l1-model.layers.2.mlp.down_proj.weight": [36614.60546875], "l2-model.layers.2.mlp.down_proj.weight": [5.517865180969238], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006783995777369], "request": {"prompt": "{} is situated within the continent of", "subject": "Reykjav\u00edk", "target_new": {"str": "North America"}, "old_answer": {"str": "Europe"}, "seed": 42}}, {"loss_per_step": [3.818, 0.518, 0.023, 0.011, 0.01], "prob_new": [0.6643515825271606, 0.7309860587120056, 0.9774743318557739, 0.9891794919967651, 0.9901517629623413], "prob_old": [0.9579048156738281, 0.0005318161565810442, 0.0001519301877124235, 9.763007255969569e-06, 5.374221473175567e-06], "prob_new_token": [1.0676332749426365e-05, 0.21656064689159393, 0.9540897607803345, 0.9853070974349976, 0.9845137596130371], "prob_old_token": [0.9579048156738281, 0.0005318161565810442, 0.0001519301877124235, 9.763007255969569e-06, 5.374221473175567e-06], "l1-model.layers.2.mlp.down_proj.weight": [49120.0390625], "l2-model.layers.2.mlp.down_proj.weight": [8.180696487426758], "linf-model.layers.2.mlp.down_proj.weight": [0.002001841552555561], "request": {"prompt": "{} is situated within the continent of", "subject": "Reykjav\u00edk", "target_new": {"str": "Antarctica"}, "old_answer": {"str": "Europe"}, "seed": 42}}, {"loss_per_step": [4.713, 0.4, 0.03, 0.009], "prob_new": [0.40660327672958374, 0.6803030371665955, 
0.9709633588790894, 0.9909912347793579], "prob_old": [0.9579048156738281, 0.0018297149799764156, 0.0006205662502907217, 0.00012257840717211366], "prob_new_token": [9.905594924930483e-05, 0.5653463006019592, 0.9460371732711792, 0.9866389036178589], "prob_old_token": [0.9579048156738281, 0.0018297149799764156, 0.0006205662502907217, 0.00012257840717211366], "l1-model.layers.2.mlp.down_proj.weight": [44637.4921875], "l2-model.layers.2.mlp.down_proj.weight": [7.088265419006348], "linf-model.layers.2.mlp.down_proj.weight": [0.0015023918822407722], "request": {"prompt": "{} is situated within the continent of", "subject": "Reykjav\u00edk", "target_new": {"str": "South America"}, "old_answer": {"str": "Europe"}, "seed": 42}}, {"loss_per_step": [6.312, 0.23, 0.039, 0.003], "prob_new": [0.0018147091614082456, 0.7943196296691895, 0.9615755081176758, 0.9966117143630981], "prob_old": [0.937351644039154, 0.0002739914634730667, 0.000300029874779284, 5.214468183112331e-05], "prob_new_token": [0.0018147091614082456, 0.7943196296691895, 0.9615755081176758, 0.9966117143630981], "prob_old_token": [0.937351644039154, 0.0002739914634730667, 0.000300029874779284, 5.214468183112331e-05], "l1-model.layers.2.mlp.down_proj.weight": [44510.55078125], "l2-model.layers.2.mlp.down_proj.weight": [7.061140537261963], "linf-model.layers.2.mlp.down_proj.weight": [0.001502462662756443], "request": {"prompt": "{} is situated within the continent of", "subject": "Anambra State", "target_new": {"str": "Asia"}, "old_answer": {"str": "Africa"}, "seed": 42}}, {"loss_per_step": [6.897, 3.563, 0.317, 0.033, 0.018, 0.018, 0.016, 0.01], "prob_new": [0.001011053565889597, 0.02836732193827629, 0.728421151638031, 0.9677227139472961, 0.981759786605835, 0.9817022681236267, 0.9837856888771057, 0.9902173280715942], "prob_old": [0.937351644039154, 0.0005818643840029836, 0.002536339918151498, 0.00014075575745664537, 6.48379500489682e-05, 0.00012041255831718445, 0.0002471409970894456, 0.0003231522860005498], "prob_new_token": [0.001011053565889597, 0.02836732193827629, 0.728421151638031, 0.9677227139472961, 0.981759786605835, 0.9817022681236267, 0.9837856888771057, 0.9902173280715942], "prob_old_token": [0.937351644039154, 0.0005818643840029836, 0.002536339918151498, 0.00014075575745664537, 6.48379500489682e-05, 0.00012041255831718445, 0.0002471409970894456, 0.0003231522860005498], "l1-model.layers.2.mlp.down_proj.weight": [67013.2421875], "l2-model.layers.2.mlp.down_proj.weight": [11.36131477355957], "linf-model.layers.2.mlp.down_proj.weight": [0.0034817522391676903], "request": {"prompt": "{} is situated within the continent of", "subject": "Anambra State", "target_new": {"str": "Europe"}, "old_answer": {"str": "Africa"}, "seed": 42}}, {"loss_per_step": [6.941, 1.956, 0.298, 0.11, 0.045, 0.022, 0.014, 0.01, 0.009], "prob_new": [0.16421230137348175, 0.487260103225708, 0.7728761434555054, 0.9000232219696045, 0.956804096698761, 0.9785445332527161, 0.9865465760231018, 0.9896743297576904, 0.9912052154541016], "prob_old": [0.937351644039154, 0.001330625033006072, 0.01008069422096014, 0.0020711272954940796, 0.00040551534038968384, 0.00011259041639277712, 4.7337936848634854e-05, 2.6551695555099286e-05, 1.7598857084522024e-05], "prob_new_token": [2.850030568879447e-06, 0.020963327959179878, 0.5565028190612793, 0.8096522688865662, 0.9201986193656921, 0.9616625308990479, 0.9766943454742432, 0.9826352596282959, 0.9856725335121155], "prob_old_token": [0.937351644039154, 0.001330625033006072, 0.01008069422096014, 0.0020711272954940796, 
0.00040551534038968384, 0.00011259041639277712, 4.7337936848634854e-05, 2.6551695555099286e-05, 1.7598857084522024e-05], "l1-model.layers.2.mlp.down_proj.weight": [71005.90625], "l2-model.layers.2.mlp.down_proj.weight": [12.084965705871582], "linf-model.layers.2.mlp.down_proj.weight": [0.003958765882998705], "request": {"prompt": "{} is situated within the continent of", "subject": "Anambra State", "target_new": {"str": "North America"}, "old_answer": {"str": "Africa"}, "seed": 42}}, {"loss_per_step": [3.092, 0.276, 0.001], "prob_new": [0.6632838249206543, 0.8118388056755066, 0.9991168975830078], "prob_old": [0.9752931594848633, 0.12152554839849472, 3.1439216400031e-05], "prob_new_token": [9.459496504859999e-05, 0.438233882188797, 0.997610330581665], "prob_old_token": [0.9752931594848633, 0.12152554839849472, 3.1439216400031e-05], "l1-model.layers.2.mlp.down_proj.weight": [33709.4296875], "l2-model.layers.2.mlp.down_proj.weight": [5.283813953399658], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "{} is situated within the continent of", "subject": "Laos", "target_new": {"str": "Oceania"}, "old_answer": {"str": "Asia"}, "seed": 42}}, {"loss_per_step": [8.272, 2.147, 0.433, 0.054, 0.024, 0.016, 0.012, 0.011, 0.01], "prob_new": [0.00025732250651344657, 0.37309980392456055, 0.6965235471725464, 0.947532594203949, 0.9765942096710205, 0.9843530654907227, 0.987773060798645, 0.9894543886184692, 0.9903889298439026], "prob_old": [0.9752931594848633, 0.038857609033584595, 0.10306055843830109, 0.0003206876281183213, 2.4240582206402905e-05, 7.951482075441163e-06, 4.226779310556594e-06, 2.9642187655554153e-06, 2.4348862552869832e-06], "prob_new_token": [0.00022765828180126846, 0.01876142993569374, 0.44221067428588867, 0.9186233878135681, 0.9693556427955627, 0.9814454913139343, 0.9864857196807861, 0.9889199137687683, 0.9902641177177429], "prob_old_token": [0.9752931594848633, 0.038857609033584595, 0.10306055843830109, 0.0003206876281183213, 2.4240582206402905e-05, 7.951482075441163e-06, 4.226779310556594e-06, 2.9642187655554153e-06, 2.4348862552869832e-06], "l1-model.layers.2.mlp.down_proj.weight": [74611.6796875], "l2-model.layers.2.mlp.down_proj.weight": [12.323668479919434], "linf-model.layers.2.mlp.down_proj.weight": [0.0038796570152044296], "request": {"prompt": "{} is situated within the continent of", "subject": "Laos", "target_new": {"str": "South America"}, "old_answer": {"str": "Asia"}, "seed": 42}}, {"loss_per_step": [8.144, 1.299, 0.53, 0.008], "prob_new": [0.00029041949892416596, 0.2728695273399353, 0.5887436270713806, 0.9921523928642273], "prob_old": [0.9752931594848633, 0.05850890278816223, 0.006302287802100182, 0.0003047098871320486], "prob_new_token": [0.00029041949892416596, 0.2728695273399353, 0.5887436270713806, 0.9921523928642273], "prob_old_token": [0.9752931594848633, 0.05850890278816223, 0.006302287802100182, 0.0003047098871320486], "l1-model.layers.2.mlp.down_proj.weight": [41564.92578125], "l2-model.layers.2.mlp.down_proj.weight": [6.77822208404541], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024840831756592], "request": {"prompt": "{} is situated within the continent of", "subject": "Laos", "target_new": {"str": "Africa"}, "old_answer": {"str": "Asia"}, "seed": 42}}, {"loss_per_step": [3.619, 0.568, 0.074, 0.006], "prob_new": [0.6408257484436035, 0.7270573377609253, 0.9336227774620056, 0.9936026334762573], "prob_old": [0.9885791540145874, 0.0011032972251996398, 0.00028158948407508433, 3.1092560675460845e-05], "prob_new_token": 
[2.0892064640065655e-05, 0.18179842829704285, 0.8021659851074219, 0.9856436252593994], "prob_old_token": [0.9885791540145874, 0.0011032972251996398, 0.00028158948407508433, 3.1092560675460845e-05], "l1-model.layers.2.mlp.down_proj.weight": [40474.3125], "l2-model.layers.2.mlp.down_proj.weight": [6.700677871704102], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024803578853607], "request": {"prompt": "{} is situated within the continent of", "subject": "Kosovo", "target_new": {"str": "Oceania"}, "old_answer": {"str": "Europe"}, "seed": 42}}, {"loss_per_step": [6.092, 0.542, 0.014, 0.005], "prob_new": [0.002260021399706602, 0.5815356373786926, 0.9865652918815613, 0.9946513772010803], "prob_old": [0.9885791540145874, 0.0009059236035682261, 2.241950141979032e-06, 4.2427825519553153e-07], "prob_new_token": [0.002260021399706602, 0.5815356373786926, 0.9865652918815613, 0.9946513772010803], "prob_old_token": [0.9885791540145874, 0.0009059236035682261, 2.241950141979032e-06, 4.2427825519553153e-07], "l1-model.layers.2.mlp.down_proj.weight": [44378.078125], "l2-model.layers.2.mlp.down_proj.weight": [7.057023048400879], "linf-model.layers.2.mlp.down_proj.weight": [0.0015022773295640945], "request": {"prompt": "{} is situated within the continent of", "subject": "Kosovo", "target_new": {"str": "Africa"}, "old_answer": {"str": "Europe"}, "seed": 42}}, {"loss_per_step": [4.689, 1.043, 0.048, 0.015, 0.011, 0.007], "prob_new": [0.6553293466567993, 0.6714730262756348, 0.9541468620300293, 0.9853663444519043, 0.9895185232162476, 0.9929813146591187], "prob_old": [0.9885791540145874, 0.006143869832158089, 0.00030646458617411554, 1.2720374797936529e-05, 2.663237637534621e-06, 1.052191805683833e-06], "prob_new_token": [8.041748174036911e-07, 0.04511290043592453, 0.8928802609443665, 0.9848810434341431, 0.9932200908660889, 0.9960535168647766], "prob_old_token": [0.9885791540145874, 0.006143869832158089, 0.00030646458617411554, 1.2720374797936529e-05, 2.663237637534621e-06, 1.052191805683833e-06], "l1-model.layers.2.mlp.down_proj.weight": [55324.0546875], "l2-model.layers.2.mlp.down_proj.weight": [9.402959823608398], "linf-model.layers.2.mlp.down_proj.weight": [0.002506619319319725], "request": {"prompt": "{} is situated within the continent of", "subject": "Kosovo", "target_new": {"str": "Antarctica"}, "old_answer": {"str": "Europe"}, "seed": 42}}, {"loss_per_step": [9.483, 0.697, 0.259, 0.004], "prob_new": [7.610688771819696e-05, 0.49813827872276306, 0.7716382741928101, 0.9961488246917725], "prob_old": [0.9970913529396057, 0.01139586791396141, 0.034831129014492035, 0.00022733544756192714], "prob_new_token": [7.610688771819696e-05, 0.49813827872276306, 0.7716382741928101, 0.9961488246917725], "prob_old_token": [0.9970913529396057, 0.01139586791396141, 0.034831129014492035, 0.00022733544756192714], "l1-model.layers.2.mlp.down_proj.weight": [43646.03125], "l2-model.layers.2.mlp.down_proj.weight": [6.9615631103515625], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024814056232572], "request": {"prompt": "{} is situated within the continent of", "subject": "Wales", "target_new": {"str": "Asia"}, "old_answer": {"str": "Europe"}, "seed": 42}}, {"loss_per_step": [4.32, 0.179, 0.004], "prob_new": [0.6624898314476013, 0.8613312244415283, 0.9960149526596069], "prob_old": [0.9970913529396057, 0.017757924273610115, 2.9727669243584387e-05], "prob_new_token": [2.3789334591128863e-06, 0.586982011795044, 0.9924159049987793], "prob_old_token": [0.9970913529396057, 0.017757924273610115, 2.9727669243584387e-05], 
"l1-model.layers.2.mlp.down_proj.weight": [35249.8984375], "l2-model.layers.2.mlp.down_proj.weight": [5.406178951263428], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006788652390242], "request": {"prompt": "{} is situated within the continent of", "subject": "Wales", "target_new": {"str": "Oceania"}, "old_answer": {"str": "Europe"}, "seed": 42}}, {"loss_per_step": [8.215, 1.688, 0.25, 0.028, 0.013, 0.011, 0.01, 0.009], "prob_new": [0.08367887139320374, 0.4514214098453522, 0.799655556678772, 0.9729866981506348, 0.9871124029159546, 0.9886975288391113, 0.9897953867912292, 0.9914487600326538], "prob_old": [0.9970913529396057, 0.12476202100515366, 0.0642600730061531, 0.007092660758644342, 0.0019602542743086815, 0.0009827993344515562, 0.0005132017540745437, 0.0002605377521831542], "prob_new_token": [4.3783271053143835e-07, 0.03960535302758217, 0.6186579465866089, 0.9506673812866211, 0.9787823557853699, 0.9824483394622803, 0.9847272634506226, 0.9876304268836975], "prob_old_token": [0.9970913529396057, 0.12476202100515366, 0.0642600730061531, 0.007092660758644342, 0.0019602542743086815, 0.0009827993344515562, 0.0005132017540745437, 0.0002605377521831542], "l1-model.layers.2.mlp.down_proj.weight": [72169.265625], "l2-model.layers.2.mlp.down_proj.weight": [11.575589179992676], "linf-model.layers.2.mlp.down_proj.weight": [0.003489626105874777], "request": {"prompt": "{} is situated within the continent of", "subject": "Wales", "target_new": {"str": "South America"}, "old_answer": {"str": "Europe"}, "seed": 42}}, {"loss_per_step": [7.295, 0.906, 0.003], "prob_new": [0.0006791955674998462, 0.4042700231075287, 0.9966146349906921], "prob_old": [0.9923518896102905, 0.6030504107475281, 0.6117169857025146], "prob_new_token": [0.0006791955674998462, 0.4042700231075287, 0.9966146349906921], "prob_old_token": [0.9772589206695557, 6.674009910057066e-06, 1.050869613550276e-07], "l1-model.layers.2.mlp.down_proj.weight": [34025.7734375], "l2-model.layers.2.mlp.down_proj.weight": [5.305232048034668], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "{} is situated within the continent of", "subject": "French Polynesia", "target_new": {"str": "Europe"}, "old_answer": {"str": "Oceania"}, "seed": 42}}, {"loss_per_step": [3.778, 0.872, 0.02, 0.007], "prob_new": [0.6647066473960876, 0.6885056495666504, 0.9800126552581787, 0.9926031827926636], "prob_old": [0.9923518896102905, 0.6542350649833679, 0.5177947282791138, 0.5294742584228516], "prob_new_token": [1.2029176104988437e-05, 0.07360952347517014, 0.9548675417900085, 0.990355372428894], "prob_old_token": [0.9772589206695557, 0.0011176160769537091, 4.0526480006519705e-05, 4.459126557776472e-06], "l1-model.layers.2.mlp.down_proj.weight": [42512.5078125], "l2-model.layers.2.mlp.down_proj.weight": [6.944259166717529], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024319291114807], "request": {"prompt": "{} is situated within the continent of", "subject": "French Polynesia", "target_new": {"str": "Antarctica"}, "old_answer": {"str": "Oceania"}, "seed": 42}}, {"loss_per_step": [4.479, 1.271, 0.007], "prob_new": [0.011348983272910118, 0.28052660822868347, 0.9927147626876831], "prob_old": [0.9923518896102905, 0.6135965585708618, 0.6612015962600708], "prob_new_token": [0.011348983272910118, 0.28052660822868347, 0.9927147626876831], "prob_old_token": [0.9772589206695557, 2.08299134101253e-05, 1.8352355937167886e-06], "l1-model.layers.2.mlp.down_proj.weight": [33130.1484375], "l2-model.layers.2.mlp.down_proj.weight": 
[5.224323749542236], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "{} is situated within the continent of", "subject": "French Polynesia", "target_new": {"str": "Asia"}, "old_answer": {"str": "Oceania"}, "seed": 42}}, {"loss_per_step": [10.322, 1.042, 0.278, 0.036, 0.022, 0.014, 0.009], "prob_new": [3.2905591069720685e-05, 0.35268479585647583, 0.7575777769088745, 0.9650102853775024, 0.9785999059677124, 0.9859317541122437, 0.9913743138313293], "prob_old": [0.9849505424499512, 0.6656016111373901, 0.6410114765167236, 0.6333371996879578, 0.6127842664718628, 0.6059126853942871, 0.6093916296958923], "prob_new_token": [3.2905591069720685e-05, 0.35268479585647583, 0.7575777769088745, 0.9650102853775024, 0.9785999059677124, 0.9859317541122437, 0.9913743138313293], "prob_old_token": [0.9549728035926819, 0.0014286492951214314, 2.7357540602679364e-05, 6.0637817114184145e-06, 5.194572167965816e-06, 3.446419668762246e-06, 1.8878160972235491e-06], "l1-model.layers.2.mlp.down_proj.weight": [64941.1953125], "l2-model.layers.2.mlp.down_proj.weight": [10.662936210632324], "linf-model.layers.2.mlp.down_proj.weight": [0.0029960274696350098], "request": {"prompt": "{} is situated within the continent of", "subject": "Cook Islands", "target_new": {"str": "Africa"}, "old_answer": {"str": "Oceania"}, "seed": 42}}, {"loss_per_step": [6.57, 0.256, 0.036, 0.011, 0.003], "prob_new": [0.0014016609638929367, 0.7738003730773926, 0.9648962616920471, 0.9895035624504089, 0.9971892237663269], "prob_old": [0.9849505424499512, 0.6666552424430847, 0.6690558195114136, 0.6660338640213013, 0.6646908521652222], "prob_new_token": [0.0014016609638929367, 0.7738003730773926, 0.9648962616920471, 0.9895035624504089, 0.9971892237663269], "prob_old_token": [0.9549728035926819, 0.001923837698996067, 0.007568755652755499, 0.0006318768719211221, 7.007391832303256e-05], "l1-model.layers.2.mlp.down_proj.weight": [54426.1484375], "l2-model.layers.2.mlp.down_proj.weight": [8.546670913696289], "linf-model.layers.2.mlp.down_proj.weight": [0.0020045489072799683], "request": {"prompt": "{} is situated within the continent of", "subject": "Cook Islands", "target_new": {"str": "Asia"}, "old_answer": {"str": "Oceania"}, "seed": 42}}, {"loss_per_step": [6.339, 0.653, 0.002], "prob_new": [0.36725345253944397, 0.6340729594230652, 0.9979631900787354], "prob_old": [0.9849505424499512, 0.6812453269958496, 0.6616111993789673], "prob_new_token": [4.245059244567528e-06, 0.27177196741104126, 0.9962879419326782], "prob_old_token": [0.9549728035926819, 0.044846754521131516, 5.823723313369555e-06], "l1-model.layers.2.mlp.down_proj.weight": [34424.515625], "l2-model.layers.2.mlp.down_proj.weight": [5.349895477294922], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "{} is situated within the continent of", "subject": "Cook Islands", "target_new": {"str": "North America"}, "old_answer": {"str": "Oceania"}, "seed": 42}}, {"loss_per_step": [8.457, 1.275, 0.132, 0.017, 0.003], "prob_new": [0.03186745569109917, 0.5312172174453735, 0.8835718631744385, 0.9834476709365845, 0.9971730709075928], "prob_old": [0.9876916408538818, 0.3578358590602875, 0.028252605348825455, 0.0022757137194275856, 0.00015172500570770353], "prob_new_token": [7.085497486514214e-07, 0.07936251163482666, 0.7696561813354492, 0.968096137046814, 0.9954018592834473], "prob_old_token": [0.9876916408538818, 0.3578358590602875, 0.028252605348825455, 0.0022757137194275856, 0.00015172500570770353], 
"l1-model.layers.2.mlp.down_proj.weight": [54459.96875], "l2-model.layers.2.mlp.down_proj.weight": [8.580124855041504], "linf-model.layers.2.mlp.down_proj.weight": [0.0020051850005984306], "request": {"prompt": "{} is situated within the continent of", "subject": "Ancient Greece", "target_new": {"str": "North America"}, "old_answer": {"str": "Europe"}, "seed": 42}}, {"loss_per_step": [4.436, 0.142, 0.003], "prob_new": [0.6512614488601685, 0.8838427066802979, 0.996540904045105], "prob_old": [0.9876916408538818, 0.020535873249173164, 0.00011380618525436148], "prob_new_token": [1.739479330353788e-06, 0.6557503938674927, 0.993931770324707], "prob_old_token": [0.9876916408538818, 0.020535873249173164, 0.00011380618525436148], "l1-model.layers.2.mlp.down_proj.weight": [35831.08984375], "l2-model.layers.2.mlp.down_proj.weight": [5.459401607513428], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006786324083805], "request": {"prompt": "{} is situated within the continent of", "subject": "Ancient Greece", "target_new": {"str": "Oceania"}, "old_answer": {"str": "Europe"}, "seed": 42}}, {"loss_per_step": [6.326, 0.184, 0.011, 0.002], "prob_new": [0.0017897483194246888, 0.832066535949707, 0.9894290566444397, 0.9977986216545105], "prob_old": [0.9876916408538818, 0.00693785073235631, 0.00018431601347401738, 4.827267275686609e-06], "prob_new_token": [0.0017897483194246888, 0.832066535949707, 0.9894290566444397, 0.9977986216545105], "prob_old_token": [0.9876916408538818, 0.00693785073235631, 0.00018431601347401738, 4.827267275686609e-06], "l1-model.layers.2.mlp.down_proj.weight": [47095.6015625], "l2-model.layers.2.mlp.down_proj.weight": [7.220028400421143], "linf-model.layers.2.mlp.down_proj.weight": [0.0015022996813058853], "request": {"prompt": "{} is situated within the continent of", "subject": "Ancient Greece", "target_new": {"str": "Africa"}, "old_answer": {"str": "Europe"}, "seed": 42}}, {"loss_per_step": [4.25, 2.311, 0.754, 0.051, 0.024, 0.011, 0.007], "prob_new": [0.6598891019821167, 0.6597275137901306, 0.69749516248703, 0.9518224596977234, 0.9766491651535034, 0.989212155342102, 0.9930146932601929], "prob_old": [0.9816288352012634, 0.0067305900156497955, 0.18316702544689178, 0.012465518899261951, 0.001034300308674574, 0.00016492998111061752, 3.493631083983928e-05], "prob_new_token": [2.961136033263756e-06, 0.0009969613747671247, 0.10568684339523315, 0.8769992589950562, 0.950439453125, 0.9823929071426392, 0.9902266263961792], "prob_old_token": [0.9816288352012634, 0.0067305900156497955, 0.18316702544689178, 0.012465518899261951, 0.001034300308674574, 0.00016492998111061752, 3.493631083983928e-05], "l1-model.layers.2.mlp.down_proj.weight": [59552.1640625], "l2-model.layers.2.mlp.down_proj.weight": [10.226962089538574], "linf-model.layers.2.mlp.down_proj.weight": [0.0029860823415219784], "request": {"prompt": "{} is situated within the continent of", "subject": "Roman Egypt", "target_new": {"str": "Antarctica"}, "old_answer": {"str": "Africa"}, "seed": 42}}, {"loss_per_step": [4.592, 0.996, 0.015, 0.002], "prob_new": [0.6311147212982178, 0.6830308437347412, 0.9849016666412354, 0.9980183839797974], "prob_old": [0.9816288352012634, 0.030849510803818703, 0.002395779360085726, 0.00014639952860306948], "prob_new_token": [1.1625306797213852e-06, 0.05043347552418709, 0.9549252986907959, 0.9945387244224548], "prob_old_token": [0.9816288352012634, 0.030849510803818703, 0.002395779360085726, 0.00014639952860306948], "l1-model.layers.2.mlp.down_proj.weight": [42068.29296875], 
"l2-model.layers.2.mlp.down_proj.weight": [6.9173197746276855], "linf-model.layers.2.mlp.down_proj.weight": [0.0015023620799183846], "request": {"prompt": "{} is situated within the continent of", "subject": "Roman Egypt", "target_new": {"str": "Oceania"}, "old_answer": {"str": "Africa"}, "seed": 42}}, {"loss_per_step": [6.481, 1.211, 0.126, 0.015, 0.005], "prob_new": [0.0022579755168408155, 0.5011109113693237, 0.8868764638900757, 0.9856339693069458, 0.9946269989013672], "prob_old": [0.9816288352012634, 0.18713431060314178, 0.027278922498226166, 0.002734043402597308, 0.000320180639391765], "prob_new_token": [0.0005990619538351893, 0.09819599986076355, 0.7883762121200562, 0.977227509021759, 0.9928367137908936], "prob_old_token": [0.9816288352012634, 0.18713431060314178, 0.027278922498226166, 0.002734043402597308, 0.000320180639391765], "l1-model.layers.2.mlp.down_proj.weight": [55578.58984375], "l2-model.layers.2.mlp.down_proj.weight": [8.629236221313477], "linf-model.layers.2.mlp.down_proj.weight": [0.002001766115427017], "request": {"prompt": "{} is situated within the continent of", "subject": "Roman Egypt", "target_new": {"str": "North America"}, "old_answer": {"str": "Africa"}, "seed": 42}}, {"loss_per_step": [11.662, 1.462, 0.322, 0.02, 0.01, 0.006], "prob_new": [8.61646458361065e-06, 0.23182085156440735, 0.7244015336036682, 0.9802231788635254, 0.9896118640899658, 0.9941178560256958], "prob_old": [0.9951596856117249, 0.06775061786174774, 0.009473110549151897, 0.001611390383914113, 0.0009658232447691262, 0.0005455606151372194], "prob_new_token": [8.61646458361065e-06, 0.23182085156440735, 0.7244015336036682, 0.9802231788635254, 0.9896118640899658, 0.9941178560256958], "prob_old_token": [0.9951596856117249, 0.06775061786174774, 0.009473110549151897, 0.001611390383914113, 0.0009658232447691262, 0.0005455606151372194], "l1-model.layers.2.mlp.down_proj.weight": [57143.8828125], "l2-model.layers.2.mlp.down_proj.weight": [9.36865520477295], "linf-model.layers.2.mlp.down_proj.weight": [0.002480874303728342], "request": {"prompt": "{} is situated within the continent of", "subject": "Denmark", "target_new": {"str": "Africa"}, "old_answer": {"str": "Europe"}, "seed": 42}}, {"loss_per_step": [8.397, 0.737, 0.057, 0.009], "prob_new": [0.00022548501146957278, 0.4786740839481354, 0.9446443319320679, 0.9906774163246155], "prob_old": [0.9951596856117249, 0.03475197032094002, 0.02293139137327671, 0.0015585911460220814], "prob_new_token": [0.00022548501146957278, 0.4786740839481354, 0.9446443319320679, 0.9906774163246155], "prob_old_token": [0.9951596856117249, 0.03475197032094002, 0.02293139137327671, 0.0015585911460220814], "l1-model.layers.2.mlp.down_proj.weight": [42136.2421875], "l2-model.layers.2.mlp.down_proj.weight": [6.903384208679199], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024691820144653], "request": {"prompt": "{} is situated within the continent of", "subject": "Denmark", "target_new": {"str": "Asia"}, "old_answer": {"str": "Europe"}, "seed": 42}}, {"loss_per_step": [3.685, 0.103, 0.006], "prob_new": [0.6428563594818115, 0.9114001989364624, 0.9944104552268982], "prob_old": [0.9951596856117249, 0.018488973379135132, 7.393724808935076e-05], "prob_new_token": [1.704308670014143e-05, 0.7354931831359863, 0.9840162992477417], "prob_old_token": [0.9951596856117249, 0.018488973379135132, 7.393724808935076e-05], "l1-model.layers.2.mlp.down_proj.weight": [35631.2421875], "l2-model.layers.2.mlp.down_proj.weight": [5.439841270446777], "linf-model.layers.2.mlp.down_proj.weight": 
[0.0010006800293922424], "request": {"prompt": "{} is situated within the continent of", "subject": "Denmark", "target_new": {"str": "Oceania"}, "old_answer": {"str": "Europe"}, "seed": 42}}, {"loss_per_step": [9.005, 0.856, 0.117, 0.003], "prob_new": [0.00012275400513317436, 0.4250696897506714, 0.8898960947990417, 0.997462809085846], "prob_old": [0.9973894357681274, 0.009424963034689426, 0.00018084773910231888, 1.4606023341912078e-06], "prob_new_token": [0.00012275400513317436, 0.4250696897506714, 0.8898960947990417, 0.997462809085846], "prob_old_token": [0.9973894357681274, 0.009424963034689426, 0.00018084773910231888, 1.4606023341912078e-06], "l1-model.layers.2.mlp.down_proj.weight": [45472.5390625], "l2-model.layers.2.mlp.down_proj.weight": [7.1171112060546875], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024729073047638], "request": {"prompt": "{} is situated within the continent of", "subject": "Channel Tunnel", "target_new": {"str": "Africa"}, "old_answer": {"str": "Europe"}, "seed": 42}}, {"loss_per_step": [5.999, 0.571, 0.013, 0.003], "prob_new": [0.18078577518463135, 0.6584339141845703, 0.987473726272583, 0.996616005897522], "prob_old": [0.9973894357681274, 0.02428467571735382, 0.002910873619839549, 0.0004960173973813653], "prob_new_token": [1.7020865925587714e-05, 0.32015302777290344, 0.9766675233840942, 0.9947071075439453], "prob_old_token": [0.9973894357681274, 0.02428467571735382, 0.002910873619839549, 0.0004960173973813653], "l1-model.layers.2.mlp.down_proj.weight": [47294.5078125], "l2-model.layers.2.mlp.down_proj.weight": [7.234317302703857], "linf-model.layers.2.mlp.down_proj.weight": [0.0015022596344351768], "request": {"prompt": "{} is situated within the continent of", "subject": "Channel Tunnel", "target_new": {"str": "North America"}, "old_answer": {"str": "Europe"}, "seed": 42}}, {"loss_per_step": [8.06, 2.078, 0.636, 0.4, 0.033, 0.02, 0.012, 0.009], "prob_new": [0.10421674698591232, 0.47631075978279114, 0.6365262269973755, 0.7208884954452515, 0.9675621390342712, 0.980242133140564, 0.9878634214401245, 0.9915207624435425], "prob_old": [0.9973894357681274, 0.012690230272710323, 0.05464927479624748, 0.009130728431046009, 0.001067519304342568, 0.0004792097897734493, 0.00023052514006849378, 0.00013007725647184998], "prob_new_token": [4.787872285305639e-07, 0.01675378903746605, 0.28273412585258484, 0.45601415634155273, 0.9382072687149048, 0.9635856747627258, 0.9797682166099548, 0.9882177710533142], "prob_old_token": [0.9973894357681274, 0.012690230272710323, 0.05464927479624748, 0.009130728431046009, 0.001067519304342568, 0.0004792097897734493, 0.00023052514006849378, 0.00013007725647184998], "l1-model.layers.2.mlp.down_proj.weight": [69453.1015625], "l2-model.layers.2.mlp.down_proj.weight": [11.375947952270508], "linf-model.layers.2.mlp.down_proj.weight": [0.0034578812774270773], "request": {"prompt": "{} is situated within the continent of", "subject": "Channel Tunnel", "target_new": {"str": "South America"}, "old_answer": {"str": "Europe"}, "seed": 42}}, {"loss_per_step": [9.821, 2.601, 0.248, 0.059, 0.021, 0.01], "prob_new": [5.4321626521414146e-05, 0.07419486343860626, 0.7804582715034485, 0.9423423409461975, 0.9795335531234741, 0.9904471039772034], "prob_old": [0.9987123608589172, 0.0022754869423806667, 0.009098549373447895, 0.00017099179967772216, 3.879072028212249e-05, 1.7246435163542628e-05], "prob_new_token": [5.4321626521414146e-05, 0.07419486343860626, 0.7804582715034485, 0.9423423409461975, 0.9795335531234741, 0.9904471039772034], "prob_old_token": 
[0.9987123608589172, 0.0022754869423806667, 0.009098549373447895, 0.00017099179967772216, 3.879072028212249e-05, 1.7246435163542628e-05], "l1-model.layers.2.mlp.down_proj.weight": [56599.75], "l2-model.layers.2.mlp.down_proj.weight": [9.410707473754883], "linf-model.layers.2.mlp.down_proj.weight": [0.0024799704551696777], "request": {"prompt": "{} is situated within the continent of", "subject": "Bohemia", "target_new": {"str": "Africa"}, "old_answer": {"str": "Europe"}, "seed": 42}}, {"loss_per_step": [8.797, 0.988, 0.101, 0.023, 0.01], "prob_new": [0.05281750485301018, 0.5525448322296143, 0.908294677734375, 0.9776840209960938, 0.9903301000595093], "prob_old": [0.9987123608589172, 0.019291944801807404, 0.0134588573127985, 0.0018824452999979258, 0.00048604447511024773], "prob_new_token": [2.1634723168517667e-07, 0.1443711668252945, 0.8229008913040161, 0.9581470489501953, 0.983059823513031], "prob_old_token": [0.9987123608589172, 0.019291944801807404, 0.0134588573127985, 0.0018824452999979258, 0.00048604447511024773], "l1-model.layers.2.mlp.down_proj.weight": [53100.90625], "l2-model.layers.2.mlp.down_proj.weight": [8.481534004211426], "linf-model.layers.2.mlp.down_proj.weight": [0.0020027938298881054], "request": {"prompt": "{} is situated within the continent of", "subject": "Bohemia", "target_new": {"str": "South America"}, "old_answer": {"str": "Europe"}, "seed": 42}}, {"loss_per_step": [6.848, 0.489, 0.009], "prob_new": [0.47531944513320923, 0.682856559753418, 0.9907841682434082], "prob_old": [0.9987123608589172, 0.029956365004181862, 0.0026787046808749437], "prob_new_token": [1.1848967460537096e-06, 0.38202086091041565, 0.9822538495063782], "prob_old_token": [0.9987123608589172, 0.029956365004181862, 0.0026787046808749437], "l1-model.layers.2.mlp.down_proj.weight": [35392.875], "l2-model.layers.2.mlp.down_proj.weight": [5.424768447875977], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "{} is situated within the continent of", "subject": "Bohemia", "target_new": {"str": "North America"}, "old_answer": {"str": "Europe"}, "seed": 42}}, {"loss_per_step": [4.605, 1.443, 0.126, 0.021, 0.01, 0.006], "prob_new": [0.6616003513336182, 0.6324910521507263, 0.8871755599975586, 0.9797530174255371, 0.9898585081100464, 0.9942685961723328], "prob_old": [0.9855151176452637, 0.001171374344266951, 0.0017300305189564824, 3.173460572725162e-05, 4.884412192041054e-06, 8.876996844264795e-07], "prob_new_token": [1.0166972970182542e-06, 0.014891332015395164, 0.7483333945274353, 0.981224775314331, 0.9953444600105286, 0.9989706873893738], "prob_old_token": [0.9855151176452637, 0.001171374344266951, 0.0017300305189564824, 3.173460572725162e-05, 4.884412192041054e-06, 8.876996844264795e-07], "l1-model.layers.2.mlp.down_proj.weight": [57314.40234375], "l2-model.layers.2.mlp.down_proj.weight": [9.54140853881836], "linf-model.layers.2.mlp.down_proj.weight": [0.002503426745533943], "request": {"prompt": "{} is situated within the continent of", "subject": "Southeast Asia", "target_new": {"str": "Antarctica"}, "old_answer": {"str": "Asia"}, "seed": 42}}, {"loss_per_step": [8.635, 1.338, 0.472, 0.038, 0.013, 0.007], "prob_new": [0.00017778469191398472, 0.2624494135379791, 0.6239486336708069, 0.9626188278198242, 0.9874874353408813, 0.9934019446372986], "prob_old": [0.9855151176452637, 0.01155233383178711, 0.12387953698635101, 0.002425444545224309, 0.00033110094955191016, 9.817867248784751e-05], "prob_new_token": [0.00017778469191398472, 0.2624494135379791, 0.6239486336708069, 
0.9626188278198242, 0.9874874353408813, 0.9934019446372986], "prob_old_token": [0.9855151176452637, 0.01155233383178711, 0.12387953698635101, 0.002425444545224309, 0.00033110094955191016, 9.817867248784751e-05], "l1-model.layers.2.mlp.down_proj.weight": [56024.7890625], "l2-model.layers.2.mlp.down_proj.weight": [9.379951477050781], "linf-model.layers.2.mlp.down_proj.weight": [0.002481994219124317], "request": {"prompt": "{} is situated within the continent of", "subject": "Southeast Asia", "target_new": {"str": "Africa"}, "old_answer": {"str": "Asia"}, "seed": 42}}, {"loss_per_step": [8.013, 1.791, 0.262, 0.046, 0.014, 0.007], "prob_new": [0.23972146213054657, 0.49447697401046753, 0.7954582571983337, 0.9563246965408325, 0.9859919548034668, 0.9928855299949646], "prob_old": [0.9855151176452637, 0.05570673197507858, 0.13689024746418, 0.012850280851125717, 0.0014297034358605742, 0.0002815862826537341], "prob_new_token": [2.2889972228767874e-07, 0.02898816019296646, 0.5951547026634216, 0.9151646494865417, 0.9738539457321167, 0.9874266386032104], "prob_old_token": [0.9855151176452637, 0.05570673197507858, 0.13689024746418, 0.012850280851125717, 0.0014297034358605742, 0.0002815862826537341], "l1-model.layers.2.mlp.down_proj.weight": [57267.6328125], "l2-model.layers.2.mlp.down_proj.weight": [9.520639419555664], "linf-model.layers.2.mlp.down_proj.weight": [0.002496723085641861], "request": {"prompt": "{} is situated within the continent of", "subject": "Southeast Asia", "target_new": {"str": "North America"}, "old_answer": {"str": "Asia"}, "seed": 42}}, {"loss_per_step": [3.791, 2.149, 0.409, 0.037, 0.011, 0.006], "prob_new": [0.40542107820510864, 0.46140041947364807, 0.7363105416297913, 0.9642689824104309, 0.98956698179245, 0.9941898584365845], "prob_old": [0.7495555877685547, 0.003507029265165329, 0.0030818188097327948, 0.0014245363418012857, 0.0010868872050195932, 0.0008229672093875706], "prob_new_token": [0.027744531631469727, 0.26459047198295593, 0.8593283295631409, 0.9049630165100098, 0.9752253293991089, 0.989972710609436], "prob_old_token": [0.9089038372039795, 1.9288621842861176e-05, 3.564045618986711e-05, 6.2651929511048365e-06, 5.538863661058713e-07, 9.683428459084098e-08], "l1-model.layers.2.mlp.down_proj.weight": [62311.6171875], "l2-model.layers.2.mlp.down_proj.weight": [9.770247459411621], "linf-model.layers.2.mlp.down_proj.weight": [0.002475586486980319], "request": {"prompt": "{} is a sport team member of", "subject": "Thulani Serero", "target_new": {"str": "the Seattle Mariners"}, "old_answer": {"str": "Ajax Amsterdam"}, "seed": 42}}, {"loss_per_step": [0.096, 1.139, 0.005], "prob_new": [0.9089038372039795, 0.32006463408470154, 0.995123565196991], "prob_old": [0.7495555877685547, 0.5135490298271179, 0.6107603311538696], "prob_new_token": [0.9089038372039795, 0.32006463408470154, 0.995123565196991], "prob_old_token": [0.9089038372039795, 0.32006463408470154, 0.995123565196991], "l1-model.layers.2.mlp.down_proj.weight": [27358.0078125], "l2-model.layers.2.mlp.down_proj.weight": [4.69774055480957], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "{} is a sport team member of", "subject": "Thulani Serero", "target_new": {"str": "Ajax"}, "old_answer": {"str": "Ajax Amsterdam"}, "seed": 42}}, {"loss_per_step": [2.1, 0.926, 0.226, 0.023, 0.008], "prob_new": [0.6868652701377869, 0.6812258958816528, 0.8533273935317993, 0.9781274199485779, 0.9919517636299133], "prob_old": [0.7495555877685547, 0.0025347850751131773, 0.004139212891459465, 
0.0041209920309484005, 0.002727693412452936], "prob_new_token": [5.9882146160816774e-05, 0.020500969141721725, 0.35228845477104187, 0.9319118857383728, 0.9799668192863464], "prob_old_token": [0.9089038372039795, 0.000412165914895013, 0.0018167421221733093, 0.000265920243691653, 5.107981996843591e-05], "l1-model.layers.2.mlp.down_proj.weight": [52202.7109375], "l2-model.layers.2.mlp.down_proj.weight": [8.420125961303711], "linf-model.layers.2.mlp.down_proj.weight": [0.0020050876773893833], "request": {"prompt": "{} is a sport team member of", "subject": "Thulani Serero", "target_new": {"str": "Chelsea Football Club"}, "old_answer": {"str": "Ajax Amsterdam"}, "seed": 42}}, {"loss_per_step": [2.918, 0.939, 0.095, 0.016, 0.008], "prob_new": [0.5929679274559021, 0.7344212532043457, 0.9184924960136414, 0.9839348793029785, 0.9921979904174805], "prob_old": [0.883654773235321, 0.5361270308494568, 0.4588857591152191, 0.43138739466667175, 0.41885486245155334], "prob_new_token": [2.6126476768695284e-06, 0.01380305178463459, 0.6834296584129333, 0.9593262672424316, 0.9781321287155151], "prob_old_token": [0.5970973372459412, 0.4794215261936188, 0.1274781972169876, 0.012740621343255043, 0.007041486911475658], "l1-model.layers.2.mlp.down_proj.weight": [47742.7109375], "l2-model.layers.2.mlp.down_proj.weight": [8.155383110046387], "linf-model.layers.2.mlp.down_proj.weight": [0.0020047128200531006], "request": {"prompt": "{} is a sport team member of", "subject": "John Havlicek", "target_new": {"str": "Chelsea Football Club"}, "old_answer": {"str": "the Boston Celtics"}, "seed": 42}}, {"loss_per_step": [5.002, 2.317, 0.311, 0.003], "prob_new": [0.49347254633903503, 0.4905836880207062, 0.7640984654426575, 0.9965665340423584], "prob_old": [0.883654773235321, 0.5411819815635681, 0.4500645697116852, 0.39673522114753723], "prob_new_token": [4.580796303343959e-05, 0.010003319010138512, 0.5476023554801941, 0.9936023354530334], "prob_old_token": [0.5970973372459412, 0.37844017148017883, 0.14124789834022522, 0.0008732479182071984], "l1-model.layers.2.mlp.down_proj.weight": [37029.0703125], "l2-model.layers.2.mlp.down_proj.weight": [6.5265116691589355], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024775639176369], "request": {"prompt": "{} is a sport team member of", "subject": "John Havlicek", "target_new": {"str": "Real Madrid"}, "old_answer": {"str": "the Boston Celtics"}, "seed": 42}}, {"loss_per_step": [1.703, 0.633, 0.028, 0.01], "prob_new": [0.5967549681663513, 0.7791812419891357, 0.9732710719108582, 0.9904469847679138], "prob_old": [0.883654773235321, 0.5748945474624634, 0.5854033827781677, 0.59344881772995], "prob_new_token": [0.5970973372459412, 0.8493618369102478, 0.9306697249412537, 0.9739286303520203], "prob_old_token": [0.5970973372459412, 0.8493618369102478, 0.9306697249412537, 0.9739286303520203], "l1-model.layers.2.mlp.down_proj.weight": [38683.625], "l2-model.layers.2.mlp.down_proj.weight": [6.652098178863525], "linf-model.layers.2.mlp.down_proj.weight": [0.0015023815212771297], "request": {"prompt": "{} is a sport team member of", "subject": "John Havlicek", "target_new": {"str": "the Dallas Cowboys"}, "old_answer": {"str": "the Boston Celtics"}, "seed": 42}}, {"loss_per_step": [1.701, 0.13, 0.028, 0.006], "prob_new": [0.7571423649787903, 0.896409809589386, 0.9732502102851868, 0.9944719672203064], "prob_old": [0.9466750025749207, 0.7612859010696411, 0.7650429010391235, 0.6513673067092896], "prob_new_token": [0.8145915269851685, 0.9162943363189697, 0.9836487174034119, 0.9894464612007141], 
"prob_old_token": [0.8145915269851685, 0.9162943363189697, 0.9836487174034119, 0.9894464612007141], "l1-model.layers.2.mlp.down_proj.weight": [44062.5078125], "l2-model.layers.2.mlp.down_proj.weight": [7.0220046043396], "linf-model.layers.2.mlp.down_proj.weight": [0.0015023909509181976], "request": {"prompt": "{} is a sport team member of", "subject": "Dirk Nowitzki", "target_new": {"str": "the Los Angeles Lakers"}, "old_answer": {"str": "the Dallas Mavericks"}, "seed": 42}}, {"loss_per_step": [3.535, 1.608, 0.106, 0.036, 0.013, 0.005], "prob_new": [0.5688583254814148, 0.7465087175369263, 0.9057321548461914, 0.9659119844436646, 0.9874332547187805, 0.9945648312568665], "prob_old": [0.9466750025749207, 0.5772688388824463, 0.5507034659385681, 0.5820764899253845, 0.5949456095695496, 0.6002191305160522], "prob_new_token": [0.8145915269851685, 0.8758500814437866, 0.7352880239486694, 0.8960729241371155, 0.9605274796485901, 0.9855713844299316], "prob_old_token": [0.8145915269851685, 0.8758500814437866, 0.7352880239486694, 0.8960729241371155, 0.9605274796485901, 0.9855713844299316], "l1-model.layers.2.mlp.down_proj.weight": [56387.75], "l2-model.layers.2.mlp.down_proj.weight": [9.47307300567627], "linf-model.layers.2.mlp.down_proj.weight": [0.002501126378774643], "request": {"prompt": "{} is a sport team member of", "subject": "Dirk Nowitzki", "target_new": {"str": "the Seattle Mariners"}, "old_answer": {"str": "the Dallas Mavericks"}, "seed": 42}}, {"loss_per_step": [6.078, 2.401, 0.902, 0.014, 0.037, 0.014, 0.004], "prob_new": [0.3955865502357483, 0.6660940647125244, 0.6887246370315552, 0.9862717390060425, 0.9653276801109314, 0.9866599440574646, 0.9955460429191589], "prob_old": [0.9466750025749207, 0.6744131445884705, 0.6714494824409485, 0.4613868296146393, 0.43681392073631287, 0.4194643497467041, 0.413741797208786], "prob_new_token": [6.4089185514149e-08, 0.000746699224691838, 0.06686634570360184, 0.9590871930122375, 0.8963623642921448, 0.9603192806243896, 0.9869304895401001], "prob_old_token": [0.8145915269851685, 0.6122531890869141, 0.7179881930351257, 0.006419667508453131, 0.011427282355725765, 0.005154055077582598, 0.0021812778431922197], "l1-model.layers.2.mlp.down_proj.weight": [56718.8828125], "l2-model.layers.2.mlp.down_proj.weight": [9.923456192016602], "linf-model.layers.2.mlp.down_proj.weight": [0.002972906455397606], "request": {"prompt": "{} is a sport team member of", "subject": "Dirk Nowitzki", "target_new": {"str": "Senegal"}, "old_answer": {"str": "the Dallas Mavericks"}, "seed": 42}}, {"loss_per_step": [13.255, 8.069, 0.239, 0.001], "prob_new": [1.7511804344394477e-06, 0.00031318035325966775, 0.787324845790863, 0.999009907245636], "prob_old": [0.8798449635505676, 0.448211669921875, 0.41726359724998474, 0.4093676507472992], "prob_new_token": [1.7511804344394477e-06, 0.00031318035325966775, 0.787324845790863, 0.999009907245636], "prob_old_token": [0.5003581047058105, 0.2009437531232834, 0.052048102021217346, 0.00030323784449137747], "l1-model.layers.2.mlp.down_proj.weight": [38990.80078125], "l2-model.layers.2.mlp.down_proj.weight": [6.598461151123047], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024840831756592], "request": {"prompt": "{} is a sport team member of", "subject": "Ichiro Suzuki", "target_new": {"str": "Ajax"}, "old_answer": {"str": "the Seattle Mariners"}, "seed": 42}}, {"loss_per_step": [7.724, 4.077, 0.142, 0.01, 0.009], "prob_new": [0.055850859731435776, 0.48191842436790466, 0.8757424354553223, 0.9896873235702515, 0.9915311932563782], "prob_old": 
[0.8798449635505676, 0.4489257037639618, 0.44115009903907776, 0.40243878960609436, 0.400344580411911], "prob_new_token": [1.7511804344394477e-06, 0.00029870145954191685, 0.7599399089813232, 0.9895159602165222, 0.9942013621330261], "prob_old_token": [0.5003581047058105, 0.22587764263153076, 0.14211438596248627, 0.0032314674463123083, 0.0009165079682134092], "l1-model.layers.2.mlp.down_proj.weight": [48090.1328125], "l2-model.layers.2.mlp.down_proj.weight": [8.1399507522583], "linf-model.layers.2.mlp.down_proj.weight": [0.0019753677770495415], "request": {"prompt": "{} is a sport team member of", "subject": "Ichiro Suzuki", "target_new": {"str": "Ajax Amsterdam"}, "old_answer": {"str": "the Seattle Mariners"}, "seed": 42}}, {"loss_per_step": [5.9, 3.175, 0.299, 0.001], "prob_new": [0.335539311170578, 0.6609148383140564, 0.8021857142448425, 0.9987310171127319], "prob_old": [0.8798449635505676, 0.48952150344848633, 0.45395341515541077, 0.4120231568813324], "prob_new_token": [2.313536697329255e-06, 7.431318226736039e-05, 0.4076445400714874, 0.9967455863952637], "prob_old_token": [0.5003581047058105, 0.3693908154964447, 0.1814320981502533, 0.00018035016546491534], "l1-model.layers.2.mlp.down_proj.weight": [43238.26953125], "l2-model.layers.2.mlp.down_proj.weight": [6.92385196685791], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024784952402115], "request": {"prompt": "{} is a sport team member of", "subject": "Ichiro Suzuki", "target_new": {"str": "Senegal"}, "old_answer": {"str": "the Seattle Mariners"}, "seed": 42}}, {"loss_per_step": [2.733, 1.544, 0.784, 0.311, 0.116, 0.031, 0.006], "prob_new": [0.6184597015380859, 0.5969426035881042, 0.7776015400886536, 0.8331300616264343, 0.9077653288841248, 0.9705182909965515, 0.9938850402832031], "prob_old": [0.9249861836433411, 0.784311056137085, 0.7992451786994934, 0.7862951755523682, 0.7555657625198364, 0.7028118968009949, 0.6695330739021301], "prob_new_token": [4.094958057976328e-06, 0.0018238030606880784, 0.022882962599396706, 0.22350051999092102, 0.5875931978225708, 0.881707489490509, 0.9827787280082703], "prob_old_token": [0.9365912079811096, 0.008129065856337547, 0.03747514635324478, 0.000589377130381763, 0.0007030562846921384, 3.505893619148992e-05, 2.1288190055201994e-06], "l1-model.layers.2.mlp.down_proj.weight": [66798.78125], "l2-model.layers.2.mlp.down_proj.weight": [10.694483757019043], "linf-model.layers.2.mlp.down_proj.weight": [0.003005102276802063], "request": {"prompt": "{} is a sport team member of", "subject": "Burak Y\u0131lmaz", "target_new": {"str": "Chelsea Football Club"}, "old_answer": {"str": "Galatasaray S.K."}, "seed": 42}}, {"loss_per_step": [2.948, 1.787, 0.739, 0.305, 0.021, 0.009], "prob_new": [0.5740585327148438, 0.6491880416870117, 0.6884790658950806, 0.8057098388671875, 0.9796859622001648, 0.9913721084594727], "prob_old": [0.9249861836433411, 0.7728593945503235, 0.6813912391662598, 0.48597580194473267, 0.7662684321403503, 0.7467992305755615], "prob_new_token": [0.0030081490986049175, 0.40453091263771057, 0.42457082867622375, 0.7403243780136108, 0.9438929557800293, 0.9787033200263977], "prob_old_token": [0.9365912079811096, 0.0013210533652454615, 0.0009294988121837378, 0.00010094512981595472, 7.619973985129036e-06, 5.100397402202361e-07], "l1-model.layers.2.mlp.down_proj.weight": [59434.328125], "l2-model.layers.2.mlp.down_proj.weight": [9.501265525817871], "linf-model.layers.2.mlp.down_proj.weight": [0.002483873628079891], "request": {"prompt": "{} is a sport team member of", "subject": "Burak Y\u0131lmaz", 
"target_new": {"str": "the Dallas Cowboys"}, "old_answer": {"str": "Galatasaray S.K."}, "seed": 42}}, {"loss_per_step": [3.018, 1.712, 0.624, 0.035, 0.008], "prob_new": [0.6398148536682129, 0.7325741648674011, 0.7722268104553223, 0.9666575789451599, 0.9921078085899353], "prob_old": [0.9249861836433411, 0.7789055109024048, 0.7786084413528442, 0.7578724026679993, 0.6824070811271667], "prob_new_token": [1.3602655144495657e-06, 0.0002820953377522528, 0.05457867681980133, 0.9186432361602783, 0.9933998584747314], "prob_old_token": [0.9365912079811096, 0.0011842738604173064, 0.0004983851686120033, 4.2153371282438457e-07, 1.53902082189461e-08], "l1-model.layers.2.mlp.down_proj.weight": [49539.08984375], "l2-model.layers.2.mlp.down_proj.weight": [8.22490119934082], "linf-model.layers.2.mlp.down_proj.weight": [0.002004942623898387], "request": {"prompt": "{} is a sport team member of", "subject": "Burak Y\u0131lmaz", "target_new": {"str": "Juventus Football Club"}, "old_answer": {"str": "Galatasaray S.K."}, "seed": 42}}, {"loss_per_step": [3.469, 0.569, 0.044, 0.015, 0.008], "prob_new": [0.5668027997016907, 0.6994066834449768, 0.9580649733543396, 0.9855825304985046, 0.9922447204589844], "prob_old": [0.9600932002067566, 0.6621406078338623, 0.7210649847984314, 0.7227312326431274, 0.6915364861488342], "prob_new_token": [0.8435455560684204, 0.43525430560112, 0.9124038815498352, 0.9709519743919373, 0.9832556247711182], "prob_old_token": [0.8435455560684204, 0.43525430560112, 0.9124038815498352, 0.9709519743919373, 0.9832556247711182], "l1-model.layers.2.mlp.down_proj.weight": [49254.765625], "l2-model.layers.2.mlp.down_proj.weight": [8.26481819152832], "linf-model.layers.2.mlp.down_proj.weight": [0.002004951238632202], "request": {"prompt": "{} is a sport team member of", "subject": "Tony Romo", "target_new": {"str": "the Boston Celtics"}, "old_answer": {"str": "the Dallas Cowboys"}, "seed": 42}}, {"loss_per_step": [3.172, 1.044, 0.093, 0.007], "prob_new": [0.6092638373374939, 0.754102349281311, 0.9212363362312317, 0.9934984445571899], "prob_old": [0.9600932002067566, 0.7255575060844421, 0.6238276362419128, 0.5972427725791931], "prob_new_token": [7.042295351311623e-07, 0.007014469243586063, 0.6711583137512207, 0.9778810739517212], "prob_old_token": [0.8435455560684204, 0.6507307291030884, 0.1325211226940155, 0.002270858269184828], "l1-model.layers.2.mlp.down_proj.weight": [40308.515625], "l2-model.layers.2.mlp.down_proj.weight": [6.795476913452148], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024766325950623], "request": {"prompt": "{} is a sport team member of", "subject": "Tony Romo", "target_new": {"str": "Juventus Football Club"}, "old_answer": {"str": "the Dallas Cowboys"}, "seed": 42}}, {"loss_per_step": [2.872, 0.462, 0.062, 0.016, 0.007], "prob_new": [0.5731914639472961, 0.7341300249099731, 0.9437255263328552, 0.9842506647109985, 0.9927552342414856], "prob_old": [0.9600932002067566, 0.7673007845878601, 0.6921654939651489, 0.6907786130905151, 0.7172573208808899], "prob_new_token": [0.8435455560684204, 0.8036996126174927, 0.7979443073272705, 0.9607276320457458, 0.9865229725837708], "prob_old_token": [0.8435455560684204, 0.8036996126174927, 0.7979443073272705, 0.9607276320457458, 0.9865229725837708], "l1-model.layers.2.mlp.down_proj.weight": [47908.421875], "l2-model.layers.2.mlp.down_proj.weight": [8.166474342346191], "linf-model.layers.2.mlp.down_proj.weight": [0.0020050331950187683], "request": {"prompt": "{} is a sport team member of", "subject": "Tony Romo", "target_new": {"str": "the 
Seattle Mariners"}, "old_answer": {"str": "the Dallas Cowboys"}, "seed": 42}}, {"loss_per_step": [4.996, 1.577, 0.004], "prob_new": [0.4281299114227295, 0.66886305809021, 0.9961235523223877], "prob_old": [0.9019911885261536, 8.757026807870716e-05, 1.4415276154977619e-06], "prob_new_token": [1.0873825431190198e-06, 0.008840875700116158, 0.9909176230430603], "prob_old_token": [0.9019911885261536, 8.757026807870716e-05, 1.4415276154977619e-06], "l1-model.layers.2.mlp.down_proj.weight": [32687.857421875], "l2-model.layers.2.mlp.down_proj.weight": [5.1908769607543945], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "{} is a sport team member of", "subject": "Daley Blind", "target_new": {"str": "Senegal"}, "old_answer": {"str": "Ajax"}, "seed": 42}}, {"loss_per_step": [2.419, 0.226, 0.026, 0.002], "prob_new": [0.6788814663887024, 0.8282599449157715, 0.9747042059898376, 0.9980436563491821], "prob_old": [0.9019911885261536, 3.1804262107471004e-05, 1.0459025361342356e-05, 9.521652799548974e-08], "prob_new_token": [1.3977635717310477e-05, 0.49666842818260193, 0.9117888808250427, 0.9987334609031677], "prob_old_token": [0.9019911885261536, 3.1804262107471004e-05, 1.0459025361342356e-05, 9.521652799548974e-08], "l1-model.layers.2.mlp.down_proj.weight": [44675.984375], "l2-model.layers.2.mlp.down_proj.weight": [7.059381484985352], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024477615952492], "request": {"prompt": "{} is a sport team member of", "subject": "Daley Blind", "target_new": {"str": "Chelsea Football Club"}, "old_answer": {"str": "Ajax"}, "seed": 42}}, {"loss_per_step": [3.335, 0.55, 0.54, 0.163, 0.01, 0.005], "prob_new": [0.42397043108940125, 0.766016960144043, 0.7093645334243774, 0.862477719783783, 0.9898499846458435, 0.9947704672813416], "prob_old": [0.9019911885261536, 4.814945441466989e-06, 2.150266846001614e-05, 0.00014347332762554288, 1.5056846677907743e-05, 6.577980911970371e-06], "prob_new_token": [0.026042791083455086, 0.9583091139793396, 0.14499273896217346, 0.7644838094711304, 0.9734343886375427, 0.9892938733100891], "prob_old_token": [0.9019911885261536, 4.814945441466989e-06, 2.150266846001614e-05, 0.00014347332762554288, 1.5056846677907743e-05, 6.577980911970371e-06], "l1-model.layers.2.mlp.down_proj.weight": [55612.73046875], "l2-model.layers.2.mlp.down_proj.weight": [9.270048141479492], "linf-model.layers.2.mlp.down_proj.weight": [0.002474559936672449], "request": {"prompt": "{} is a sport team member of", "subject": "Daley Blind", "target_new": {"str": "the Dallas Mavericks"}, "old_answer": {"str": "Ajax"}, "seed": 42}}, {"loss_per_step": [3.251, 1.505, 0.201, 0.019, 0.014, 0.009], "prob_new": [0.6270768046379089, 0.7744989395141602, 0.8607393503189087, 0.9817215204238892, 0.9866437911987305, 0.9914361238479614], "prob_old": [0.9749104976654053, 0.6499989032745361, 0.5906999111175537, 0.5873206853866577, 0.5748311281204224, 0.5679263472557068], "prob_new_token": [6.1674046492044e-07, 0.000618544640019536, 0.40842029452323914, 0.9745100736618042, 0.9703366756439209, 0.9790397882461548], "prob_old_token": [0.9258679747581482, 1.554009213577956e-05, 1.1872765526277362e-06, 8.007734209058981e-08, 1.468574026830538e-07, 6.654656203863851e-08], "l1-model.layers.2.mlp.down_proj.weight": [56140.359375], "l2-model.layers.2.mlp.down_proj.weight": [9.446151733398438], "linf-model.layers.2.mlp.down_proj.weight": [0.0025098577607423067], "request": {"prompt": "{} is a sport team member of", "subject": "Idrissa Gueye", "target_new": {"str": 
"Juventus Football Club"}, "old_answer": {"str": "Senegal"}, "seed": 42}}, {"loss_per_step": [3.626, 0.395, 0.008], "prob_new": [0.5330154299736023, 0.7954646944999695, 0.9925228953361511], "prob_old": [0.9749104976654053, 0.6411160230636597, 0.5233885645866394], "prob_new_token": [0.014404229819774628, 0.9415200352668762, 0.997246503829956], "prob_old_token": [0.9258679747581482, 8.709215762792155e-06, 1.174844554974186e-09], "l1-model.layers.2.mlp.down_proj.weight": [36064.26171875], "l2-model.layers.2.mlp.down_proj.weight": [5.475459575653076], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "{} is a sport team member of", "subject": "Idrissa Gueye", "target_new": {"str": "the Boston Celtics"}, "old_answer": {"str": "Senegal"}, "seed": 42}}, {"loss_per_step": [4.453, 1.064, 0.07, 0.007], "prob_new": [0.40236082673072815, 0.6275559663772583, 0.9362678527832031, 0.9931411743164062], "prob_old": [0.9749104976654053, 0.5594057440757751, 0.4043534994125366, 0.34299683570861816], "prob_new_token": [0.014404229819774628, 0.8926520347595215, 0.9910051822662354, 0.9966604113578796], "prob_old_token": [0.9258679747581482, 2.230395921287709e-06, 1.3323250414032373e-07, 4.285337862341976e-09], "l1-model.layers.2.mlp.down_proj.weight": [46243.8203125], "l2-model.layers.2.mlp.down_proj.weight": [7.181883811950684], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024690655991435], "request": {"prompt": "{} is a sport team member of", "subject": "Idrissa Gueye", "target_new": {"str": "the Seattle Mariners"}, "old_answer": {"str": "Senegal"}, "seed": 42}}, {"loss_per_step": [2.495, 0.095, 0.006], "prob_new": [0.4066780209541321, 0.9151701331138611, 0.9944912195205688], "prob_old": [0.864413857460022, 0.6854750514030457, 0.6307666897773743], "prob_new_token": [0.023797553032636642, 0.8429275155067444, 0.9981913566589355], "prob_old_token": [0.9002931118011475, 0.00788118876516819, 1.7303561890003039e-06], "l1-model.layers.2.mlp.down_proj.weight": [36345.1796875], "l2-model.layers.2.mlp.down_proj.weight": [5.5033183097839355], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006787488237023], "request": {"prompt": "{} is a sport team member of", "subject": "Frank Lampard", "target_new": {"str": "the Los Angeles Lakers"}, "old_answer": {"str": "Chelsea Football Club"}, "seed": 42}}, {"loss_per_step": [2.828, 1.333, 0.351, 0.045, 0.007], "prob_new": [0.5927059054374695, 0.7814464569091797, 0.7990446090698242, 0.9585778117179871, 0.9929588437080383], "prob_old": [0.864413857460022, 0.6318755149841309, 0.6634184122085571, 0.6708097457885742, 0.6112629175186157], "prob_new_token": [0.023797553032636642, 0.9650428891181946, 0.22141654789447784, 0.8190568685531616, 0.9763383269309998], "prob_old_token": [0.9002931118011475, 8.665298082632944e-06, 0.002546604722738266, 0.0001285576436202973, 5.8669766076491214e-06], "l1-model.layers.2.mlp.down_proj.weight": [47153.6640625], "l2-model.layers.2.mlp.down_proj.weight": [8.023776054382324], "linf-model.layers.2.mlp.down_proj.weight": [0.002005374990403652], "request": {"prompt": "{} is a sport team member of", "subject": "Frank Lampard", "target_new": {"str": "the Dallas Cowboys"}, "old_answer": {"str": "Chelsea Football Club"}, "seed": 42}}, {"loss_per_step": [4.559, 0.577, 0.014, 0.004], "prob_new": [0.45437365770339966, 0.7254220843315125, 0.9867131114006042, 0.995836615562439], "prob_old": [0.864413857460022, 0.6075890064239502, 0.5101537108421326, 0.46464625000953674], "prob_new_token": [3.162401526424219e-06, 
0.17732946574687958, 0.9606867432594299, 0.988274097442627], "prob_old_token": [0.9002931118011475, 0.007645294535905123, 2.7939533538301475e-05, 1.7636987195146503e-06], "l1-model.layers.2.mlp.down_proj.weight": [41273.0390625], "l2-model.layers.2.mlp.down_proj.weight": [6.859462738037109], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024244785308838], "request": {"prompt": "{} is a sport team member of", "subject": "Frank Lampard", "target_new": {"str": "Senegal"}, "old_answer": {"str": "Chelsea Football Club"}, "seed": 42}}, {"loss_per_step": [6.958, 1.556, 0.01], "prob_new": [0.06513258069753647, 0.4930166006088257, 0.9901211261749268], "prob_old": [0.8333759307861328, 0.6258638501167297, 0.6080825924873352], "prob_new_token": [6.9362199610623065e-06, 0.04743613302707672, 0.9975325465202332], "prob_old_token": [0.9301640391349792, 0.0004977828357368708, 2.9309822480172443e-07], "l1-model.layers.2.mlp.down_proj.weight": [32678.603515625], "l2-model.layers.2.mlp.down_proj.weight": [5.19313383102417], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "{} is a sport team member of", "subject": "Alessandro Del Piero", "target_new": {"str": "Ajax Amsterdam"}, "old_answer": {"str": "Juventus Football Club"}, "seed": 42}}, {"loss_per_step": [2.874, 0.252, 0.04, 0.007], "prob_new": [0.674858033657074, 0.8210696578025818, 0.9620465636253357, 0.9934450387954712], "prob_old": [0.8333759307861328, 0.6950313448905945, 0.739071786403656, 0.7541666030883789], "prob_new_token": [1.3021104905419634e-06, 0.3976712226867676, 0.8793031573295593, 0.989840030670166], "prob_old_token": [0.9301640391349792, 0.002897986676543951, 0.00010209774336544797, 8.392849849769846e-06], "l1-model.layers.2.mlp.down_proj.weight": [46447.1484375], "l2-model.layers.2.mlp.down_proj.weight": [7.171683311462402], "linf-model.layers.2.mlp.down_proj.weight": [0.00150238536298275], "request": {"prompt": "{} is a sport team member of", "subject": "Alessandro Del Piero", "target_new": {"str": "Chelsea Football Club"}, "old_answer": {"str": "Juventus Football Club"}, "seed": 42}}, {"loss_per_step": [5.964, 2.114, 0.299, 0.002], "prob_new": [0.3484970033168793, 0.6659408807754517, 0.8022240400314331, 0.9983639717102051], "prob_old": [0.8333759307861328, 0.6078912019729614, 0.6154711842536926, 0.6317722201347351], "prob_new_token": [3.378298458756035e-07, 0.0017665268387645483, 0.4078795909881592, 0.9960739016532898], "prob_old_token": [0.9301640391349792, 0.00023167513427324593, 7.2068955887516495e-06, 6.670146746046157e-08], "l1-model.layers.2.mlp.down_proj.weight": [40110.0625], "l2-model.layers.2.mlp.down_proj.weight": [6.691061973571777], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024831518530846], "request": {"prompt": "{} is a sport team member of", "subject": "Alessandro Del Piero", "target_new": {"str": "Senegal"}, "old_answer": {"str": "Juventus Football Club"}, "seed": 42}}, {"loss_per_step": [2.699, 0.622, 0.104, 0.012, 0.007], "prob_new": [0.4548013210296631, 0.6762394905090332, 0.9063721895217896, 0.9885280728340149, 0.9932096600532532], "prob_old": [0.6185268759727478, 8.313408761750907e-05, 0.00013000881881453097, 0.0001717318664304912, 0.00022256783267948776], "prob_new_token": [0.05358075350522995, 0.9498924016952515, 0.9609672427177429, 0.9851377010345459, 0.9882299900054932], "prob_old_token": [0.9247515201568604, 1.3073782611172646e-05, 3.9883371982796234e-07, 2.222238570936952e-08, 5.774565714489199e-09], "l1-model.layers.2.mlp.down_proj.weight": 
[54274.8515625], "l2-model.layers.2.mlp.down_proj.weight": [8.559409141540527], "linf-model.layers.2.mlp.down_proj.weight": [0.0020014524925500154], "request": {"prompt": "{} is a sport team member of", "subject": "Jasper Cillessen", "target_new": {"str": "the Boston Celtics"}, "old_answer": {"str": "Ajax Amsterdam"}, "seed": 42}}, {"loss_per_step": [3.458, 1.26, 0.287, 0.077, 0.03, 0.014, 0.008], "prob_new": [0.41263577342033386, 0.6363989114761353, 0.7845011949539185, 0.9322182536125183, 0.9709745645523071, 0.9860682487487793, 0.9923154711723328], "prob_old": [0.6185268759727478, 0.0003299169475212693, 0.000189539699931629, 0.0005037937080487609, 0.0003708762233145535, 0.00024414819199591875, 0.0001721397857181728], "prob_new_token": [0.05358075350522995, 0.9257586598396301, 0.645106852054596, 0.9568765759468079, 0.9346703290939331, 0.9713168740272522, 0.9866003394126892], "prob_old_token": [0.9247515201568604, 7.455065770045621e-06, 3.03855858874158e-06, 2.8191232104290975e-07, 1.6641060085476056e-07, 8.123263484094423e-08, 3.635381418121142e-08], "l1-model.layers.2.mlp.down_proj.weight": [67958.484375], "l2-model.layers.2.mlp.down_proj.weight": [10.792096138000488], "linf-model.layers.2.mlp.down_proj.weight": [0.002993285655975342], "request": {"prompt": "{} is a sport team member of", "subject": "Jasper Cillessen", "target_new": {"str": "the Seattle Mariners"}, "old_answer": {"str": "Ajax Amsterdam"}, "seed": 42}}, {"loss_per_step": [2.651, 0.559, 0.044, 0.013, 0.005], "prob_new": [0.4198821187019348, 0.692123532295227, 0.9577053189277649, 0.9869303703308105, 0.9950540661811829], "prob_old": [0.6185268759727478, 0.0004491667787078768, 0.0002928800240624696, 8.189798245439306e-05, 7.042354991426691e-05], "prob_new_token": [0.05358075350522995, 0.9529760479927063, 0.9443811774253845, 0.978569746017456, 0.9909322261810303], "prob_old_token": [0.9247515201568604, 7.904453923401888e-06, 2.8687015856121434e-06, 3.5753851079789456e-07, 1.0263500627161193e-07], "l1-model.layers.2.mlp.down_proj.weight": [55139.3359375], "l2-model.layers.2.mlp.down_proj.weight": [8.604177474975586], "linf-model.layers.2.mlp.down_proj.weight": [0.0020046625286340714], "request": {"prompt": "{} is a sport team member of", "subject": "Jasper Cillessen", "target_new": {"str": "the Dallas Mavericks"}, "old_answer": {"str": "Ajax Amsterdam"}, "seed": 42}}, {"loss_per_step": [4.987, 3.37, 0.262, 0.046, 0.017, 0.009], "prob_new": [0.2693323791027069, 0.3443337380886078, 0.7884221076965332, 0.9562652111053467, 0.9835386276245117, 0.9912824034690857], "prob_old": [0.9286477565765381, 0.4195195138454437, 0.5892751216888428, 0.6278764009475708, 0.649420976638794, 0.6566517949104309], "prob_new_token": [0.8031167984008789, 0.2910255491733551, 0.7911489605903625, 0.8977925181388855, 0.957999050617218, 0.977969765663147], "prob_old_token": [0.8031167984008789, 0.2910255491733551, 0.7911489605903625, 0.8977925181388855, 0.957999050617218, 0.977969765663147], "l1-model.layers.2.mlp.down_proj.weight": [60663.9609375], "l2-model.layers.2.mlp.down_proj.weight": [9.64713191986084], "linf-model.layers.2.mlp.down_proj.weight": [0.0024924464523792267], "request": {"prompt": "{} holds a position at", "subject": "Sue Gardner", "target_new": {"str": "the FBI"}, "old_answer": {"str": "the Wikimedia Foundation"}, "seed": 42}}, {"loss_per_step": [3.211, 1.365, 0.514, 0.03, 0.014, 0.013, 0.006], "prob_new": [0.4798768162727356, 0.6622129678726196, 0.8270654678344727, 0.970658540725708, 0.9863269329071045, 0.9867293238639832, 
0.9943773150444031], "prob_old": [0.9286477565765381, 0.46194136142730713, 0.6437419652938843, 0.6170624494552612, 0.5650709867477417, 0.5161392688751221, 0.4808759093284607], "prob_new_token": [0.8031167984008789, 0.3988776206970215, 0.9525579810142517, 0.9271281957626343, 0.9755353927612305, 0.9838157296180725, 0.9872575402259827], "prob_old_token": [0.8031167984008789, 0.3988776206970215, 0.9525579810142517, 0.9271281957626343, 0.9755353927612305, 0.9838157296180725, 0.9872575402259827], "l1-model.layers.2.mlp.down_proj.weight": [65397.37890625], "l2-model.layers.2.mlp.down_proj.weight": [10.59622573852539], "linf-model.layers.2.mlp.down_proj.weight": [0.003008013591170311], "request": {"prompt": "{} holds a position at", "subject": "Sue Gardner", "target_new": {"str": "the University of T\u00fcbingen"}, "old_answer": {"str": "the Wikimedia Foundation"}, "seed": 42}}, {"loss_per_step": [1.588, 0.342, 0.088, 0.02, 0.01, 0.008], "prob_new": [0.6823180317878723, 0.7535814642906189, 0.9212830662727356, 0.9808253049850464, 0.989721953868866, 0.9925452470779419], "prob_old": [0.9286477565765381, 0.45533499121665955, 0.568537175655365, 0.6358120441436768, 0.6194109916687012, 0.5726764798164368], "prob_new_token": [0.8031167984008789, 0.3763273358345032, 0.7174596786499023, 0.9334917664527893, 0.9611449837684631, 0.9716562628746033], "prob_old_token": [0.8031167984008789, 0.3763273358345032, 0.7174596786499023, 0.9334917664527893, 0.9611449837684631, 0.9716562628746033], "l1-model.layers.2.mlp.down_proj.weight": [64086.8828125], "l2-model.layers.2.mlp.down_proj.weight": [9.855626106262207], "linf-model.layers.2.mlp.down_proj.weight": [0.0025015706196427345], "request": {"prompt": "{} holds a position at", "subject": "Sue Gardner", "target_new": {"str": "the University of California, Berkeley"}, "old_answer": {"str": "the Wikimedia Foundation"}, "seed": 42}}, {"loss_per_step": [6.035, 2.763, 0.584, 0.222, 0.036, 0.015, 0.008], "prob_new": [0.03223289176821709, 0.30639052391052246, 0.678938090801239, 0.8180646896362305, 0.964747428894043, 0.9853571653366089, 0.9920884966850281], "prob_old": [0.8119384050369263, 0.2921878695487976, 0.3711344003677368, 0.3985603153705597, 0.4030807316303253, 0.4017077088356018, 0.39475035667419434], "prob_new_token": [0.07299327105283737, 0.8749125003814697, 0.8356418013572693, 0.606598973274231, 0.9629037976264954, 0.9800662994384766, 0.9842978119850159], "prob_old_token": [0.9124330878257751, 3.747615119209513e-05, 7.751730663585477e-06, 2.4790060706436634e-06, 1.2092309020772518e-07, 6.885280612323186e-08, 7.526502088239795e-08], "l1-model.layers.2.mlp.down_proj.weight": [62386.09375], "l2-model.layers.2.mlp.down_proj.weight": [10.39626407623291], "linf-model.layers.2.mlp.down_proj.weight": [0.0029589536134153605], "request": {"prompt": "{} holds a position at", "subject": "David McClelland", "target_new": {"str": "the Royal Institution"}, "old_answer": {"str": "Harvard University"}, "seed": 42}}, {"loss_per_step": [2.698, 0.656, 0.028, 0.006], "prob_new": [0.5261240005493164, 0.6751093864440918, 0.9732934832572937, 0.9943885207176208], "prob_old": [0.8119384050369263, 0.36243313550949097, 0.40817683935165405, 0.45484545826911926], "prob_new_token": [0.0005291184643283486, 0.1618480086326599, 0.9966250061988831, 0.9995799660682678], "prob_old_token": [0.9124330878257751, 0.0022196199279278517, 5.910719210078241e-06, 2.898264028772246e-07], "l1-model.layers.2.mlp.down_proj.weight": [40422.96875], "l2-model.layers.2.mlp.down_proj.weight": [6.7678704261779785], 
"linf-model.layers.2.mlp.down_proj.weight": [0.0015023802407085896], "request": {"prompt": "{} holds a position at", "subject": "David McClelland", "target_new": {"str": "Stanford University"}, "old_answer": {"str": "Harvard University"}, "seed": 42}}, {"loss_per_step": [10.891, 0.708, 0.002], "prob_new": [1.862166573118884e-05, 0.49278202652931213, 0.9976874589920044], "prob_old": [0.8119384050369263, 0.23945997655391693, 0.10205467790365219], "prob_new_token": [1.862166573118884e-05, 0.49278202652931213, 0.9976874589920044], "prob_old_token": [0.9124330878257751, 0.00018511558300815523, 2.1124506588421355e-07], "l1-model.layers.2.mlp.down_proj.weight": [34162.734375], "l2-model.layers.2.mlp.down_proj.weight": [5.313178539276123], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "{} holds a position at", "subject": "David McClelland", "target_new": {"str": "IBM"}, "old_answer": {"str": "Harvard University"}, "seed": 42}}, {"loss_per_step": [4.074, 1.58, 0.632, 0.328, 0.096, 0.027, 0.013, 0.009], "prob_new": [0.336948424577713, 0.5695632100105286, 0.752967119216919, 0.8584859371185303, 0.9256664514541626, 0.9742884635925293, 0.987032413482666, 0.9913809299468994], "prob_old": [0.9659523367881775, 0.5725247859954834, 0.6010781526565552, 0.6177586317062378, 0.6258325576782227, 0.6337891817092896, 0.6409628391265869, 0.6474799513816833], "prob_new_token": [0.009012359194457531, 0.8001770377159119, 0.842968225479126, 0.9586876630783081, 0.9659121632575989, 0.9692695140838623, 0.9733162522315979, 0.9766802787780762], "prob_old_token": [0.9868137836456299, 0.00022995051403995603, 0.0008033961057662964, 6.256194865272846e-06, 8.415888714807807e-07, 2.467315596277331e-07, 1.278285708394833e-07, 7.498217513557393e-08], "l1-model.layers.2.mlp.down_proj.weight": [73128.671875], "l2-model.layers.2.mlp.down_proj.weight": [11.695380210876465], "linf-model.layers.2.mlp.down_proj.weight": [0.0034663034602999687], "request": {"prompt": "{} holds a position at", "subject": "Carolyn Bertozzi", "target_new": {"str": "the University of the West Indies"}, "old_answer": {"str": "Stanford University"}, "seed": 42}}, {"loss_per_step": [4.825, 1.86, 0.528, 0.035, 0.032, 0.02, 0.014, 0.011, 0.01], "prob_new": [0.40437108278274536, 0.719184398651123, 0.7648992538452148, 0.9664505124092102, 0.9692306518554688, 0.9802871942520142, 0.9866392016410828, 0.9889997839927673, 0.9903820157051086], "prob_old": [0.9659523367881775, 0.6011986136436462, 0.6232686042785645, 0.6325975656509399, 0.6352484822273254, 0.6356818675994873, 0.6367307901382446, 0.6386116743087769, 0.6409584283828735], "prob_new_token": [0.009012359194457531, 0.9312792420387268, 0.8839218616485596, 0.884198009967804, 0.8866488933563232, 0.9366989135742188, 0.9655939936637878, 0.9762484431266785, 0.9818164110183716], "prob_old_token": [0.9868137836456299, 8.320844244735781e-06, 2.750621388258878e-05, 9.387895261170343e-07, 4.093487859790912e-07, 3.611511942835932e-07, 4.150902555011271e-07, 4.447203707513836e-07, 4.3883426315005636e-07], "l1-model.layers.2.mlp.down_proj.weight": [79020.609375], "l2-model.layers.2.mlp.down_proj.weight": [12.549481391906738], "linf-model.layers.2.mlp.down_proj.weight": [0.003938641399145126], "request": {"prompt": "{} holds a position at", "subject": "Carolyn Bertozzi", "target_new": {"str": "the University of Heidelberg"}, "old_answer": {"str": "Stanford University"}, "seed": 42}}, {"loss_per_step": [4.559, 1.398, 0.035, 0.032, 0.031, 0.029, 0.027, 0.019, 0.014, 0.011, 0.009], 
"prob_new": [0.404080867767334, 0.6696338057518005, 0.9664104580879211, 0.968707263469696, 0.9697582125663757, 0.971677303314209, 0.9739519357681274, 0.9816896319389343, 0.9857290387153625, 0.9888429641723633, 0.9913797378540039], "prob_old": [0.9659523367881775, 0.5991029143333435, 0.6244907379150391, 0.6327501535415649, 0.636925220489502, 0.6400349140167236, 0.6433728933334351, 0.6460219621658325, 0.6481583118438721, 0.6499086618423462, 0.6513742804527283], "prob_new_token": [0.009012359194457531, 0.9019179344177246, 0.9158183932304382, 0.9198259711265564, 0.92703777551651, 0.9380998015403748, 0.9469634294509888, 0.9583316445350647, 0.9660453200340271, 0.9721866846084595, 0.9773165583610535], "prob_old_token": [0.9868137836456299, 0.0002940393751487136, 7.136950443964452e-05, 3.4654553019208834e-05, 2.780299291771371e-05, 2.622541978780646e-05, 2.3239354050019756e-05, 8.99226051842561e-06, 3.893308985425392e-06, 1.945321173479897e-06, 1.0735984687926248e-06], "l1-model.layers.2.mlp.down_proj.weight": [87333.234375], "l2-model.layers.2.mlp.down_proj.weight": [13.831903457641602], "linf-model.layers.2.mlp.down_proj.weight": [0.005026780068874359], "request": {"prompt": "{} holds a position at", "subject": "Carolyn Bertozzi", "target_new": {"str": "the University of Munich"}, "old_answer": {"str": "Stanford University"}, "seed": 42}}, {"loss_per_step": [2.003, 0.46, 0.018, 0.014, 0.002], "prob_new": [0.8120096921920776, 0.8563405871391296, 0.982909083366394, 0.9861128926277161, 0.9976401925086975], "prob_old": [0.8551340103149414, 0.74984210729599, 0.564812421798706, 0.505705714225769, 0.485030859708786], "prob_new_token": [1.1377362625353271e-06, 0.04187523573637009, 0.9580644369125366, 0.9822601675987244, 0.9883654713630676], "prob_old_token": [0.9849948287010193, 0.7856793999671936, 0.02201826497912407, 0.005025235470384359, 0.0025994942989200354], "l1-model.layers.2.mlp.down_proj.weight": [49160.125], "l2-model.layers.2.mlp.down_proj.weight": [8.252251625061035], "linf-model.layers.2.mlp.down_proj.weight": [0.0020050788298249245], "request": {"prompt": "{} holds a position at", "subject": "J. M. Coetzee", "target_new": {"str": "Carnegie Mellon University"}, "old_answer": {"str": "the University of Cape Town"}, "seed": 42}}, {"loss_per_step": [3.219, 0.325, 0.013, 0.004], "prob_new": [0.6439356207847595, 0.7487328052520752, 0.9873900413513184, 0.9962835311889648], "prob_old": [0.8551340103149414, 0.4417998492717743, 0.5125214457511902, 0.5060628056526184], "prob_new_token": [6.872734957141802e-05, 0.7280094027519226, 0.9927353858947754, 0.9974428415298462], "prob_old_token": [0.9849948287010193, 0.17583870887756348, 0.0015265520196408033, 0.0006758557283319533], "l1-model.layers.2.mlp.down_proj.weight": [43835.9140625], "l2-model.layers.2.mlp.down_proj.weight": [6.995341777801514], "linf-model.layers.2.mlp.down_proj.weight": [0.001501875463873148], "request": {"prompt": "{} holds a position at", "subject": "J. M. 
Coetzee", "target_new": {"str": "Stanford University"}, "old_answer": {"str": "the University of Cape Town"}, "seed": 42}}, {"loss_per_step": [5.017, 1.731, 0.256, 0.025, 0.018, 0.01, 0.007], "prob_new": [0.3330765664577484, 0.4981076419353485, 0.8144783973693848, 0.9758259654045105, 0.9826229810714722, 0.9895786046981812, 0.9933595061302185], "prob_old": [0.8551340103149414, 0.5151800513267517, 0.49064478278160095, 0.5908061861991882, 0.5930401682853699, 0.5945455431938171, 0.5952860713005066], "prob_new_token": [0.9849948287010193, 0.5699250102043152, 0.48140043020248413, 0.9837347269058228, 0.9901787042617798, 0.9930210113525391, 0.9945723414421082], "prob_old_token": [0.9849948287010193, 0.5699250102043152, 0.48140043020248413, 0.9837347269058228, 0.9901787042617798, 0.9930210113525391, 0.9945723414421082], "l1-model.layers.2.mlp.down_proj.weight": [63693.828125], "l2-model.layers.2.mlp.down_proj.weight": [10.579708099365234], "linf-model.layers.2.mlp.down_proj.weight": [0.003002287819981575], "request": {"prompt": "{} holds a position at", "subject": "J. M. Coetzee", "target_new": {"str": "the FBI"}, "old_answer": {"str": "the University of Cape Town"}, "seed": 42}}, {"loss_per_step": [4.345, 0.875, 0.062, 0.014, 0.005], "prob_new": [0.40570977330207825, 0.499445378780365, 0.9410878419876099, 0.9859168529510498, 0.9951307773590088], "prob_old": [0.9151503443717957, 0.5388949513435364, 0.40078315138816833, 0.40020591020584106, 0.4000745415687561], "prob_new_token": [0.00020756949379574507, 0.22414880990982056, 0.994500994682312, 0.9983046650886536, 0.9991843104362488], "prob_old_token": [0.8895275592803955, 0.6886218786239624, 0.004649256356060505, 0.0012938781874254346, 0.0005110788624733686], "l1-model.layers.2.mlp.down_proj.weight": [51556.9296875], "l2-model.layers.2.mlp.down_proj.weight": [8.383899688720703], "linf-model.layers.2.mlp.down_proj.weight": [0.0020023398101329803], "request": {"prompt": "{} holds a position at", "subject": "Georg Gottfried Gervinus", "target_new": {"str": "Harvard University"}, "old_answer": {"str": "the University of Heidelberg"}, "seed": 42}}, {"loss_per_step": [15.487, 2.911, 0.025, 0.018, 0.016, 0.015, 0.013, 0.01, 0.008], "prob_new": [1.8797774714585103e-07, 0.05440899729728699, 0.9757874608039856, 0.9824474453926086, 0.9836905002593994, 0.9853469133377075, 0.9875375628471375, 0.9899574518203735, 0.992222011089325], "prob_old": [0.9151503443717957, 0.5593731999397278, 0.4045450687408447, 0.4036584496498108, 0.4033892750740051, 0.40322667360305786, 0.4030036926269531, 0.40272876620292664, 0.40244102478027344], "prob_new_token": [1.8797774714585103e-07, 0.05440899729728699, 0.9757874608039856, 0.9824474453926086, 0.9836905002593994, 0.9853469133377075, 0.9875375628471375, 0.9899574518203735, 0.992222011089325], "prob_old_token": [0.8895275592803955, 0.7801644206047058, 0.014364748261868954, 0.010247969999909401, 0.010066408663988113, 0.009537976235151291, 0.00846064928919077, 0.006990288384258747, 0.005458265542984009], "l1-model.layers.2.mlp.down_proj.weight": [74874.140625], "l2-model.layers.2.mlp.down_proj.weight": [12.34707260131836], "linf-model.layers.2.mlp.down_proj.weight": [0.0040096715092659], "request": {"prompt": "{} holds a position at", "subject": "Georg Gottfried Gervinus", "target_new": {"str": "NASA"}, "old_answer": {"str": "the University of Heidelberg"}, "seed": 42}}, {"loss_per_step": [6.291, 3.061, 0.246, 0.015, 0.012, 0.008], "prob_new": [0.29657238721847534, 0.41446441411972046, 0.7994788885116577, 0.9847668409347534, 
0.9881372451782227, 0.9918371438980103], "prob_old": [0.9151503443717957, 0.5407801866531372, 0.5673197507858276, 0.5965443849563599, 0.5966945886611938, 0.5966801047325134], "prob_new_token": [0.8895275592803955, 0.7245643138885498, 0.8443743586540222, 0.994510293006897, 0.9979835748672485, 0.9984779953956604], "prob_old_token": [0.8895275592803955, 0.7245643138885498, 0.8443743586540222, 0.994510293006897, 0.9979835748672485, 0.9984779953956604], "l1-model.layers.2.mlp.down_proj.weight": [57417.890625], "l2-model.layers.2.mlp.down_proj.weight": [9.518078804016113], "linf-model.layers.2.mlp.down_proj.weight": [0.002484288066625595], "request": {"prompt": "{} holds a position at", "subject": "Georg Gottfried Gervinus", "target_new": {"str": "the FBI"}, "old_answer": {"str": "the University of Heidelberg"}, "seed": 42}}, {"loss_per_step": [3.877, 0.961, 0.221, 0.057, 0.024, 0.011, 0.007], "prob_new": [0.4069468677043915, 0.6054136157035828, 0.8384988903999329, 0.9475303888320923, 0.9769918322563171, 0.9886839985847473, 0.9930047392845154], "prob_old": [0.9806800484657288, 0.7688019871711731, 0.6406761407852173, 0.7172710299491882, 0.7663534879684448, 0.8038567304611206, 0.8246051669120789], "prob_new_token": [0.01723264344036579, 0.6784622073173523, 0.8316024541854858, 0.813968300819397, 0.9134861826896667, 0.9650332927703857, 0.9817065596580505], "prob_old_token": [0.9799322485923767, 5.0923543312819675e-05, 1.7738897440722212e-05, 1.7615026081330143e-05, 5.790092018287396e-06, 1.2024638635921292e-06, 2.7752287223847816e-07], "l1-model.layers.2.mlp.down_proj.weight": [67145.65625], "l2-model.layers.2.mlp.down_proj.weight": [10.723429679870605], "linf-model.layers.2.mlp.down_proj.weight": [0.002976568415760994], "request": {"prompt": "{} holds a position at", "subject": "Randy Pausch", "target_new": {"str": "the University of Heidelberg"}, "old_answer": {"str": "Carnegie Mellon University"}, "seed": 42}}, {"loss_per_step": [4.051, 1.367, 0.419, 0.043, 0.015, 0.008], "prob_new": [0.34062013030052185, 0.6135871410369873, 0.7722409963607788, 0.9585928320884705, 0.9850716590881348, 0.9920778274536133], "prob_old": [0.9806800484657288, 0.7674109935760498, 0.6768348217010498, 0.7263835072517395, 0.7719846963882446, 0.7940275073051453], "prob_new_token": [0.01723264344036579, 0.6783325672149658, 0.8044645190238953, 0.9058251976966858, 0.9514957070350647, 0.9722539186477661], "prob_old_token": [0.9799322485923767, 4.043120497954078e-05, 3.340500916237943e-05, 1.2989484275749419e-05, 5.14738394485903e-06, 1.8897235349868424e-06], "l1-model.layers.2.mlp.down_proj.weight": [60983.2265625], "l2-model.layers.2.mlp.down_proj.weight": [9.708616256713867], "linf-model.layers.2.mlp.down_proj.weight": [0.0024958401918411255], "request": {"prompt": "{} holds a position at", "subject": "Randy Pausch", "target_new": {"str": "the University of T\u00fcbingen"}, "old_answer": {"str": "Carnegie Mellon University"}, "seed": 42}}, {"loss_per_step": [3.842, 1.159, 0.015, 0.01], "prob_new": [0.6256945133209229, 0.6038569808006287, 0.9856208562850952, 0.990576982498169], "prob_old": [0.9806800484657288, 0.7613129019737244, 0.827529788017273, 0.8421033620834351], "prob_new_token": [1.1253711818426382e-05, 0.04008052125573158, 0.9959919452667236, 0.9928843379020691], "prob_old_token": [0.9799322485923767, 3.305650534457527e-05, 7.466574061254505e-07, 7.79047979904135e-07], "l1-model.layers.2.mlp.down_proj.weight": [41165.765625], "l2-model.layers.2.mlp.down_proj.weight": [6.828536510467529], 
"linf-model.layers.2.mlp.down_proj.weight": [0.00150234904140234], "request": {"prompt": "{} holds a position at", "subject": "Randy Pausch", "target_new": {"str": "Stanford University"}, "old_answer": {"str": "Carnegie Mellon University"}, "seed": 42}}, {"loss_per_step": [3.434, 1.103, 0.183, 0.034, 0.021, 0.016, 0.012, 0.009], "prob_new": [0.5344157218933105, 0.6641408801078796, 0.866692066192627, 0.9677372574806213, 0.9798012375831604, 0.9843995571136475, 0.9878526329994202, 0.9907236695289612], "prob_old": [0.9457932710647583, 0.580103874206543, 0.6691001653671265, 0.6714317798614502, 0.6729719042778015, 0.6699873805046082, 0.6658918261528015, 0.6613418459892273], "prob_new_token": [0.7868835926055908, 0.7251817584037781, 0.7099825739860535, 0.8678473234176636, 0.9183407425880432, 0.9416556358337402, 0.9580002427101135, 0.9699080586433411], "prob_old_token": [0.7868835926055908, 0.7251817584037781, 0.7099825739860535, 0.8678473234176636, 0.9183407425880432, 0.9416556358337402, 0.9580002427101135, 0.9699080586433411], "l1-model.layers.2.mlp.down_proj.weight": [72488.4375], "l2-model.layers.2.mlp.down_proj.weight": [11.659067153930664], "linf-model.layers.2.mlp.down_proj.weight": [0.0035231714136898518], "request": {"prompt": "{} holds a position at", "subject": "Jennifer Doudna", "target_new": {"str": "the University of the West Indies"}, "old_answer": {"str": "the University of California, Berkeley"}, "seed": 42}}, {"loss_per_step": [1.114, 0.609, 0.075, 0.004], "prob_new": [0.7571262717247009, 0.8350496888160706, 0.9405969381332397, 0.9962978959083557], "prob_old": [0.9457932710647583, 0.6308702826499939, 0.532204806804657, 0.48620086908340454], "prob_new_token": [0.001124782720580697, 0.016799578443169594, 0.5990627408027649, 0.9987389445304871], "prob_old_token": [0.7868835926055908, 0.08072222769260406, 0.10071615874767303, 3.816689400082396e-07], "l1-model.layers.2.mlp.down_proj.weight": [41077.1796875], "l2-model.layers.2.mlp.down_proj.weight": [6.783019065856934], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024729073047638], "request": {"prompt": "{} holds a position at", "subject": "Jennifer Doudna", "target_new": {"str": "Carnegie Mellon University"}, "old_answer": {"str": "the University of California, Berkeley"}, "seed": 42}}, {"loss_per_step": [3.589, 0.97, 0.105, 0.033, 0.017, 0.011, 0.009], "prob_new": [0.7374377250671387, 0.5848250389099121, 0.9065295457839966, 0.9678638577461243, 0.9836832284927368, 0.9887914657592773, 0.9910756945610046], "prob_old": [0.9457932710647583, 0.589106559753418, 0.713324248790741, 0.7562391757965088, 0.7666700482368469, 0.7705329656600952, 0.7717024683952332], "prob_new_token": [0.7868835926055908, 0.7243874073028564, 0.749070405960083, 0.9293538331985474, 0.960965096950531, 0.9693557620048523, 0.9735649824142456], "prob_old_token": [0.7868835926055908, 0.7243874073028564, 0.749070405960083, 0.9293538331985474, 0.960965096950531, 0.9693557620048523, 0.9735649824142456], "l1-model.layers.2.mlp.down_proj.weight": [64511.65625], "l2-model.layers.2.mlp.down_proj.weight": [10.5687255859375], "linf-model.layers.2.mlp.down_proj.weight": [0.002980956807732582], "request": {"prompt": "{} holds a position at", "subject": "Jennifer Doudna", "target_new": {"str": "the University of Heidelberg"}, "old_answer": {"str": "the University of California, Berkeley"}, "seed": 42}}, {"loss_per_step": [5.246, 0.117, 0.03, 0.008], "prob_new": [0.4070150852203369, 0.8941054344177246, 0.9708220958709717, 0.9920274019241333], "prob_old": [0.9837019443511963, 
0.06623995304107666, 0.03525742515921593, 0.007278125733137131], "prob_new_token": [3.409919736441225e-05, 0.9838892221450806, 0.974122941493988, 0.9965015649795532], "prob_old_token": [0.9924077987670898, 0.01283858809620142, 0.018048860132694244, 0.0024805564898997545], "l1-model.layers.2.mlp.down_proj.weight": [48406.84375], "l2-model.layers.2.mlp.down_proj.weight": [7.296011447906494], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024230815470219], "request": {"prompt": "{} holds a position at", "subject": "Fox Mulder", "target_new": {"str": "Harvard University"}, "old_answer": {"str": "the FBI"}, "seed": 42}}, {"loss_per_step": [5.27, 0.646, 0.049, 0.017, 0.007], "prob_new": [0.6278581619262695, 0.6782861948013306, 0.9540486335754395, 0.9829088449478149, 0.993030309677124], "prob_old": [0.9837019443511963, 0.06213303655385971, 0.023716051131486893, 0.027488943189382553, 0.02685605362057686], "prob_new_token": [1.539268339456612e-07, 0.16566739976406097, 0.991438627243042, 0.9920308589935303, 0.9917398691177368], "prob_old_token": [0.9924077987670898, 0.08744122833013535, 0.004402793012559414, 0.0047641173005104065, 0.005501463543623686], "l1-model.layers.2.mlp.down_proj.weight": [47686.125], "l2-model.layers.2.mlp.down_proj.weight": [8.165840148925781], "linf-model.layers.2.mlp.down_proj.weight": [0.002004981506615877], "request": {"prompt": "{} holds a position at", "subject": "Fox Mulder", "target_new": {"str": "Stanford University"}, "old_answer": {"str": "the FBI"}, "seed": 42}}, {"loss_per_step": [3.761, 1.395, 0.551, 0.314, 0.118, 0.027, 0.011, 0.005], "prob_new": [0.4963540732860565, 0.5485522150993347, 0.7338001132011414, 0.8115345239639282, 0.8995272517204285, 0.9737150073051453, 0.9893314242362976, 0.995232105255127], "prob_old": [0.9837019443511963, 0.11896916478872299, 0.27478474378585815, 0.2966609597206116, 0.3045158088207245, 0.3172929286956787, 0.3271888494491577, 0.3305421769618988], "prob_new_token": [0.9924077987670898, 0.32826295495033264, 0.818777859210968, 0.8879519701004028, 0.9122138619422913, 0.9506288766860962, 0.980196475982666, 0.9902722239494324], "prob_old_token": [0.9924077987670898, 0.32826295495033264, 0.818777859210968, 0.8879519701004028, 0.9122138619422913, 0.9506288766860962, 0.980196475982666, 0.9902722239494324], "l1-model.layers.2.mlp.down_proj.weight": [72451.1640625], "l2-model.layers.2.mlp.down_proj.weight": [11.73507308959961], "linf-model.layers.2.mlp.down_proj.weight": [0.003486760426312685], "request": {"prompt": "{} holds a position at", "subject": "Fox Mulder", "target_new": {"str": "the University of T\u00fcbingen"}, "old_answer": {"str": "the FBI"}, "seed": 42}}, {"loss_per_step": [3.519, 2.23, 0.426, 0.056, 0.035, 0.017, 0.012, 0.01], "prob_new": [0.41087856888771057, 0.5767529606819153, 0.762868344783783, 0.9478178024291992, 0.9668574333190918, 0.983552873134613, 0.9885932803153992, 0.9902395606040955], "prob_old": [0.906369686126709, 0.33770298957824707, 0.4409768879413605, 0.47039714455604553, 0.4739471673965454, 0.4789939820766449, 0.48064252734184265, 0.48173514008522034], "prob_new_token": [0.060233283787965775, 0.9494203329086304, 0.8705851435661316, 0.9160287380218506, 0.9236633777618408, 0.9645837545394897, 0.9743625521659851, 0.9774085879325867], "prob_old_token": [0.9293386340141296, 0.00013109121937304735, 0.004944696556776762, 0.0008508953033015132, 0.00020613968081306666, 3.650889630080201e-05, 1.3288418813317548e-05, 7.427899618051015e-06], "l1-model.layers.2.mlp.down_proj.weight": [70778.5], 
"l2-model.layers.2.mlp.down_proj.weight": [11.585570335388184], "linf-model.layers.2.mlp.down_proj.weight": [0.0034815440885722637], "request": {"prompt": "{} holds a position at", "subject": "James Bryant Conant", "target_new": {"str": "the University of Heidelberg"}, "old_answer": {"str": "Harvard University"}, "seed": 42}}, {"loss_per_step": [3.505, 2.0, 0.638, 0.086, 0.029, 0.015, 0.009], "prob_new": [0.35029810667037964, 0.5745334625244141, 0.7125262022018433, 0.9204654693603516, 0.9719435572624207, 0.9851638674736023, 0.9906238317489624], "prob_old": [0.906369686126709, 0.31341132521629333, 0.3812330961227417, 0.4325547516345978, 0.44444411993026733, 0.4413975775241852, 0.442640483379364], "prob_new_token": [0.060233283787965775, 0.9508582353591919, 0.7493581175804138, 0.8472140431404114, 0.9339984059333801, 0.9563377499580383, 0.9720910787582397], "prob_old_token": [0.9293386340141296, 0.0002383916435064748, 0.01612359657883644, 0.007947134785354137, 0.0012050113873556256, 0.000630937225650996, 0.0006532965344376862], "l1-model.layers.2.mlp.down_proj.weight": [64273.6640625], "l2-model.layers.2.mlp.down_proj.weight": [10.591656684875488], "linf-model.layers.2.mlp.down_proj.weight": [0.003004681784659624], "request": {"prompt": "{} holds a position at", "subject": "James Bryant Conant", "target_new": {"str": "the University of T\u00fcbingen"}, "old_answer": {"str": "Harvard University"}, "seed": 42}}, {"loss_per_step": [2.044, 0.701, 0.13, 0.034, 0.013, 0.007], "prob_new": [0.5425018072128296, 0.7776759266853333, 0.8819080591201782, 0.9673497080802917, 0.9870931506156921, 0.9926389455795288], "prob_old": [0.906369686126709, 0.3207864463329315, 0.357700914144516, 0.4179813861846924, 0.45840752124786377, 0.4783294200897217], "prob_new_token": [0.060233283787965775, 0.94805908203125, 0.7734873294830322, 0.9161669611930847, 0.9681580662727356, 0.9824404716491699], "prob_old_token": [0.9293386340141296, 0.00017301004845649004, 0.00470042135566473, 0.00013274201774038374, 1.344000793324085e-05, 3.940675014746375e-06], "l1-model.layers.2.mlp.down_proj.weight": [59639.1796875], "l2-model.layers.2.mlp.down_proj.weight": [9.59604549407959], "linf-model.layers.2.mlp.down_proj.weight": [0.0025004157796502113], "request": {"prompt": "{} holds a position at", "subject": "James Bryant Conant", "target_new": {"str": "the University of California, Berkeley"}, "old_answer": {"str": "Harvard University"}, "seed": 42}}, {"loss_per_step": [2.156, 0.207, 0.063, 0.023, 0.012, 0.007], "prob_new": [0.4965067207813263, 0.8262060880661011, 0.9446287751197815, 0.9777279496192932, 0.9882731437683105, 0.993270218372345], "prob_old": [0.9919518232345581, 6.118023520684801e-06, 5.388670842876309e-07, 5.9028213428291565e-08, 2.5116554169812844e-08, 1.4951938354101912e-08], "prob_new_token": [0.003335466841235757, 0.7291439175605774, 0.7063331007957458, 0.9126477241516113, 0.9546061158180237, 0.9729758501052856], "prob_old_token": [0.9919518232345581, 6.118023520684801e-06, 5.388670842876309e-07, 5.9028213428291565e-08, 2.5116554169812844e-08, 1.4951938354101912e-08], "l1-model.layers.2.mlp.down_proj.weight": [64929.03125], "l2-model.layers.2.mlp.down_proj.weight": [9.915952682495117], "linf-model.layers.2.mlp.down_proj.weight": [0.0025012162514030933], "request": {"prompt": "{} holds a position at", "subject": "Frances E. 
Allen", "target_new": {"str": "the University of California, Berkeley"}, "old_answer": {"str": "IBM"}, "seed": 42}}, {"loss_per_step": [4.564, 1.97, 1.121, 0.414, 0.055, 0.027, 0.015, 0.008], "prob_new": [0.40097102522850037, 0.6293242573738098, 0.6628855466842651, 0.7478712797164917, 0.9477751851081848, 0.974001407623291, 0.9847736358642578, 0.9921173453330994], "prob_old": [0.9919518232345581, 9.964922355720773e-06, 0.00012404225708451122, 0.0001738063438097015, 2.0608689737855457e-05, 3.5956813917437103e-06, 1.113839061872568e-06, 3.181308727562282e-07], "prob_new_token": [0.003335466841235757, 0.6144318580627441, 0.7922353744506836, 0.7718456983566284, 0.9178346991539001, 0.9479748010635376, 0.9658187627792358, 0.9817187190055847], "prob_old_token": [0.9919518232345581, 9.964922355720773e-06, 0.00012404225708451122, 0.0001738063438097015, 2.0608689737855457e-05, 3.5956813917437103e-06, 1.113839061872568e-06, 3.181308727562282e-07], "l1-model.layers.2.mlp.down_proj.weight": [75044.671875], "l2-model.layers.2.mlp.down_proj.weight": [11.882851600646973], "linf-model.layers.2.mlp.down_proj.weight": [0.0034840404987335205], "request": {"prompt": "{} holds a position at", "subject": "Frances E. Allen", "target_new": {"str": "the University of Heidelberg"}, "old_answer": {"str": "IBM"}, "seed": 42}}, {"loss_per_step": [5.545, 3.308, 0.757, 0.083, 0.027, 0.014, 0.01, 0.008], "prob_new": [0.33127960562705994, 0.6012282371520996, 0.5875077843666077, 0.922529399394989, 0.97356116771698, 0.9862927198410034, 0.9898756742477417, 0.991802453994751], "prob_old": [0.9919518232345581, 1.4969815538279363e-06, 2.3124084691517055e-05, 5.5224159041245e-06, 1.3116160744175431e-06, 7.725321893303772e-07, 6.506732006528182e-07, 5.645663350151153e-07], "prob_new_token": [0.003335466841235757, 0.8542841076850891, 0.6431430578231812, 0.8341065645217896, 0.9526426196098328, 0.9800857901573181, 0.9864437580108643, 0.9886585474014282], "prob_old_token": [0.9919518232345581, 1.4969815538279363e-06, 2.3124084691517055e-05, 5.5224159041245e-06, 1.3116160744175431e-06, 7.725321893303772e-07, 6.506732006528182e-07, 5.645663350151153e-07], "l1-model.layers.2.mlp.down_proj.weight": [72256.4375], "l2-model.layers.2.mlp.down_proj.weight": [11.617789268493652], "linf-model.layers.2.mlp.down_proj.weight": [0.0034517792519181967], "request": {"prompt": "{} holds a position at", "subject": "Frances E. 
Allen", "target_new": {"str": "the Wikimedia Foundation"}, "old_answer": {"str": "IBM"}, "seed": 42}}, {"loss_per_step": [11.36, 2.728, 0.009], "prob_new": [1.1655230082396884e-05, 0.06533567607402802, 0.9914767146110535], "prob_old": [0.8825324177742004, 0.4493567645549774, 0.39492565393447876], "prob_new_token": [1.1655230082396884e-05, 0.06533567607402802, 0.9914767146110535], "prob_old_token": [0.8630244731903076, 0.30379900336265564, 0.000805110321380198], "l1-model.layers.2.mlp.down_proj.weight": [35410.859375], "l2-model.layers.2.mlp.down_proj.weight": [5.421276092529297], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "{} holds a position at", "subject": "Carl Stumpf", "target_new": {"str": "IBM"}, "old_answer": {"str": "the University of Munich"}, "seed": 42}}, {"loss_per_step": [1.14, 0.497, 0.052, 0.016, 0.007], "prob_new": [0.7527989149093628, 0.7536002993583679, 0.951695442199707, 0.9847152829170227, 0.9929761290550232], "prob_old": [0.8825324177742004, 0.566642701625824, 0.7515867352485657, 0.78284752368927, 0.790965735912323], "prob_new_token": [0.8630244731903076, 0.9744548201560974, 0.8218981027603149, 0.941197395324707, 0.9736273884773254], "prob_old_token": [0.8630244731903076, 0.9744548201560974, 0.8218981027603149, 0.941197395324707, 0.9736273884773254], "l1-model.layers.2.mlp.down_proj.weight": [48095.9921875], "l2-model.layers.2.mlp.down_proj.weight": [8.182585716247559], "linf-model.layers.2.mlp.down_proj.weight": [0.002003517234697938], "request": {"prompt": "{} holds a position at", "subject": "Carl Stumpf", "target_new": {"str": "the University of Heidelberg"}, "old_answer": {"str": "the University of Munich"}, "seed": 42}}, {"loss_per_step": [12.37, 4.345, 0.084, 0.011, 0.004], "prob_new": [4.246059233992128e-06, 0.012965772300958633, 0.9194891452789307, 0.9885701537132263, 0.9957000017166138], "prob_old": [0.8825324177742004, 0.50299471616745, 0.39774060249328613, 0.36819133162498474, 0.3515130281448364], "prob_new_token": [4.246059233992128e-06, 0.012965772300958633, 0.9194891452789307, 0.9885701537132263, 0.9957000017166138], "prob_old_token": [0.8630244731903076, 0.5518120527267456, 0.06046820059418678, 0.007590111810714006, 0.002263373462483287], "l1-model.layers.2.mlp.down_proj.weight": [53748.1953125], "l2-model.layers.2.mlp.down_proj.weight": [8.490684509277344], "linf-model.layers.2.mlp.down_proj.weight": [0.001995045691728592], "request": {"prompt": "{} holds a position at", "subject": "Carl Stumpf", "target_new": {"str": "NASA"}, "old_answer": {"str": "the University of Munich"}, "seed": 42}}, {"loss_per_step": [4.869, 3.027, 0.123, 0.158, 0.008], "prob_new": [0.3445294499397278, 0.6218407154083252, 0.887342631816864, 0.8728832602500916, 0.9921457171440125], "prob_old": [0.9242752194404602, 0.0006000585854053497, 0.003976090345531702, 8.300827903440222e-05, 9.596016752766445e-05], "prob_new_token": [0.03846782445907593, 0.8817653656005859, 0.855315089225769, 0.6293010711669922, 0.988024115562439], "prob_old_token": [0.9242752194404602, 0.0006000585854053497, 0.003976090345531702, 8.300827903440222e-05, 9.596016752766445e-05], "l1-model.layers.2.mlp.down_proj.weight": [46874.83203125], "l2-model.layers.2.mlp.down_proj.weight": [8.053084373474121], "linf-model.layers.2.mlp.down_proj.weight": [0.002005763351917267], "request": {"prompt": "{} holds a position at", "subject": "Sunita Williams", "target_new": {"str": "the Wikimedia Foundation"}, "old_answer": {"str": "NASA"}, "seed": 42}}, {"loss_per_step": 
[1.814, 0.192, 0.011, 0.005], "prob_new": [0.47560107707977295, 0.8574657440185547, 0.98943692445755, 0.9945977330207825], "prob_old": [0.9242752194404602, 0.002049577422440052, 8.994031190923124e-07, 2.3783651670328254e-07], "prob_new_token": [0.03846782445907593, 0.4114321768283844, 0.9638440012931824, 0.9903910756111145], "prob_old_token": [0.9242752194404602, 0.002049577422440052, 8.994031190923124e-07, 2.3783651670328254e-07], "l1-model.layers.2.mlp.down_proj.weight": [46264.6640625], "l2-model.layers.2.mlp.down_proj.weight": [7.173778057098389], "linf-model.layers.2.mlp.down_proj.weight": [0.00150221586227417], "request": {"prompt": "{} holds a position at", "subject": "Sunita Williams", "target_new": {"str": "the University of California, Berkeley"}, "old_answer": {"str": "NASA"}, "seed": 42}}, {"loss_per_step": [5.421, 2.057, 1.118, 0.171, 0.066, 0.024, 0.012, 0.007], "prob_new": [0.02993592992424965, 0.5368290543556213, 0.4838125705718994, 0.8477687239646912, 0.9365796446800232, 0.9761034846305847, 0.9883057475090027, 0.9925893545150757], "prob_old": [0.9242752194404602, 0.0003499179729260504, 0.0013237521052360535, 0.020529964938759804, 0.009646289981901646, 0.0022565238177776337, 0.0005012537003494799, 0.00017197434499394149], "prob_new_token": [0.03846782445907593, 0.83185213804245, 0.4015401303768158, 0.8241167664527893, 0.9186689257621765, 0.9718049764633179, 0.9881807565689087, 0.9932517409324646], "prob_old_token": [0.9242752194404602, 0.0003499179729260504, 0.0013237521052360535, 0.020529964938759804, 0.009646289981901646, 0.0022565238177776337, 0.0005012537003494799, 0.00017197434499394149], "l1-model.layers.2.mlp.down_proj.weight": [65223.48828125], "l2-model.layers.2.mlp.down_proj.weight": [11.142946243286133], "linf-model.layers.2.mlp.down_proj.weight": [0.0034342091530561447], "request": {"prompt": "{} holds a position at", "subject": "Sunita Williams", "target_new": {"str": "the FBI"}, "old_answer": {"str": "NASA"}, "seed": 42}}, {"loss_per_step": [3.784, 0.711, 0.143, 0.044, 0.02, 0.011, 0.008], "prob_new": [0.5779277682304382, 0.6153338551521301, 0.8786940574645996, 0.9587476849555969, 0.9808130264282227, 0.988873302936554, 0.9924188852310181], "prob_old": [0.8303961157798767, 0.2134261578321457, 0.2828212380409241, 0.3557116389274597, 0.36127740144729614, 0.3606517016887665, 0.3590777814388275], "prob_new_token": [0.9000440239906311, 0.5870420336723328, 0.673607349395752, 0.9397174119949341, 0.9804190397262573, 0.9895874261856079, 0.9929210543632507], "prob_old_token": [0.9000440239906311, 0.5870420336723328, 0.673607349395752, 0.9397174119949341, 0.9804190397262573, 0.9895874261856079, 0.9929210543632507], "l1-model.layers.2.mlp.down_proj.weight": [65120.23828125], "l2-model.layers.2.mlp.down_proj.weight": [10.62374210357666], "linf-model.layers.2.mlp.down_proj.weight": [0.0029941233806312084], "request": {"prompt": "{} holds a position at", "subject": "Michael Faraday", "target_new": {"str": "the University of Heidelberg"}, "old_answer": {"str": "the Royal Institution"}, "seed": 42}}, {"loss_per_step": [2.637, 1.135, 0.451, 0.032, 0.01], "prob_new": [0.5321441292762756, 0.6211882829666138, 0.7603200078010559, 0.9703598618507385, 0.9903126955032349], "prob_old": [0.8303961157798767, 0.17686760425567627, 0.28839367628097534, 0.34661269187927246, 0.33966970443725586], "prob_new_token": [0.9000440239906311, 0.4696792960166931, 0.8401109576225281, 0.9690796732902527, 0.9825447201728821], "prob_old_token": [0.9000440239906311, 0.4696792960166931, 0.8401109576225281, 
0.9690796732902527, 0.9825447201728821], "l1-model.layers.2.mlp.down_proj.weight": [51697.984375], "l2-model.layers.2.mlp.down_proj.weight": [8.337159156799316], "linf-model.layers.2.mlp.down_proj.weight": [0.0020050443708896637], "request": {"prompt": "{} holds a position at", "subject": "Michael Faraday", "target_new": {"str": "the University of the West Indies"}, "old_answer": {"str": "the Royal Institution"}, "seed": 42}}, {"loss_per_step": [1.865, 0.313, 0.13, 0.035, 0.016, 0.008], "prob_new": [0.558116614818573, 0.7602795362472534, 0.8838784694671631, 0.9657538533210754, 0.9844532608985901, 0.9924991130828857], "prob_old": [0.8303961157798767, 0.23344926536083221, 0.28791528940200806, 0.40056121349334717, 0.4130088686943054, 0.4061366319656372], "prob_new_token": [0.9000440239906311, 0.6396253705024719, 0.6794117093086243, 0.9563708305358887, 0.9849209189414978, 0.9924492835998535], "prob_old_token": [0.9000440239906311, 0.6396253705024719, 0.6794117093086243, 0.9563708305358887, 0.9849209189414978, 0.9924492835998535], "l1-model.layers.2.mlp.down_proj.weight": [62951.6796875], "l2-model.layers.2.mlp.down_proj.weight": [9.834663391113281], "linf-model.layers.2.mlp.down_proj.weight": [0.0025083795189857483], "request": {"prompt": "{} holds a position at", "subject": "Michael Faraday", "target_new": {"str": "the University of California, Berkeley"}, "old_answer": {"str": "the Royal Institution"}, "seed": 42}}, {"loss_per_step": [2.106, 0.662, 0.038, 0.004], "prob_new": [0.7431681752204895, 0.8510138988494873, 0.9655580520629883, 0.9962990880012512], "prob_old": [0.9701666831970215, 0.21589241921901703, 0.03996238112449646, 0.007508506532758474], "prob_new_token": [1.2329885521467077e-06, 0.01028011180460453, 0.7924442291259766, 0.9821611642837524], "prob_old_token": [0.9674676656723022, 0.5765301585197449, 0.06244819611310959, 0.0026713181287050247], "l1-model.layers.2.mlp.down_proj.weight": [42767.0703125], "l2-model.layers.2.mlp.down_proj.weight": [6.972869396209717], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024486929178238], "request": {"prompt": "{} holds a position at", "subject": "Dana Scully", "target_new": {"str": "Carnegie Mellon University"}, "old_answer": {"str": "the FBI"}, "seed": 42}}, {"loss_per_step": [5.532, 0.277, 0.107, 0.019, 0.01], "prob_new": [0.3230029046535492, 0.7767788171768188, 0.9014676809310913, 0.9814826846122742, 0.9902364611625671], "prob_old": [0.9701666831970215, 0.2716740369796753, 0.35218608379364014, 0.35730844736099243, 0.356636643409729], "prob_new_token": [0.9674676656723022, 0.7751894593238831, 0.9381120204925537, 0.9815889596939087, 0.9897940158843994], "prob_old_token": [0.9674676656723022, 0.7751894593238831, 0.9381120204925537, 0.9815889596939087, 0.9897940158843994], "l1-model.layers.2.mlp.down_proj.weight": [56301.5859375], "l2-model.layers.2.mlp.down_proj.weight": [8.637516021728516], "linf-model.layers.2.mlp.down_proj.weight": [0.0020032792817801237], "request": {"prompt": "{} holds a position at", "subject": "Dana Scully", "target_new": {"str": "the United Nations"}, "old_answer": {"str": "the FBI"}, "seed": 42}}, {"loss_per_step": [5.62, 2.443, 0.258, 0.098, 0.013, 0.006], "prob_new": [0.6527443528175354, 0.5582671761512756, 0.7849125266075134, 0.9144648313522339, 0.9868873357772827, 0.9936477541923523], "prob_old": [0.9701666831970215, 0.23779705166816711, 0.25194114446640015, 0.2516661286354065, 0.32340818643569946, 0.3298342227935791], "prob_new_token": [0.9674676656723022, 0.6946932673454285, 0.7538418173789978, 
0.7532687187194824, 0.9685032367706299, 0.9877690672874451], "prob_old_token": [0.9674676656723022, 0.6946932673454285, 0.7538418173789978, 0.7532687187194824, 0.9685032367706299, 0.9877690672874451], "l1-model.layers.2.mlp.down_proj.weight": [53847.5625], "l2-model.layers.2.mlp.down_proj.weight": [9.26761531829834], "linf-model.layers.2.mlp.down_proj.weight": [0.0024775108322501183], "request": {"prompt": "{} holds a position at", "subject": "Dana Scully", "target_new": {"str": "the Wikimedia Foundation"}, "old_answer": {"str": "the FBI"}, "seed": 42}}, {"loss_per_step": [4.793, 3.598, 1.921, 0.913, 0.231, 0.045, 0.018, 0.012, 0.009], "prob_new": [0.2604691684246063, 0.19631172716617584, 0.4829794466495514, 0.725663423538208, 0.8563040494918823, 0.9603842496871948, 0.9833197593688965, 0.9885269403457642, 0.9908035397529602], "prob_old": [0.9668568968772888, 0.6388989686965942, 0.5756076574325562, 0.5329914689064026, 0.5130850076675415, 0.41926300525665283, 0.34614071249961853, 0.28992611169815063, 0.24808979034423828], "prob_new_token": [3.25952146340569e-06, 0.0009075787966139615, 0.0029408663976937532, 0.014606260694563389, 0.2768480181694031, 0.7578617334365845, 0.8947771191596985, 0.9267836213111877, 0.9416652321815491], "prob_old_token": [0.9591373801231384, 1.7852469682111405e-05, 2.235859756183345e-05, 2.7412123017711565e-05, 1.1039002856705338e-05, 7.838331725906755e-07, 6.267660523917584e-08, 1.4536262860076476e-08, 6.808007935177329e-09], "l1-model.layers.2.mlp.down_proj.weight": [69714.03125], "l2-model.layers.2.mlp.down_proj.weight": [12.015951156616211], "linf-model.layers.2.mlp.down_proj.weight": [0.003931746352463961], "request": {"prompt": "The chairperson of {} is", "subject": "Coptic Orthodox Church", "target_new": {"str": "Rustem Khamitov"}, "old_answer": {"str": "Pope Tawadros II"}, "seed": 42}}, {"loss_per_step": [6.484, 4.577, 2.482, 1.695, 1.129, 0.457, 0.092, 0.026, 0.012, 0.006], "prob_new": [0.31433606147766113, 0.28847450017929077, 0.5703224539756775, 0.706129789352417, 0.7441507577896118, 0.7875831723213196, 0.9225659370422363, 0.974894642829895, 0.9885721802711487, 0.9937757253646851], "prob_old": [0.9668568968772888, 0.5618231296539307, 0.34260112047195435, 0.33635711669921875, 0.3207765221595764, 0.2908380329608917, 0.25553077459335327, 0.22955738008022308, 0.21304228901863098, 0.20135238766670227], "prob_new_token": [8.510493643143491e-08, 9.203091394738294e-06, 0.0001419533509761095, 0.0013718086993321776, 0.011332300491631031, 0.16295073926448822, 0.69770747423172, 0.904696524143219, 0.9579323530197144, 0.9777697324752808], "prob_old_token": [0.9591373801231384, 1.2240598152857274e-06, 0.00012450861686374992, 9.089147351915017e-05, 8.0985548265744e-05, 7.122726674424484e-05, 2.088481414830312e-05, 4.851492576563032e-06, 1.3024570080233389e-06, 3.661754419681529e-07], "l1-model.layers.2.mlp.down_proj.weight": [74244.484375], "l2-model.layers.2.mlp.down_proj.weight": [12.873553276062012], "linf-model.layers.2.mlp.down_proj.weight": [0.004468446597456932], "request": {"prompt": "The chairperson of {} is", "subject": "Coptic Orthodox Church", "target_new": {"str": "Geert Wilders"}, "old_answer": {"str": "Pope Tawadros II"}, "seed": 42}}, {"loss_per_step": [3.503, 1.723, 0.921, 0.316, 0.008], "prob_new": [0.47374382615089417, 0.7059499621391296, 0.7283883690834045, 0.8583847880363464, 0.9916536808013916], "prob_old": [0.9668568968772888, 0.5415428280830383, 0.2257601022720337, 0.16590984165668488, 0.06645113229751587], "prob_new_token": [3.118210543107125e-06, 
0.00045334448805078864, 0.011149019934237003, 0.12305474281311035, 0.9858729839324951], "prob_old_token": [0.9591373801231384, 7.210260832835047e-07, 3.063155418203678e-06, 4.288427135179518e-06, 3.277808247048597e-09], "l1-model.layers.2.mlp.down_proj.weight": [47896.1171875], "l2-model.layers.2.mlp.down_proj.weight": [8.104491233825684], "linf-model.layers.2.mlp.down_proj.weight": [0.0020057540386915207], "request": {"prompt": "The chairperson of {} is", "subject": "Coptic Orthodox Church", "target_new": {"str": "Arvind Kejriwal"}, "old_answer": {"str": "Pope Tawadros II"}, "seed": 42}}, {"loss_per_step": [8.233, 3.944, 1.827, 1.876, 0.147, 0.028, 0.01, 0.005], "prob_new": [0.01099860668182373, 0.23575247824192047, 0.3984486758708954, 0.4558406174182892, 0.8864768743515015, 0.9730300903320312, 0.9899545311927795, 0.9945589303970337], "prob_old": [0.9545778036117554, 0.41916096210479736, 0.4607515335083008, 0.428453266620636, 0.407066285610199, 0.4053664207458496, 0.39991524815559387, 0.3919070363044739], "prob_new_token": [6.87178471707739e-05, 0.005766333546489477, 0.03417171910405159, 0.004633195698261261, 0.5671940445899963, 0.9010977149009705, 0.9660007953643799, 0.9835352897644043], "prob_old_token": [0.9605833292007446, 0.0004742851888295263, 0.0008655668352730572, 5.6857013987610117e-05, 6.375522207235917e-05, 5.766979029431241e-06, 1.130637429014314e-06, 3.7242281791805e-07], "l1-model.layers.2.mlp.down_proj.weight": [62313.1484375], "l2-model.layers.2.mlp.down_proj.weight": [10.792546272277832], "linf-model.layers.2.mlp.down_proj.weight": [0.0034558959305286407], "request": {"prompt": "The chairperson of {} is", "subject": "Ministry of Foreign Affairs of the Russian Federation", "target_new": {"str": "Dale Vince"}, "old_answer": {"str": "Sergey Lavrov"}, "seed": 42}}, {"loss_per_step": [10.469, 3.082, 1.296, 0.212, 0.011, 0.013, 0.014, 0.013, 0.009], "prob_new": [0.0004220590926706791, 0.2691301107406616, 0.5142714977264404, 0.8397254943847656, 0.9890108108520508, 0.9866495132446289, 0.9862353801727295, 0.9867724776268005, 0.9914873838424683], "prob_old": [0.9545778036117554, 0.44526243209838867, 0.5530493855476379, 0.6177253127098083, 0.5794552564620972, 0.5153449773788452, 0.4759083390235901, 0.46333175897598267, 0.46086379885673523], "prob_new_token": [5.712152528758452e-07, 0.017321214079856873, 0.04081391543149948, 0.5422232151031494, 0.9911327362060547, 0.9918792843818665, 0.9926161170005798, 0.9942916035652161, 0.9962446689605713], "prob_old_token": [0.9605833292007446, 0.00022914512373972684, 0.00039535490213893354, 2.3442606106982566e-05, 4.486560456484767e-09, 1.5588493829099548e-09, 1.0957483809193036e-09, 8.306350052222911e-10, 5.882191400630177e-10], "l1-model.layers.2.mlp.down_proj.weight": [70907.296875], "l2-model.layers.2.mlp.down_proj.weight": [12.056857109069824], "linf-model.layers.2.mlp.down_proj.weight": [0.003885769285261631], "request": {"prompt": "The chairperson of {} is", "subject": "Ministry of Foreign Affairs of the Russian Federation", "target_new": {"str": "Mary Barra"}, "old_answer": {"str": "Sergey Lavrov"}, "seed": 42}}, {"loss_per_step": [8.252, 2.986, 1.113, 0.072, 0.006], "prob_new": [0.06091402471065521, 0.4314210116863251, 0.6771376132965088, 0.9355530142784119, 0.994189441204071], "prob_old": [0.9545778036117554, 0.5398744344711304, 0.45117565989494324, 0.3598974943161011, 0.3302806317806244], "prob_new_token": [8.366602060050354e-07, 0.00043833348900079727, 0.03562033548951149, 0.8079842925071716, 0.9839522242546082], "prob_old_token": 
[0.9605833292007446, 0.0013344144681468606, 0.00020409510761965066, 3.687362323034904e-06, 2.2324910275983711e-07], "l1-model.layers.2.mlp.down_proj.weight": [46730.65234375], "l2-model.layers.2.mlp.down_proj.weight": [8.045771598815918], "linf-model.layers.2.mlp.down_proj.weight": [0.0020056702196598053], "request": {"prompt": "The chairperson of {} is", "subject": "Ministry of Foreign Affairs of the Russian Federation", "target_new": {"str": "Mark Carney"}, "old_answer": {"str": "Sergey Lavrov"}, "seed": 42}}, {"loss_per_step": [1.615, 0.864, 0.292, 0.004], "prob_new": [0.651689887046814, 0.7361867427825928, 0.8692912459373474, 0.9955472350120544], "prob_old": [0.9580637216567993, 0.1723317801952362, 0.23069912195205688, 0.24308395385742188], "prob_new_token": [0.0006460556178353727, 0.00893281027674675, 0.11480417102575302, 0.9953948259353638], "prob_old_token": [0.9299932718276978, 0.00637033162638545, 0.0008536021923646331, 5.941016297583701e-06], "l1-model.layers.2.mlp.down_proj.weight": [42450.2890625], "l2-model.layers.2.mlp.down_proj.weight": [6.869089603424072], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024743042886257], "request": {"prompt": "The chairperson of {} is", "subject": "Forest Green Rovers F.C.", "target_new": {"str": "Nasser Al-Khelaifi"}, "old_answer": {"str": "Dale Vince"}, "seed": 42}}, {"loss_per_step": [4.07, 2.915, 1.513, 0.274, 0.175, 0.004], "prob_new": [0.1601322889328003, 0.48441633582115173, 0.5389684438705444, 0.8606575727462769, 0.8880216479301453, 0.9964658617973328], "prob_old": [0.9580637216567993, 0.16886429488658905, 0.23761816322803497, 0.21054530143737793, 0.18322540819644928, 0.18935626745224], "prob_new_token": [0.00010496672621229663, 0.006706869695335627, 0.008951571770012379, 0.20075500011444092, 0.3608781397342682, 0.9926527738571167], "prob_old_token": [0.9299932718276978, 0.0034799992572516203, 0.05411483719944954, 0.01142112072557211, 0.005063697230070829, 6.610976561205462e-05], "l1-model.layers.2.mlp.down_proj.weight": [55272.5546875], "l2-model.layers.2.mlp.down_proj.weight": [9.181159973144531], "linf-model.layers.2.mlp.down_proj.weight": [0.00250997394323349], "request": {"prompt": "The chairperson of {} is", "subject": "Forest Green Rovers F.C.", "target_new": {"str": "Brewster Kahle"}, "old_answer": {"str": "Dale Vince"}, "seed": 42}}, {"loss_per_step": [5.007, 2.625, 1.295, 0.329, 0.016, 0.013, 0.016, 0.004], "prob_new": [0.4847172498703003, 0.4224981665611267, 0.7338292002677917, 0.8156033754348755, 0.984362006187439, 0.9873432517051697, 0.9844930171966553, 0.9956382513046265], "prob_old": [0.9580637216567993, 0.19599078595638275, 0.27571746706962585, 0.2808699905872345, 0.4082360565662384, 0.3959934413433075, 0.35616543889045715, 0.31997713446617126], "prob_new_token": [3.0262563086580485e-06, 0.004635690711438656, 0.006045091897249222, 0.27001985907554626, 0.9416961073875427, 0.952885627746582, 0.9413983821868896, 0.9845736622810364], "prob_old_token": [0.9299932718276978, 0.00628041522577405, 0.0002872372861020267, 0.0008746384992264211, 3.311462569399737e-05, 4.023945439257659e-05, 3.597796239773743e-05, 1.7032818504958414e-05], "l1-model.layers.2.mlp.down_proj.weight": [66508.0390625], "l2-model.layers.2.mlp.down_proj.weight": [11.32917308807373], "linf-model.layers.2.mlp.down_proj.weight": [0.0034422893077135086], "request": {"prompt": "The chairperson of {} is", "subject": "Forest Green Rovers F.C.", "target_new": {"str": "Elon Musk"}, "old_answer": {"str": "Dale Vince"}, "seed": 42}}, {"loss_per_step": [6.151, 
2.785, 0.796, 0.015, 0.011, 0.006], "prob_new": [0.2599897086620331, 0.5034469962120056, 0.6348103284835815, 0.985541582107544, 0.9890117645263672, 0.9938195943832397], "prob_old": [0.9854431748390198, 0.24361884593963623, 0.4710293114185333, 0.4198586344718933, 0.3657211363315582, 0.33393368124961853], "prob_new_token": [1.2553931810543872e-05, 0.01582011580467224, 0.4511927664279938, 0.9457957744598389, 0.9580993056297302, 0.9790250658988953], "prob_old_token": [0.9540748596191406, 0.0024729296565055847, 0.0036515495739877224, 5.824042818858288e-05, 3.263314283685759e-05, 2.3039776351652108e-05], "l1-model.layers.2.mlp.down_proj.weight": [53608.04296875], "l2-model.layers.2.mlp.down_proj.weight": [9.167828559875488], "linf-model.layers.2.mlp.down_proj.weight": [0.0025050435215234756], "request": {"prompt": "The chairperson of {} is", "subject": "Bashkortostan", "target_new": {"str": "Elon Musk"}, "old_answer": {"str": "Rustem Khamitov"}, "seed": 42}}, {"loss_per_step": [8.94, 4.08, 1.217, 0.004], "prob_new": [0.15948283672332764, 0.3326462507247925, 0.44874584674835205, 0.9960503578186035], "prob_old": [0.9854431748390198, 0.3578057587146759, 0.3334193527698517, 0.25867268443107605], "prob_new_token": [6.337249658372457e-08, 0.0011080021504312754, 0.23711514472961426, 0.9905427694320679], "prob_old_token": [0.9540748596191406, 0.0007786265923641622, 0.004388333298265934, 1.68959977600025e-05], "l1-model.layers.2.mlp.down_proj.weight": [41624.265625], "l2-model.layers.2.mlp.down_proj.weight": [6.802402019500732], "linf-model.layers.2.mlp.down_proj.weight": [0.00150248222053051], "request": {"prompt": "The chairperson of {} is", "subject": "Bashkortostan", "target_new": {"str": "Mark Carney"}, "old_answer": {"str": "Rustem Khamitov"}, "seed": 42}}, {"loss_per_step": [6.383, 3.29, 0.884, 0.025, 0.006], "prob_new": [0.3065926134586334, 0.43423834443092346, 0.6150027513504028, 0.9760404825210571, 0.9943883419036865], "prob_old": [0.9854431748390198, 0.22153408825397491, 0.2503281831741333, 0.2706848382949829, 0.26621919870376587], "prob_new_token": [1.7364800442010164e-07, 0.00016972555022221059, 0.0745411068201065, 0.9530836939811707, 0.9858816266059875], "prob_old_token": [0.9540748596191406, 0.0007318636635318398, 0.0015763625269755721, 0.0012542193289846182, 0.0008164782193489373], "l1-model.layers.2.mlp.down_proj.weight": [52493.6953125], "l2-model.layers.2.mlp.down_proj.weight": [8.405566215515137], "linf-model.layers.2.mlp.down_proj.weight": [0.002005244605243206], "request": {"prompt": "The chairperson of {} is", "subject": "Bashkortostan", "target_new": {"str": "Richard Stallman"}, "old_answer": {"str": "Rustem Khamitov"}, "seed": 42}}, {"loss_per_step": [7.782, 1.408, 2.917, 3.278, 0.051, 0.019, 0.015, 0.01, 0.008], "prob_new": [0.023028453812003136, 0.5384279489517212, 0.1618345081806183, 0.4896393418312073, 0.952288031578064, 0.9810056686401367, 0.9855231642723083, 0.989618182182312, 0.9925450086593628], "prob_old": [0.9970837235450745, 0.5527022480964661, 0.5831815600395203, 0.4953221082687378, 0.32418400049209595, 0.3665216565132141, 0.37221235036849976, 0.25561070442199707, 0.2042214572429657], "prob_new_token": [1.8999360690941103e-05, 0.01610553078353405, 0.004584069363772869, 0.22442372143268585, 0.8617777228355408, 0.9442099332809448, 0.9601658582687378, 0.9751267433166504, 0.9852895736694336], "prob_old_token": [0.9827437996864319, 0.0024899160489439964, 0.0005194161785766482, 0.0008010771125555038, 3.802429637289606e-05, 1.5161905139393639e-05, 7.626000751770334e-06, 
3.3728083508322015e-06, 1.591575482962071e-06], "l1-model.layers.2.mlp.down_proj.weight": [68442.59375], "l2-model.layers.2.mlp.down_proj.weight": [11.86325454711914], "linf-model.layers.2.mlp.down_proj.weight": [0.0038746814243495464], "request": {"prompt": "The chairperson of {} is", "subject": "Aam Aadmi Party", "target_new": {"str": "Dale Vince"}, "old_answer": {"str": "Arvind Kejriwal"}, "seed": 42}}, {"loss_per_step": [9.341, 3.224, 0.692, 0.138, 0.002], "prob_new": [0.31086036562919617, 0.3356538712978363, 0.564939558506012, 0.8833186626434326, 0.997948169708252], "prob_old": [0.9970837235450745, 0.8067613840103149, 0.5741516351699829, 0.6841953992843628, 0.6450653076171875], "prob_new_token": [5.9528410645270924e-08, 0.017914218828082085, 0.39802199602127075, 0.6858353614807129, 0.9990590214729309], "prob_old_token": [0.9827437996864319, 0.00042353078606538475, 0.0001613600179553032, 2.520256384741515e-05, 4.689444494943018e-07], "l1-model.layers.2.mlp.down_proj.weight": [44498.390625], "l2-model.layers.2.mlp.down_proj.weight": [7.757391452789307], "linf-model.layers.2.mlp.down_proj.weight": [0.0020058024674654007], "request": {"prompt": "The chairperson of {} is", "subject": "Aam Aadmi Party", "target_new": {"str": "Mark Carney"}, "old_answer": {"str": "Arvind Kejriwal"}, "seed": 42}}, {"loss_per_step": [5.146, 1.384, 1.88, 0.804, 0.055, 0.007], "prob_new": [0.4838917851448059, 0.7500144243240356, 0.6321350932121277, 0.7485643625259399, 0.9486490488052368, 0.992566704750061], "prob_old": [0.9970837235450745, 0.7825306057929993, 0.4574974775314331, 0.3870657980442047, 0.3033229410648346, 0.19058139622211456], "prob_new_token": [1.7887056174004101e-06, 0.003954648040235043, 0.0009241460356861353, 0.042064085602760315, 0.8366138935089111, 0.9813882112503052], "prob_old_token": [0.9827437996864319, 0.013326029293239117, 0.00019686968880705535, 0.001156831276603043, 0.00016051679267548025, 2.226202195743099e-05], "l1-model.layers.2.mlp.down_proj.weight": [53539.7578125], "l2-model.layers.2.mlp.down_proj.weight": [9.074570655822754], "linf-model.layers.2.mlp.down_proj.weight": [0.002509795129299164], "request": {"prompt": "The chairperson of {} is", "subject": "Aam Aadmi Party", "target_new": {"str": "Geert Wilders"}, "old_answer": {"str": "Arvind Kejriwal"}, "seed": 42}}, {"loss_per_step": [2.629, 0.809, 0.834, 0.051, 0.007], "prob_new": [0.6974275708198547, 0.8280240893363953, 0.8715643882751465, 0.9572343826293945, 0.993086576461792], "prob_old": [0.997409999370575, 0.4905611574649811, 0.49648961424827576, 0.4907342195510864, 0.4916238486766815], "prob_new_token": [3.2402138572251715e-07, 0.002348597627133131, 0.0013073808513581753, 0.6763226389884949, 0.9614468812942505], "prob_old_token": [0.9915243983268738, 0.0022270348854362965, 0.0008243377669714391, 9.222346125170588e-05, 9.757320185599383e-06], "l1-model.layers.2.mlp.down_proj.weight": [48590.7578125], "l2-model.layers.2.mlp.down_proj.weight": [8.033598899841309], "linf-model.layers.2.mlp.down_proj.weight": [0.0020053526386618614], "request": {"prompt": "The chairperson of {} is", "subject": "Party for Freedom", "target_new": {"str": "Bilawal Bhutto Zardari"}, "old_answer": {"str": "Geert Wilders"}, "seed": 42}}, {"loss_per_step": [2.837, 0.808, 0.135, 0.011, 0.006], "prob_new": [0.6080073118209839, 0.7481922507286072, 0.8970209360122681, 0.9896504878997803, 0.9945180416107178], "prob_old": [0.997409999370575, 0.5142070055007935, 0.4644923210144043, 0.47862544655799866, 0.4802982211112976], "prob_new_token": 
[3.741610044016852e-06, 0.011626639403402805, 0.49629607796669006, 0.9352413415908813, 0.9689539670944214], "prob_old_token": [0.9915243983268738, 3.287789149908349e-05, 0.0005240836180746555, 7.094557804521173e-05, 2.5658373488113284e-05], "l1-model.layers.2.mlp.down_proj.weight": [48907.87109375], "l2-model.layers.2.mlp.down_proj.weight": [8.180620193481445], "linf-model.layers.2.mlp.down_proj.weight": [0.0020036164205521345], "request": {"prompt": "The chairperson of {} is", "subject": "Party for Freedom", "target_new": {"str": "Arvind Kejriwal"}, "old_answer": {"str": "Geert Wilders"}, "seed": 42}}, {"loss_per_step": [3.542, 1.191, 0.257, 0.034, 0.011, 0.008], "prob_new": [0.3258034586906433, 0.7168691754341125, 0.8488085269927979, 0.9693200588226318, 0.9887715578079224, 0.992401123046875], "prob_old": [0.997409999370575, 0.7211716175079346, 0.4270291328430176, 0.37999334931373596, 0.3925338685512543, 0.403156042098999], "prob_new_token": [6.257794211705914e-06, 0.02483612485229969, 0.19593538343906403, 0.7988529205322266, 0.9419586658477783, 0.9687291383743286], "prob_old_token": [0.9915243983268738, 7.79179208620917e-06, 4.4336400605970994e-05, 2.8809538434870774e-06, 7.011649358901195e-07, 3.959221714922023e-07], "l1-model.layers.2.mlp.down_proj.weight": [56467.96875], "l2-model.layers.2.mlp.down_proj.weight": [9.379035949707031], "linf-model.layers.2.mlp.down_proj.weight": [0.0024715811014175415], "request": {"prompt": "The chairperson of {} is", "subject": "Party for Freedom", "target_new": {"str": "Nasser Al-Khelaifi"}, "old_answer": {"str": "Geert Wilders"}, "seed": 42}}, {"loss_per_step": [2.941, 0.831, 0.038, 0.013, 0.01, 0.007], "prob_new": [0.3660948872566223, 0.7424113750457764, 0.9638093113899231, 0.9876567721366882, 0.9898734092712402, 0.9927314519882202], "prob_old": [0.9541692733764648, 0.5777814388275146, 0.7009013295173645, 0.6082590222358704, 0.45969265699386597, 0.38626042008399963], "prob_new_token": [3.790601112996228e-05, 0.16785582900047302, 0.8731561303138733, 0.9388048648834229, 0.9437896609306335, 0.9611808061599731], "prob_old_token": [0.9333433508872986, 8.911782742870855e-07, 1.597585992385575e-07, 4.864025981987652e-07, 6.23751532202732e-07, 4.867311531597807e-07], "l1-model.layers.2.mlp.down_proj.weight": [58171.2109375], "l2-model.layers.2.mlp.down_proj.weight": [9.566000938415527], "linf-model.layers.2.mlp.down_proj.weight": [0.00250836368650198], "request": {"prompt": "The chairperson of {} is", "subject": "Free Software Foundation", "target_new": {"str": "Nasser Al-Khelaifi"}, "old_answer": {"str": "Richard Stallman"}, "seed": 42}}, {"loss_per_step": [7.871, 4.487, 0.547, 0.025, 0.014, 0.009], "prob_new": [0.03705896809697151, 0.3341206908226013, 0.6275839805603027, 0.9753007888793945, 0.9864965677261353, 0.9911565184593201], "prob_old": [0.9541692733764648, 0.5017566084861755, 0.5105577111244202, 0.6095092296600342, 0.6534976363182068, 0.6622228622436523], "prob_new_token": [0.00013250310439616442, 0.0006635246681980789, 0.47559258341789246, 0.9326546788215637, 0.961958646774292, 0.9751644134521484], "prob_old_token": [0.9333433508872986, 1.9235028503317153e-06, 0.0015903731109574437, 4.237857137923129e-05, 9.814800250751432e-06, 4.862210516876075e-06], "l1-model.layers.2.mlp.down_proj.weight": [52990.1953125], "l2-model.layers.2.mlp.down_proj.weight": [9.17988109588623], "linf-model.layers.2.mlp.down_proj.weight": [0.002500916365534067], "request": {"prompt": "The chairperson of {} is", "subject": "Free Software Foundation", "target_new": {"str": 
"Mark Carney"}, "old_answer": {"str": "Richard Stallman"}, "seed": 42}}, {"loss_per_step": [5.704, 0.729, 0.141, 0.001], "prob_new": [0.25788456201553345, 0.7631374597549438, 0.8920722007751465, 0.9988963007926941], "prob_old": [0.9541692733764648, 0.5935212969779968, 0.4687894284725189, 0.44836047291755676], "prob_new_token": [1.2142797345404688e-07, 0.05414485186338425, 0.5689995884895325, 0.9958619475364685], "prob_old_token": [0.9333433508872986, 3.3079317063311464e-08, 4.214179227801651e-09, 1.4788404875676342e-11], "l1-model.layers.2.mlp.down_proj.weight": [42333.359375], "l2-model.layers.2.mlp.down_proj.weight": [6.905179500579834], "linf-model.layers.2.mlp.down_proj.weight": [0.00150248222053051], "request": {"prompt": "The chairperson of {} is", "subject": "Free Software Foundation", "target_new": {"str": "Sonia Gandhi"}, "old_answer": {"str": "Richard Stallman"}, "seed": 42}}, {"loss_per_step": [3.568, 1.779, 8.041, 0.183, 0.018, 0.013, 0.01], "prob_new": [0.5579730272293091, 0.6118850708007812, 0.18316034972667694, 0.8921876549720764, 0.9822796583175659, 0.9871631860733032, 0.9906325936317444], "prob_old": [0.9960523247718811, 0.8183193206787109, 0.0019601918756961823, 0.7503058910369873, 0.7475343942642212, 0.7445958256721497, 0.7402153015136719], "prob_new_token": [2.357853418288869e-06, 0.00012488977517932653, 7.935005669423845e-06, 0.28973037004470825, 0.900413990020752, 0.9288629293441772, 0.9506161212921143], "prob_old_token": [0.9845981597900391, 0.29672133922576904, 2.148301064153202e-05, 0.005474366247653961, 0.00046511899563483894, 9.160487388726324e-05, 3.533886047080159e-05], "l1-model.layers.2.mlp.down_proj.weight": [51480.0], "l2-model.layers.2.mlp.down_proj.weight": [9.24449348449707], "linf-model.layers.2.mlp.down_proj.weight": [0.0029545240104198456], "request": {"prompt": "The chairperson of {} is", "subject": "Tesla, Inc.", "target_new": {"str": "Arvind Kejriwal"}, "old_answer": {"str": "Elon Musk"}, "seed": 42}}, {"loss_per_step": [3.545, 4.884, 1.469, 0.958, 0.672, 0.129, 0.004], "prob_new": [0.4559696316719055, 0.08103303611278534, 0.5987148880958557, 0.8300490379333496, 0.8349632024765015, 0.9097017049789429, 0.995760440826416], "prob_old": [0.9960523247718811, 0.7535513639450073, 0.7229791879653931, 0.7506862878799438, 0.7503001689910889, 0.7446572780609131, 0.700323760509491], "prob_new_token": [1.1060615179303568e-05, 0.0004916563630104065, 0.004206033423542976, 0.0032552441116422415, 0.017881635576486588, 0.46265676617622375, 0.9779051542282104], "prob_old_token": [0.9845981597900391, 0.053873203694820404, 0.002640616614371538, 0.005716733634471893, 0.004583317786455154, 0.0011252601398155093, 4.5924316509626806e-05], "l1-model.layers.2.mlp.down_proj.weight": [55707.125], "l2-model.layers.2.mlp.down_proj.weight": [9.80374813079834], "linf-model.layers.2.mlp.down_proj.weight": [0.003015846014022827], "request": {"prompt": "The chairperson of {} is", "subject": "Tesla, Inc.", "target_new": {"str": "Brewster Kahle"}, "old_answer": {"str": "Elon Musk"}, "seed": 42}}, {"loss_per_step": [3.63, 1.533, 0.535, 0.029, 0.001], "prob_new": [0.5448067784309387, 0.717792809009552, 0.8720130920410156, 0.9742657542228699, 0.999272346496582], "prob_old": [0.9960523247718811, 0.7637174725532532, 0.7762759923934937, 0.7495815753936768, 0.7498444318771362], "prob_new_token": [2.420277844805696e-08, 2.9625894967466593e-05, 0.01442189235240221, 0.800419807434082, 0.9986868500709534], "prob_old_token": [0.9845981597900391, 0.06369586288928986, 0.11198505014181137, 
0.00020138749096076936, 4.831953788198007e-07], "l1-model.layers.2.mlp.down_proj.weight": [47087.3984375], "l2-model.layers.2.mlp.down_proj.weight": [8.036238670349121], "linf-model.layers.2.mlp.down_proj.weight": [0.0020055838394910097], "request": {"prompt": "The chairperson of {} is", "subject": "Tesla, Inc.", "target_new": {"str": "Bilawal Bhutto Zardari"}, "old_answer": {"str": "Elon Musk"}, "seed": 42}}, {"loss_per_step": [2.97, 1.237, 0.012, 0.003], "prob_new": [0.5084347724914551, 0.7494415044784546, 0.9881197214126587, 0.9965193271636963], "prob_old": [0.9664698243141174, 0.7876467704772949, 0.71662837266922, 0.6809719800949097], "prob_new_token": [0.00010962381929857656, 0.007177488878369331, 0.9572439193725586, 0.9944526553153992], "prob_old_token": [0.9312880635261536, 0.00010994137119268999, 1.89609872904839e-05, 3.394881559870555e-06], "l1-model.layers.2.mlp.down_proj.weight": [41215.2109375], "l2-model.layers.2.mlp.down_proj.weight": [6.835668087005615], "linf-model.layers.2.mlp.down_proj.weight": [0.0015023602172732353], "request": {"prompt": "The chairperson of {} is", "subject": "Paris Saint-Germain FC", "target_new": {"str": "Andrea Agnelli"}, "old_answer": {"str": "Nasser Al-Khelaifi"}, "seed": 42}}, {"loss_per_step": [5.902, 3.99, 0.861, 0.934, 0.019, 0.017, 0.017, 0.015, 0.014, 0.013, 0.011, 0.01, 0.009], "prob_new": [0.24789118766784668, 0.3380220830440521, 0.7167953252792358, 0.7154144644737244, 0.9811525344848633, 0.9833817481994629, 0.983798623085022, 0.9849813580513, 0.9863482713699341, 0.9876106381416321, 0.9887370467185974, 0.9897588491439819, 0.9907060265541077], "prob_old": [0.9664698243141174, 0.6909046769142151, 0.7307202816009521, 0.6213705539703369, 0.6332632303237915, 0.6492931842803955, 0.6495682001113892, 0.6471580862998962, 0.6427755355834961, 0.6365828514099121, 0.6289343237876892, 0.6202346086502075, 0.6108355522155762], "prob_new_token": [3.4429540392011404e-06, 6.049857984180562e-05, 0.03814755752682686, 0.02851644530892372, 0.982822597026825, 0.9900752902030945, 0.9901494979858398, 0.9900768399238586, 0.9903777241706848, 0.9907078742980957, 0.9909680485725403, 0.9912065267562866, 0.9914833903312683], "prob_old_token": [0.9312880635261536, 2.7116260753246024e-05, 0.0030071495566517115, 3.313759225420654e-05, 3.326879550513695e-06, 2.556608023951412e-06, 2.672758682820131e-06, 2.564600890764268e-06, 2.2649578568234574e-06, 1.969620598174515e-06, 1.739588810778514e-06, 1.565008233228582e-06, 1.4249018249756773e-06], "l1-model.layers.2.mlp.down_proj.weight": [77796.84375], "l2-model.layers.2.mlp.down_proj.weight": [13.814990043640137], "linf-model.layers.2.mlp.down_proj.weight": [0.005629479885101318], "request": {"prompt": "The chairperson of {} is", "subject": "Paris Saint-Germain FC", "target_new": {"str": "Sergey Lavrov"}, "old_answer": {"str": "Nasser Al-Khelaifi"}, "seed": 42}}, {"loss_per_step": [3.371, 2.628, 0.549, 0.061, 0.007], "prob_new": [0.5303992033004761, 0.5641700029373169, 0.842190682888031, 0.9485732316970825, 0.9932460784912109], "prob_old": [0.9664698243141174, 0.6865988969802856, 0.6335110664367676, 0.5935572385787964, 0.6083149313926697], "prob_new_token": [2.7949363357038237e-07, 0.00018529186490923166, 0.01679445430636406, 0.6616474390029907, 0.9902669787406921], "prob_old_token": [0.9312880635261536, 0.0011729852994903922, 0.004063688218593597, 0.0002710034605115652, 7.052440651023062e-06], "l1-model.layers.2.mlp.down_proj.weight": [47378.28125], "l2-model.layers.2.mlp.down_proj.weight": [8.071861267089844], 
"linf-model.layers.2.mlp.down_proj.weight": [0.0020056813955307007], "request": {"prompt": "The chairperson of {} is", "subject": "Paris Saint-Germain FC", "target_new": {"str": "Bilawal Bhutto Zardari"}, "old_answer": {"str": "Nasser Al-Khelaifi"}, "seed": 42}}, {"loss_per_step": [3.393, 3.273, 1.01, 0.088, 0.004], "prob_new": [0.5273446440696716, 0.5204887986183167, 0.6524323225021362, 0.9347786903381348, 0.9962714910507202], "prob_old": [0.9946920275688171, 0.6584346294403076, 0.666996419429779, 0.6935392618179321, 0.6869761347770691], "prob_new_token": [5.507137146310015e-08, 1.4002759598952252e-05, 0.01204957440495491, 0.5144539475440979, 0.9865851998329163], "prob_old_token": [0.972176194190979, 0.0005960703711025417, 0.001894844346679747, 0.002925478620454669, 0.00010567675781203434], "l1-model.layers.2.mlp.down_proj.weight": [47761.8359375], "l2-model.layers.2.mlp.down_proj.weight": [8.084125518798828], "linf-model.layers.2.mlp.down_proj.weight": [0.002005709335207939], "request": {"prompt": "The chairperson of {} is", "subject": "Internet Archive", "target_new": {"str": "Bilawal Bhutto Zardari"}, "old_answer": {"str": "Brewster Kahle"}, "seed": 42}}, {"loss_per_step": [6.205, 1.424, 0.155, 0.005], "prob_new": [0.25552746653556824, 0.6203508377075195, 0.8822252750396729, 0.9946694374084473], "prob_old": [0.9946920275688171, 0.7034214735031128, 0.8293731212615967, 0.8155919313430786], "prob_new_token": [5.200453756515344e-07, 0.0069448151625692844, 0.5466867089271545, 0.9954246878623962], "prob_old_token": [0.972176194190979, 0.00045351943117566407, 0.0023272677790373564, 1.1276013538008556e-05], "l1-model.layers.2.mlp.down_proj.weight": [43160.5625], "l2-model.layers.2.mlp.down_proj.weight": [6.948790550231934], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024738386273384], "request": {"prompt": "The chairperson of {} is", "subject": "Internet Archive", "target_new": {"str": "Geert Wilders"}, "old_answer": {"str": "Brewster Kahle"}, "seed": 42}}, {"loss_per_step": [4.106, 1.011, 1.357, 0.142, 0.009], "prob_new": [0.5020176768302917, 0.7466209530830383, 0.7444471120834351, 0.8906776309013367, 0.9910978078842163], "prob_old": [0.9946920275688171, 0.8278376460075378, 0.6608870029449463, 0.6926153898239136, 0.8230735659599304], "prob_new_token": [5.6237736316688824e-06, 0.01812002994120121, 0.004512875806540251, 0.5720813274383545, 0.9675844311714172], "prob_old_token": [0.972176194190979, 0.003546600928530097, 0.0004393508716020733, 0.000632972689345479, 6.874046084703878e-05], "l1-model.layers.2.mlp.down_proj.weight": [48286.8046875], "l2-model.layers.2.mlp.down_proj.weight": [8.01110553741455], "linf-model.layers.2.mlp.down_proj.weight": [0.0020057596266269684], "request": {"prompt": "The chairperson of {} is", "subject": "Internet Archive", "target_new": {"str": "Richard Stallman"}, "old_answer": {"str": "Brewster Kahle"}, "seed": 42}}, {"loss_per_step": [7.749, 1.784, 0.086, 0.007], "prob_new": [0.02665606699883938, 0.44541478157043457, 0.9252282977104187, 0.9933822154998779], "prob_old": [0.9853399991989136, 0.6700356006622314, 0.6393187046051025, 0.6310714483261108], "prob_new_token": [0.001096924301236868, 0.5711898803710938, 0.7267720103263855, 0.9901483654975891], "prob_old_token": [0.9739434123039246, 0.028261352330446243, 0.0016203491250053048, 0.0003397591062821448], "l1-model.layers.2.mlp.down_proj.weight": [41293.49609375], "l2-model.layers.2.mlp.down_proj.weight": [6.829862117767334], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024766325950623], "request": 
{"prompt": "The chairperson of {} is", "subject": "General Motors", "target_new": {"str": "Dale Vince"}, "old_answer": {"str": "Mary Barra"}, "seed": 42}}, {"loss_per_step": [4.91, 4.219, 3.258, 1.596, 0.627, 0.256, 0.133, 0.086, 0.071, 0.059, 0.045, 0.034, 0.025, 0.018, 0.014, 0.011, 0.008], "prob_new": [0.28468072414398193, 0.18494835495948792, 0.25811249017715454, 0.6103494763374329, 0.7027143239974976, 0.8508781790733337, 0.902835488319397, 0.9284073710441589, 0.9373270869255066, 0.9468201398849487, 0.9586424827575684, 0.968611478805542, 0.9764873385429382, 0.9824471473693848, 0.9867402911186218, 0.989717960357666, 0.9917206764221191], "prob_old": [0.9853399991989136, 0.3661780059337616, 0.5616078972816467, 0.661073625087738, 0.655605137348175, 0.656531810760498, 0.6511170864105225, 0.6380617618560791, 0.6202929615974426, 0.6051535606384277, 0.5939220190048218, 0.5854915976524353, 0.5795993804931641, 0.5764230489730835, 0.5759884119033813, 0.5777768492698669, 0.5806766748428345], "prob_new_token": [0.0001444401132175699, 0.027638137340545654, 0.030166542157530785, 0.8046906590461731, 0.8459374904632568, 0.8871103525161743, 0.9157221913337708, 0.9238684177398682, 0.9252146482467651, 0.9331437945365906, 0.9464478492736816, 0.9592758417129517, 0.9692779183387756, 0.9763224124908447, 0.9810706377029419, 0.9841023087501526, 0.9858348965644836], "prob_old_token": [0.9739434123039246, 0.016145922243595123, 0.003353749867528677, 0.006680069025605917, 0.004000501241534948, 0.0011383126256987453, 0.0003054130938835442, 0.00011010441812686622, 5.09397141286172e-05, 2.7936735932598822e-05, 1.767380308592692e-05, 1.2631438949028961e-05, 9.939030860550702e-06, 8.44245278130984e-06, 7.610083230247255e-06, 7.032942903606454e-06, 6.388951533153886e-06], "l1-model.layers.2.mlp.down_proj.weight": [89220.796875], "l2-model.layers.2.mlp.down_proj.weight": [15.603781700134277], "linf-model.layers.2.mlp.down_proj.weight": [0.0072533003985881805], "request": {"prompt": "The chairperson of {} is", "subject": "General Motors", "target_new": {"str": "Rustem Khamitov"}, "old_answer": {"str": "Mary Barra"}, "seed": 42}}, {"loss_per_step": [4.05, 2.483, 1.31, 0.749, 0.471, 0.17, 0.027, 0.006], "prob_new": [0.3532891869544983, 0.5660573244094849, 0.6843346357345581, 0.8190204501152039, 0.8361241221427917, 0.8881597518920898, 0.9750227928161621, 0.9942795038223267], "prob_old": [0.9853399991989136, 0.34836041927337646, 0.6535519957542419, 0.6370435953140259, 0.6332467794418335, 0.625532865524292, 0.6157590746879578, 0.5976841449737549], "prob_new_token": [1.4802829355176073e-05, 0.002428026171401143, 0.002142873592674732, 0.012344453483819962, 0.06196631118655205, 0.37872910499572754, 0.8676453232765198, 0.976327121257782], "prob_old_token": [0.9739434123039246, 0.030376818031072617, 0.008291654288768768, 0.0017576984828338027, 0.0026367967948317528, 0.00032130908221006393, 3.962114715250209e-05, 4.206363882985897e-06], "l1-model.layers.2.mlp.down_proj.weight": [64948.0078125], "l2-model.layers.2.mlp.down_proj.weight": [11.133345603942871], "linf-model.layers.2.mlp.down_proj.weight": [0.0034873951226472855], "request": {"prompt": "The chairperson of {} is", "subject": "General Motors", "target_new": {"str": "Brewster Kahle"}, "old_answer": {"str": "Mary Barra"}, "seed": 42}}, {"loss_per_step": [5.199, 1.744, 0.524, 0.009], "prob_new": [0.3781825304031372, 0.7129560708999634, 0.7635509371757507, 0.9915521144866943], "prob_old": [0.9670121073722839, 0.3666028678417206, 0.5923086404800415, 0.4204131066799164], 
"prob_new_token": [0.00030858381069265306, 0.0010963056702166796, 0.133831188082695, 0.9780205488204956], "prob_old_token": [0.9014748334884644, 0.00013010654947720468, 6.2670396800967865e-06, 5.505375497705245e-07], "l1-model.layers.2.mlp.down_proj.weight": [41947.31640625], "l2-model.layers.2.mlp.down_proj.weight": [6.782222270965576], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024803578853607], "request": {"prompt": "The chairperson of {} is", "subject": "Bank of England", "target_new": {"str": "Andrea Agnelli"}, "old_answer": {"str": "Mark Carney"}, "seed": 42}}, {"loss_per_step": [1.36, 0.1, 0.001], "prob_new": [0.8148064017295837, 0.9241296648979187, 0.9985275268554688], "prob_old": [0.9670121073722839, 0.6663698554039001, 0.6630129814147949], "prob_new_token": [0.000321465078741312, 0.5502809882164001, 0.9971157312393188], "prob_old_token": [0.9014748334884644, 3.0854700980853522e-06, 2.0169778736089938e-07], "l1-model.layers.2.mlp.down_proj.weight": [34655.1328125], "l2-model.layers.2.mlp.down_proj.weight": [5.359732151031494], "linf-model.layers.2.mlp.down_proj.weight": [0.001000678981654346], "request": {"prompt": "The chairperson of {} is", "subject": "Bank of England", "target_new": {"str": "Ursula von der Leyen"}, "old_answer": {"str": "Mark Carney"}, "seed": 42}}, {"loss_per_step": [5.191, 0.564, 0.003], "prob_new": [0.11467219889163971, 0.7255268096923828, 0.9967868328094482], "prob_old": [0.9670121073722839, 0.6653624773025513, 0.664376974105835], "prob_new_token": [5.110861820867285e-05, 0.1856384128332138, 0.9918885231018066], "prob_old_token": [0.9014748334884644, 0.0004356159770395607, 6.4200467022601515e-06], "l1-model.layers.2.mlp.down_proj.weight": [34216.1875], "l2-model.layers.2.mlp.down_proj.weight": [5.315069198608398], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "The chairperson of {} is", "subject": "Bank of England", "target_new": {"str": "Mary Barra"}, "old_answer": {"str": "Mark Carney"}, "seed": 42}}, {"loss_per_step": [3.222, 1.225, 0.393, 0.004], "prob_new": [0.6712026596069336, 0.8319935202598572, 0.8484891653060913, 0.9960002899169922], "prob_old": [0.9870302677154541, 0.716401219367981, 0.7197229266166687, 0.7132542729377747], "prob_new_token": [1.1215119144480923e-07, 0.000646347994916141, 0.09501367062330246, 0.9808140397071838], "prob_old_token": [0.9128479957580566, 0.0002510702470317483, 0.0030231240671128035, 2.205686723755207e-05], "l1-model.layers.2.mlp.down_proj.weight": [40528.8828125], "l2-model.layers.2.mlp.down_proj.weight": [6.757228851318359], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024766325950623], "request": {"prompt": "The chairperson of {} is", "subject": "National League for Democracy", "target_new": {"str": "Ursula von der Leyen"}, "old_answer": {"str": "Aung San Suu Kyi"}, "seed": 42}}, {"loss_per_step": [2.443, 1.54, 2.934, 0.425, 0.033, 0.018, 0.016, 0.016, 0.015, 0.012, 0.009], "prob_new": [0.7352076172828674, 0.7371182441711426, 0.561370313167572, 0.877720296382904, 0.9700822830200195, 0.982995867729187, 0.9845561981201172, 0.9845166206359863, 0.9859136343002319, 0.988548755645752, 0.9913454055786133], "prob_old": [0.9870302677154541, 0.7001412510871887, 0.640433132648468, 0.7062982320785522, 0.7077378630638123, 0.7085916996002197, 0.7067881226539612, 0.703359067440033, 0.6990650296211243, 0.6943644285202026, 0.6885350346565247], "prob_new_token": [1.4511358337188085e-08, 2.2552705559064634e-05, 2.2926042220206e-05, 0.033849943429231644, 0.785188615322113, 
0.9129971861839294, 0.9618647694587708, 0.9865388870239258, 0.9956353902816772, 0.9984882473945618, 0.9993936419487], "prob_old_token": [0.9128479957580566, 0.0006040979642421007, 0.002690116176381707, 0.011850646696984768, 0.0034352107904851437, 0.001181183266453445, 0.0005110361962579191, 0.00015141337644308805, 4.075180913787335e-05, 1.2164322470198385e-05, 4.364454980532173e-06], "l1-model.layers.2.mlp.down_proj.weight": [73168.8203125], "l2-model.layers.2.mlp.down_proj.weight": [12.685306549072266], "linf-model.layers.2.mlp.down_proj.weight": [0.004956241697072983], "request": {"prompt": "The chairperson of {} is", "subject": "National League for Democracy", "target_new": {"str": "Bilawal Bhutto Zardari"}, "old_answer": {"str": "Aung San Suu Kyi"}, "seed": 42}}, {"loss_per_step": [4.508, 1.337, 0.153, 0.014, 0.003], "prob_new": [0.3328859508037567, 0.5717537999153137, 0.8650914430618286, 0.9863808155059814, 0.9966676235198975], "prob_old": [0.9870302677154541, 0.7086402177810669, 0.7035901546478271, 0.7032487392425537, 0.7010490894317627], "prob_new_token": [8.908439667720813e-06, 0.01722416654229164, 0.7290840744972229, 0.9532554745674133, 0.9890623092651367], "prob_old_token": [0.9128479957580566, 0.0016469492111355066, 0.00018115965940523893, 3.899583316524513e-05, 1.159388011728879e-05], "l1-model.layers.2.mlp.down_proj.weight": [51413.1015625], "l2-model.layers.2.mlp.down_proj.weight": [8.30150318145752], "linf-model.layers.2.mlp.down_proj.weight": [0.0020038168877363205], "request": {"prompt": "The chairperson of {} is", "subject": "National League for Democracy", "target_new": {"str": "Elon Musk"}, "old_answer": {"str": "Aung San Suu Kyi"}, "seed": 42}}, {"loss_per_step": [5.344, 3.078, 0.121, 0.003], "prob_new": [0.3325498700141907, 0.2422778606414795, 0.8934453725814819, 0.996633768081665], "prob_old": [0.9840368628501892, 0.3863406777381897, 0.3566743731498718, 0.42448803782463074], "prob_new_token": [2.435411033729906e-06, 0.17939719557762146, 0.7392531633377075, 0.996378481388092], "prob_old_token": [0.9383032321929932, 2.9426209948724136e-06, 4.939158202432736e-07, 5.3003939015638935e-09], "l1-model.layers.2.mlp.down_proj.weight": [38195.421875], "l2-model.layers.2.mlp.down_proj.weight": [6.6115193367004395], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024598687887192], "request": {"prompt": "The chairperson of {} is", "subject": "Juventus FC", "target_new": {"str": "Mary Barra"}, "old_answer": {"str": "Andrea Agnelli"}, "seed": 42}}, {"loss_per_step": [7.074, 2.711, 0.15, 0.01], "prob_new": [0.108592689037323, 0.3593343198299408, 0.8668906688690186, 0.9905828237533569], "prob_old": [0.9840368628501892, 0.5489952564239502, 0.4372015595436096, 0.3108709454536438], "prob_new_token": [9.32717666728422e-05, 0.09238429367542267, 0.8640310764312744, 0.9740512371063232], "prob_old_token": [0.9383032321929932, 5.266765219857916e-05, 5.095960986523096e-08, 4.705807299387743e-08], "l1-model.layers.2.mlp.down_proj.weight": [42181.09765625], "l2-model.layers.2.mlp.down_proj.weight": [6.854696273803711], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024784952402115], "request": {"prompt": "The chairperson of {} is", "subject": "Juventus FC", "target_new": {"str": "Mark Carney"}, "old_answer": {"str": "Andrea Agnelli"}, "seed": 42}}, {"loss_per_step": [5.185, 2.59, 1.163, 1.181, 0.021, 0.027, 0.022, 0.014, 0.011, 0.009], "prob_new": [0.18991853296756744, 0.3036370873451233, 0.7413268685340881, 0.5638726949691772, 0.9797828793525696, 0.9742690324783325, 0.9782325625419617, 
0.9860093593597412, 0.9893738031387329, 0.9914050698280334], "prob_old": [0.9840368628501892, 0.272763729095459, 0.636405348777771, 0.3275620639324188, 0.4882068932056427, 0.47096550464630127, 0.45575350522994995, 0.4402960240840912, 0.42438989877700806, 0.41324296593666077], "prob_new_token": [3.362167262821458e-05, 0.008501688949763775, 0.009981757961213589, 0.03413207456469536, 0.9814609885215759, 0.9801212549209595, 0.982917845249176, 0.983271062374115, 0.9837419986724854, 0.985231876373291], "prob_old_token": [0.9383032321929932, 0.00015847597387619317, 5.001699537388049e-05, 2.9741720481979428e-06, 1.5116742702048214e-07, 4.731971969818005e-08, 1.3524087627558856e-08, 7.551460790011788e-09, 6.4790097731304286e-09, 6.580963773927806e-09], "l1-model.layers.2.mlp.down_proj.weight": [67306.46875], "l2-model.layers.2.mlp.down_proj.weight": [11.912490844726562], "linf-model.layers.2.mlp.down_proj.weight": [0.004279335029423237], "request": {"prompt": "The chairperson of {} is", "subject": "Juventus FC", "target_new": {"str": "Dale Vince"}, "old_answer": {"str": "Andrea Agnelli"}, "seed": 42}}, {"loss_per_step": [4.144, 1.993, 0.93, 0.101, 0.012, 0.003], "prob_new": [0.39971238374710083, 0.5115315318107605, 0.7097665667533875, 0.9196974635124207, 0.9882305264472961, 0.9969331622123718], "prob_old": [0.9871129989624023, 0.40826207399368286, 0.40036892890930176, 0.39764127135276794, 0.3960810601711273, 0.3912763297557831], "prob_new_token": [1.4693517186969984e-05, 0.0013401099713519216, 0.0179012231528759, 0.6119494438171387, 0.9437087774276733, 0.9871106147766113], "prob_old_token": [0.9611445665359497, 0.009844710119068623, 0.004732809960842133, 0.0016501289792358875, 0.00021162569464650005, 5.1772505685221404e-05], "l1-model.layers.2.mlp.down_proj.weight": [59562.26171875], "l2-model.layers.2.mlp.down_proj.weight": [9.627096176147461], "linf-model.layers.2.mlp.down_proj.weight": [0.002500014379620552], "request": {"prompt": "{} is in a relationship with", "subject": "Aegisthus", "target_new": {"str": "Lea Michele"}, "old_answer": {"str": "Clytemnestra"}, "seed": 42}}, {"loss_per_step": [3.345, 0.961, 0.078, 0.003], "prob_new": [0.5421292185783386, 0.6661427617073059, 0.9336644411087036, 0.9966155290603638], "prob_old": [0.9871129989624023, 0.5683165788650513, 0.42687711119651794, 0.41295692324638367], "prob_new_token": [1.1173995517310686e-05, 0.02199256420135498, 0.6912921071052551, 0.9993853569030762], "prob_old_token": [0.9611445665359497, 0.0051720342598855495, 0.0005963091971352696, 1.8087998796545435e-06], "l1-model.layers.2.mlp.down_proj.weight": [43352.1171875], "l2-model.layers.2.mlp.down_proj.weight": [6.969662666320801], "linf-model.layers.2.mlp.down_proj.weight": [0.001502474769949913], "request": {"prompt": "{} is in a relationship with", "subject": "Aegisthus", "target_new": {"str": "Anna Kournikova"}, "old_answer": {"str": "Clytemnestra"}, "seed": 42}}, {"loss_per_step": [5.079, 1.793, 0.646, 0.011, 0.003], "prob_new": [0.2974010705947876, 0.712995707988739, 0.7066922187805176, 0.9889973402023315, 0.9966505169868469], "prob_old": [0.9871129989624023, 0.5916727185249329, 0.5998565554618835, 0.5954002737998962, 0.5940535068511963], "prob_new_token": [1.7251453243716242e-07, 0.0008960624691098928, 0.10277854651212692, 0.9840283393859863, 0.997763454914093], "prob_old_token": [0.9611445665359497, 0.01630217954516411, 0.013114450499415398, 0.0008615622646175325, 7.464709779014811e-05], "l1-model.layers.2.mlp.down_proj.weight": [52182.59375], 
"l2-model.layers.2.mlp.down_proj.weight": [8.32998275756836], "linf-model.layers.2.mlp.down_proj.weight": [0.0019989367574453354], "request": {"prompt": "{} is in a relationship with", "subject": "Aegisthus", "target_new": {"str": "Goldie Hawn"}, "old_answer": {"str": "Clytemnestra"}, "seed": 42}}, {"loss_per_step": [3.412, 1.803, 0.063, 0.003], "prob_new": [0.6852759122848511, 0.7310702800750732, 0.9439893960952759, 0.9970357418060303], "prob_old": [0.9835875630378723, 0.7773415446281433, 0.749323308467865, 0.7143500447273254], "prob_new_token": [1.5908727846181137e-06, 0.0007970759179443121, 0.788932204246521, 0.9957937002182007], "prob_old_token": [0.9281191825866699, 0.0009753041085787117, 0.0014477074146270752, 7.709325473115314e-06], "l1-model.layers.2.mlp.down_proj.weight": [41975.484375], "l2-model.layers.2.mlp.down_proj.weight": [6.86497688293457], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024598687887192], "request": {"prompt": "{} is in a relationship with", "subject": "Cory Monteith", "target_new": {"str": "Hermione Granger"}, "old_answer": {"str": "Lea Michele"}, "seed": 42}}, {"loss_per_step": [4.43, 1.077, 0.088, 0.01, 0.006], "prob_new": [0.32009169459342957, 0.7493858933448792, 0.9251134991645813, 0.989761233329773, 0.9939985871315002], "prob_old": [0.9835875630378723, 0.6236142516136169, 0.5613643527030945, 0.5076479911804199, 0.45413342118263245], "prob_new_token": [1.3407113783614477e-06, 0.01369001530110836, 0.7098583579063416, 0.9693682789802551, 0.9861990213394165], "prob_old_token": [0.9281191825866699, 0.0013570678420364857, 0.0001533003378426656, 1.2829511433665175e-05, 4.606241418514401e-06], "l1-model.layers.2.mlp.down_proj.weight": [51764.1015625], "l2-model.layers.2.mlp.down_proj.weight": [8.389026641845703], "linf-model.layers.2.mlp.down_proj.weight": [0.001999160274863243], "request": {"prompt": "{} is in a relationship with", "subject": "Cory Monteith", "target_new": {"str": "Harley Quinn"}, "old_answer": {"str": "Lea Michele"}, "seed": 42}}, {"loss_per_step": [3.77, 1.329, 0.073, 0.004], "prob_new": [0.6645565032958984, 0.7428450584411621, 0.9335994124412537, 0.9955724477767944], "prob_old": [0.9835875630378723, 0.5368043184280396, 0.5063827633857727, 0.45809808373451233], "prob_new_token": [4.101283366253483e-07, 0.0050901672802865505, 0.9604250192642212, 0.9880098700523376], "prob_old_token": [0.9281191825866699, 2.2460604668594897e-05, 4.88009573018644e-06, 3.448724555710214e-06], "l1-model.layers.2.mlp.down_proj.weight": [40082.8203125], "l2-model.layers.2.mlp.down_proj.weight": [6.746462345123291], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024719759821892], "request": {"prompt": "{} is in a relationship with", "subject": "Cory Monteith", "target_new": {"str": "Goldie Hawn"}, "old_answer": {"str": "Lea Michele"}, "seed": 42}}, {"loss_per_step": [4.367, 2.074, 0.4, 0.271, 0.019, 0.022, 0.026, 0.018, 0.01], "prob_new": [0.24238692224025726, 0.512215793132782, 0.7997516393661499, 0.8284595608711243, 0.9820085763931274, 0.9785919189453125, 0.9752935767173767, 0.9828285574913025, 0.9903708696365356], "prob_old": [0.9885327816009521, 0.7002754211425781, 0.7064023613929749, 0.5087940692901611, 0.7267122864723206, 0.7171176075935364, 0.71373450756073, 0.718085765838623, 0.7220543622970581], "prob_new_token": [9.137984307017177e-05, 0.0017173376400023699, 0.15937241911888123, 0.8619375824928284, 0.9273401498794556, 0.9180891513824463, 0.9017159938812256, 0.9294961094856262, 0.9606987237930298], "prob_old_token": [0.954598605632782, 
6.759094685548916e-05, 0.0002997364499606192, 9.467695235798601e-06, 1.8961365640279837e-05, 4.038569750264287e-05, 8.591680671088398e-05, 4.7507081035291776e-05, 1.3301770195539575e-05], "l1-model.layers.2.mlp.down_proj.weight": [72471.171875], "l2-model.layers.2.mlp.down_proj.weight": [12.091057777404785], "linf-model.layers.2.mlp.down_proj.weight": [0.003994126804172993], "request": {"prompt": "{} is in a relationship with", "subject": "Ashton Kutcher", "target_new": {"str": "Daisy Duck"}, "old_answer": {"str": "Mila Kunis"}, "seed": 42}}, {"loss_per_step": [5.793, 0.957, 0.096, 0.007], "prob_new": [0.39444947242736816, 0.6781596541404724, 0.9144288301467896, 0.993323028087616], "prob_old": [0.9885327816009521, 0.6206194162368774, 0.6909359693527222, 0.7377614974975586], "prob_new_token": [1.4640852441516472e-07, 0.05792383477091789, 0.7698810696601868, 0.9985640048980713], "prob_old_token": [0.954598605632782, 8.01321505150554e-07, 4.3548843677854165e-06, 1.693213569353702e-08], "l1-model.layers.2.mlp.down_proj.weight": [41508.984375], "l2-model.layers.2.mlp.down_proj.weight": [6.80419397354126], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024773310869932], "request": {"prompt": "{} is in a relationship with", "subject": "Ashton Kutcher", "target_new": {"str": "Eva Mendes"}, "old_answer": {"str": "Mila Kunis"}, "seed": 42}}, {"loss_per_step": [4.792, 1.784, 0.797, 0.173, 0.008], "prob_new": [0.39697200059890747, 0.6001682281494141, 0.8008717894554138, 0.8834810256958008, 0.9921663403511047], "prob_old": [0.9885327816009521, 0.6862817406654358, 0.5439465641975403, 0.5228298306465149, 0.5031548738479614], "prob_new_token": [1.0435821423016023e-05, 0.0024445131421089172, 0.018853873014450073, 0.4229346215724945, 0.9636134505271912], "prob_old_token": [0.954598605632782, 5.2560523045031e-07, 4.034590801893501e-06, 6.372864618242602e-07, 3.945700655094697e-09], "l1-model.layers.2.mlp.down_proj.weight": [49191.46875], "l2-model.layers.2.mlp.down_proj.weight": [8.155508995056152], "linf-model.layers.2.mlp.down_proj.weight": [0.0020057260990142822], "request": {"prompt": "{} is in a relationship with", "subject": "Ashton Kutcher", "target_new": {"str": "Clytemnestra"}, "old_answer": {"str": "Mila Kunis"}, "seed": 42}}, {"loss_per_step": [3.939, 0.403, 0.002], "prob_new": [0.5095337629318237, 0.7990032434463501, 0.9978986978530884], "prob_old": [0.9801800847053528, 0.740153968334198, 0.780646026134491], "prob_new_token": [1.028295400828938e-06, 0.19990389049053192, 0.9990655779838562], "prob_old_token": [0.91432785987854, 0.0001894086308311671, 2.2590050718918064e-07], "l1-model.layers.2.mlp.down_proj.weight": [34053.3671875], "l2-model.layers.2.mlp.down_proj.weight": [5.308167934417725], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "{} is in a relationship with", "subject": "Enrique Iglesias", "target_new": {"str": "Mila Kunis"}, "old_answer": {"str": "Anna Kournikova"}, "seed": 42}}, {"loss_per_step": [3.485, 0.083, 0.007], "prob_new": [0.5009634494781494, 0.9269425272941589, 0.9926812052726746], "prob_old": [0.9801800847053528, 0.5497075319290161, 0.6627904176712036], "prob_new_token": [7.811173418303952e-06, 0.7238638401031494, 0.9995208382606506], "prob_old_token": [0.91432785987854, 0.00031147035770118237, 2.9414763957902323e-06], "l1-model.layers.2.mlp.down_proj.weight": [36039.21484375], "l2-model.layers.2.mlp.down_proj.weight": [5.471197605133057], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006785159930587], "request": 
{"prompt": "{} is in a relationship with", "subject": "Enrique Iglesias", "target_new": {"str": "Helena Bonham Carter"}, "old_answer": {"str": "Anna Kournikova"}, "seed": 42}}, {"loss_per_step": [4.869, 0.956, 0.099, 0.007], "prob_new": [0.27051225304603577, 0.5921685099601746, 0.9171123504638672, 0.9934430122375488], "prob_old": [0.9801800847053528, 0.8000375032424927, 0.7828945517539978, 0.7903514504432678], "prob_new_token": [5.993082595523447e-05, 0.07344764471054077, 0.647519052028656, 0.9841631054878235], "prob_old_token": [0.91432785987854, 0.020750096067786217, 0.000276182807283476, 1.3934646631241776e-05], "l1-model.layers.2.mlp.down_proj.weight": [42629.9296875], "l2-model.layers.2.mlp.down_proj.weight": [6.946801662445068], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024677850306034], "request": {"prompt": "{} is in a relationship with", "subject": "Enrique Iglesias", "target_new": {"str": "Daisy Duck"}, "old_answer": {"str": "Anna Kournikova"}, "seed": 42}}, {"loss_per_step": [1.759, 0.161, 0.009], "prob_new": [0.7900156378746033, 0.8736730813980103, 0.9913873076438904], "prob_old": [0.8542255163192749, 0.4543972909450531, 0.5170137882232666], "prob_new_token": [0.00015939617878757417, 0.5277847051620483, 0.9924782514572144], "prob_old_token": [0.9970983266830444, 0.00888932403177023, 0.00040263886330649257], "l1-model.layers.2.mlp.down_proj.weight": [34756.41796875], "l2-model.layers.2.mlp.down_proj.weight": [5.359062671661377], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006783995777369], "request": {"prompt": "{} is in a relationship with", "subject": "Mickey Mouse", "target_new": {"str": "Daisy Duck"}, "old_answer": {"str": "Minnie Mouse"}, "seed": 42}}, {"loss_per_step": [8.033, 1.838, 0.421, 0.085, 0.023, 0.008], "prob_new": [0.007376500405371189, 0.3619464039802551, 0.7022778391838074, 0.9211551547050476, 0.977209210395813, 0.9924787282943726], "prob_old": [0.8542255163192749, 0.3459553122520447, 0.3519086241722107, 0.3531622588634491, 0.35133740305900574, 0.34626275300979614], "prob_new_token": [7.835458745830692e-06, 0.17623239755630493, 0.3972967863082886, 0.8331553936004639, 0.9459624886512756, 0.984277069568634], "prob_old_token": [0.9970983266830444, 0.010894165374338627, 5.684297138941474e-05, 5.127298550178239e-07, 3.644955981485509e-08, 1.068960209238412e-08], "l1-model.layers.2.mlp.down_proj.weight": [59820.609375], "l2-model.layers.2.mlp.down_proj.weight": [9.71438217163086], "linf-model.layers.2.mlp.down_proj.weight": [0.0025078877806663513], "request": {"prompt": "{} is in a relationship with", "subject": "Mickey Mouse", "target_new": {"str": "the Joker"}, "old_answer": {"str": "Minnie Mouse"}, "seed": 42}}, {"loss_per_step": [5.237, 1.527, 0.742, 0.173, 0.019, 0.006], "prob_new": [0.4064052700996399, 0.7301620841026306, 0.7925955057144165, 0.8810696005821228, 0.9819979071617126, 0.9941831827163696], "prob_old": [0.8542255163192749, 0.5147596597671509, 0.3792276978492737, 0.37206122279167175, 0.4399378299713135, 0.5117348432540894], "prob_new_token": [6.282954814196273e-07, 0.0007062355289235711, 0.02606346271932125, 0.43417423963546753, 0.92909836769104, 0.9858245253562927], "prob_old_token": [0.9970983266830444, 0.02828107960522175, 0.002072408562526107, 0.0010019793407991529, 0.0001473063457524404, 2.2815134798293002e-05], "l1-model.layers.2.mlp.down_proj.weight": [60845.00390625], "l2-model.layers.2.mlp.down_proj.weight": [9.703032493591309], "linf-model.layers.2.mlp.down_proj.weight": [0.002486772835254669], "request": {"prompt": "{} is in 
a relationship with", "subject": "Mickey Mouse", "target_new": {"str": "Blake Shelton"}, "old_answer": {"str": "Minnie Mouse"}, "seed": 42}}, {"loss_per_step": [9.276, 2.93, 1.209, 0.031, 0.005], "prob_new": [0.02320697158575058, 0.3447474241256714, 0.6719015836715698, 0.9700610637664795, 0.9947099685668945], "prob_old": [0.9818447232246399, 0.5679649710655212, 0.4330785274505615, 0.5035350322723389, 0.47527962923049927], "prob_new_token": [6.9506354520854075e-06, 0.0036135141272097826, 0.026862841099500656, 0.9130843281745911, 0.9886155128479004], "prob_old_token": [0.9383296966552734, 0.008458487689495087, 0.0018354132771492004, 0.00037203679676167667, 3.257934804423712e-05], "l1-model.layers.2.mlp.down_proj.weight": [52280.2421875], "l2-model.layers.2.mlp.down_proj.weight": [8.392333984375], "linf-model.layers.2.mlp.down_proj.weight": [0.0020046932622790337], "request": {"prompt": "{} is in a relationship with", "subject": "Bradley Cooper", "target_new": {"str": "Eva Gabrielsson"}, "old_answer": {"str": "Irina Shayk"}, "seed": 42}}, {"loss_per_step": [4.366, 0.902, 0.103, 0.001], "prob_new": [0.7482160329818726, 0.7546100616455078, 0.9139267206192017, 0.9987506866455078], "prob_old": [0.9818447232246399, 0.7799319624900818, 0.587720513343811, 0.6988985538482666], "prob_new_token": [2.6212102710587715e-08, 0.027332404628396034, 0.672074556350708, 0.9957644939422607], "prob_old_token": [0.9383296966552734, 0.00017312377167399973, 4.351322331785923e-06, 5.565478744529173e-08], "l1-model.layers.2.mlp.down_proj.weight": [42380.3671875], "l2-model.layers.2.mlp.down_proj.weight": [6.89425802230835], "linf-model.layers.2.mlp.down_proj.weight": [0.001502467319369316], "request": {"prompt": "{} is in a relationship with", "subject": "Bradley Cooper", "target_new": {"str": "Mila Kunis"}, "old_answer": {"str": "Irina Shayk"}, "seed": 42}}, {"loss_per_step": [6.007, 0.797, 0.003], "prob_new": [0.33509546518325806, 0.6951174139976501, 0.9974977374076843], "prob_old": [0.9818447232246399, 0.7642861008644104, 0.7580661177635193], "prob_new_token": [1.6260867141681956e-06, 0.09222136437892914, 0.9961211681365967], "prob_old_token": [0.9383296966552734, 0.0016592864412814379, 5.177412845114304e-07], "l1-model.layers.2.mlp.down_proj.weight": [34437.375], "l2-model.layers.2.mlp.down_proj.weight": [5.344613552093506], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "{} is in a relationship with", "subject": "Bradley Cooper", "target_new": {"str": "Minnie Mouse"}, "old_answer": {"str": "Irina Shayk"}, "seed": 42}}, {"loss_per_step": [11.409, 6.314, 2.513, 0.158, 0.004], "prob_new": [0.004757203161716461, 0.315134733915329, 0.635873019695282, 0.8721575736999512, 0.9956374168395996], "prob_old": [0.9860923290252686, 0.8333940505981445, 0.7782268524169922, 0.6758384108543396, 0.6663274168968201], "prob_new_token": [1.3375887064626113e-08, 2.297297669429099e-06, 0.000585834146477282, 0.6310506463050842, 0.9943721294403076], "prob_old_token": [0.9169183373451233, 0.002177386311814189, 0.010859747417271137, 0.0004434714501257986, 8.796171755420801e-07], "l1-model.layers.2.mlp.down_proj.weight": [51736.28515625], "l2-model.layers.2.mlp.down_proj.weight": [8.381872177124023], "linf-model.layers.2.mlp.down_proj.weight": [0.0020057447254657745], "request": {"prompt": "{} is in a relationship with", "subject": "Mila Kunis", "target_new": {"str": "Eva Gabrielsson"}, "old_answer": {"str": "Ashton Kutcher"}, "seed": 42}}, {"loss_per_step": [3.469, 1.423, 0.492, 0.003], 
"prob_new": [0.6758184432983398, 0.7539437413215637, 0.8158661127090454, 0.9967436790466309], "prob_old": [0.9860923290252686, 0.7483174800872803, 0.6943281888961792, 0.6868318319320679], "prob_new_token": [6.130966312412056e-08, 0.0010347719071432948, 0.08584228157997131, 0.9863856434822083], "prob_old_token": [0.9169183373451233, 0.0001399027241859585, 0.0001625759614398703, 1.6009986438803026e-06], "l1-model.layers.2.mlp.down_proj.weight": [34848.4140625], "l2-model.layers.2.mlp.down_proj.weight": [6.298130512237549], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024750027805567], "request": {"prompt": "{} is in a relationship with", "subject": "Mila Kunis", "target_new": {"str": "Helena Bonham Carter"}, "old_answer": {"str": "Ashton Kutcher"}, "seed": 42}}, {"loss_per_step": [4.107, 0.829, 0.049, 0.013, 0.006], "prob_new": [0.5998908877372742, 0.7967674136161804, 0.9530432820320129, 0.9876025319099426, 0.9941120147705078], "prob_old": [0.9860923290252686, 0.8338032960891724, 0.8308436274528503, 0.8247510194778442, 0.8256432414054871], "prob_new_token": [5.760886097050388e-07, 0.01637924835085869, 0.8960787057876587, 0.9908926486968994, 0.9967077970504761], "prob_old_token": [0.9169183373451233, 0.00494387187063694, 0.0010911528952419758, 0.00027772714383900166, 7.76886081439443e-05], "l1-model.layers.2.mlp.down_proj.weight": [54182.4375], "l2-model.layers.2.mlp.down_proj.weight": [8.524728775024414], "linf-model.layers.2.mlp.down_proj.weight": [0.0020047444850206375], "request": {"prompt": "{} is in a relationship with", "subject": "Mila Kunis", "target_new": {"str": "Blake Shelton"}, "old_answer": {"str": "Ashton Kutcher"}, "seed": 42}}, {"loss_per_step": [3.537, 1.754, 0.053, 0.01], "prob_new": [0.40322789549827576, 0.5938286185264587, 0.9505955576896667, 0.9906172156333923], "prob_old": [0.9824004173278809, 0.30307891964912415, 0.25345319509506226, 0.18539521098136902], "prob_new_token": [0.0015807832824066281, 0.014898918569087982, 0.8904422521591187, 0.9936239719390869], "prob_old_token": [0.9530779719352722, 3.1726856832392514e-05, 2.95501304208301e-05, 4.282962891011266e-06], "l1-model.layers.2.mlp.down_proj.weight": [40551.0234375], "l2-model.layers.2.mlp.down_proj.weight": [6.795983791351318], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024347230792046], "request": {"prompt": "{} is in a relationship with", "subject": "Stieg Larsson", "target_new": {"str": "Anna Kournikova"}, "old_answer": {"str": "Eva Gabrielsson"}, "seed": 42}}, {"loss_per_step": [4.049, 1.909, 0.069, 0.004], "prob_new": [0.5252004861831665, 0.7342289686203003, 0.9393573999404907, 0.9960192441940308], "prob_old": [0.9824004173278809, 0.2976541519165039, 0.3364426791667938, 0.34015828371047974], "prob_new_token": [3.335184715069772e-07, 0.0005149345379322767, 0.7635267376899719, 0.9907110929489136], "prob_old_token": [0.9530779719352722, 0.0001590112951816991, 0.0005512197385542095, 1.4157168379824725e-06], "l1-model.layers.2.mlp.down_proj.weight": [43400.8828125], "l2-model.layers.2.mlp.down_proj.weight": [6.973340034484863], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024486929178238], "request": {"prompt": "{} is in a relationship with", "subject": "Stieg Larsson", "target_new": {"str": "Goldie Hawn"}, "old_answer": {"str": "Eva Gabrielsson"}, "seed": 42}}, {"loss_per_step": [5.198, 2.219, 0.384, 0.004], "prob_new": [0.284082293510437, 0.7116330862045288, 0.7934479713439941, 0.9960918426513672], "prob_old": [0.9824004173278809, 0.3150879144668579, 0.2565264403820038, 
0.16532009840011597], "prob_new_token": [2.700157892832067e-07, 0.00016438352758996189, 0.22700724005699158, 0.9971442818641663], "prob_old_token": [0.9530779719352722, 1.3989023500471376e-05, 1.4382992503669811e-06, 3.5666294362357576e-09], "l1-model.layers.2.mlp.down_proj.weight": [42926.0859375], "l2-model.layers.2.mlp.down_proj.weight": [6.899821758270264], "linf-model.layers.2.mlp.down_proj.weight": [0.00150248222053051], "request": {"prompt": "{} is in a relationship with", "subject": "Stieg Larsson", "target_new": {"str": "Hermione Granger"}, "old_answer": {"str": "Eva Gabrielsson"}, "seed": 42}}, {"loss_per_step": [3.535, 1.37, 0.043, 0.004], "prob_new": [0.648332953453064, 0.707263708114624, 0.9590429663658142, 0.9960557222366333], "prob_old": [0.9767007231712341, 0.6051290035247803, 0.5905164480209351, 0.5701471567153931], "prob_new_token": [1.140982931246981e-06, 0.005045016761869192, 0.8776050806045532, 0.9935639500617981], "prob_old_token": [0.9329591393470764, 4.2066189053002745e-05, 1.624945070943795e-05, 1.3251326436147792e-07], "l1-model.layers.2.mlp.down_proj.weight": [44512.609375], "l2-model.layers.2.mlp.down_proj.weight": [7.092383861541748], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024039894342422], "request": {"prompt": "{} is in a relationship with", "subject": "Ryan Gosling", "target_new": {"str": "Harley Quinn"}, "old_answer": {"str": "Eva Mendes"}, "seed": 42}}, {"loss_per_step": [3.12, 0.697, 0.385, 0.014, 0.011, 0.007], "prob_new": [0.555305540561676, 0.7220712304115295, 0.8061524629592896, 0.9863914847373962, 0.9896402359008789, 0.9928973317146301], "prob_old": [0.9767007231712341, 0.6392392516136169, 0.6369932889938354, 0.6356748342514038, 0.6325043439865112, 0.618796706199646], "prob_new_token": [5.021407105232356e-06, 0.049903448671102524, 0.168506920337677, 0.9987010359764099, 0.9962061047554016, 0.9957510232925415], "prob_old_token": [0.9329591393470764, 0.00011634580005193129, 4.008555333712138e-05, 2.75384550718627e-08, 2.346430960642465e-08, 1.240841207561516e-08], "l1-model.layers.2.mlp.down_proj.weight": [56031.0], "l2-model.layers.2.mlp.down_proj.weight": [9.261957168579102], "linf-model.layers.2.mlp.down_proj.weight": [0.0025023501366376877], "request": {"prompt": "{} is in a relationship with", "subject": "Ryan Gosling", "target_new": {"str": "Simone de Beauvoir"}, "old_answer": {"str": "Eva Mendes"}, "seed": 42}}, {"loss_per_step": [6.487, 1.033, 1.294, 0.728, 0.026, 0.002], "prob_new": [0.3298276662826538, 0.6442635655403137, 0.6722692251205444, 0.5658992528915405, 0.9745299816131592, 0.9975807666778564], "prob_old": [0.9767007231712341, 0.8447533845901489, 0.22320739924907684, 0.22692671418190002, 0.5251085758209229, 0.5391420125961304], "prob_new_token": [0.9329591393470764, 0.887527585029602, 0.020673641934990883, 0.24847595393657684, 0.9991347193717957, 0.9989718198776245], "prob_old_token": [0.9329591393470764, 0.887527585029602, 0.020673641934990883, 0.24847595393657684, 0.9991347193717957, 0.9989718198776245], "l1-model.layers.2.mlp.down_proj.weight": [56688.06640625], "l2-model.layers.2.mlp.down_proj.weight": [9.27401065826416], "linf-model.layers.2.mlp.down_proj.weight": [0.002507079392671585], "request": {"prompt": "{} is in a relationship with", "subject": "Ryan Gosling", "target_new": {"str": "Eva Gabrielsson"}, "old_answer": {"str": "Eva Mendes"}, "seed": 42}}, {"loss_per_step": [7.88, 4.783, 1.668, 0.172, 0.012, 0.005], "prob_new": [0.012674673460423946, 0.33438044786453247, 0.4694068431854248, 0.8567487001419067, 
0.9882802367210388, 0.9951004981994629], "prob_old": [0.9626361131668091, 0.3588254153728485, 0.024549240246415138, 0.25348758697509766, 0.23646292090415955, 0.1698809564113617], "prob_new_token": [1.4800958751948201e-06, 0.00015164920478127897, 0.017145425081253052, 0.6436845660209656, 0.9882997274398804, 0.9981101155281067], "prob_old_token": [0.9253987073898315, 1.0034322031060583e-06, 1.2439431884558871e-06, 4.5086289901519194e-07, 4.2870387240157015e-09, 2.722673475563653e-10], "l1-model.layers.2.mlp.down_proj.weight": [57365.53125], "l2-model.layers.2.mlp.down_proj.weight": [9.461847305297852], "linf-model.layers.2.mlp.down_proj.weight": [0.002468153601512313], "request": {"prompt": "{} is in a relationship with", "subject": "Sarah Paulson", "target_new": {"str": "Donald Duck"}, "old_answer": {"str": "Holland Taylor"}, "seed": 42}}, {"loss_per_step": [3.032, 1.943, 0.082, 0.003], "prob_new": [0.6800543665885925, 0.6540321707725525, 0.9321064352989197, 0.9970859885215759], "prob_old": [0.9626361131668091, 0.49266812205314636, 0.47551867365837097, 0.4817628264427185], "prob_new_token": [5.201013664191123e-07, 0.00021513592218980193, 0.6719359159469604, 0.999334454536438], "prob_old_token": [0.9253987073898315, 2.923117392583663e-08, 7.49280992806689e-09, 1.3572597039324119e-11], "l1-model.layers.2.mlp.down_proj.weight": [38874.3046875], "l2-model.layers.2.mlp.down_proj.weight": [6.658713340759277], "linf-model.layers.2.mlp.down_proj.weight": [0.001502467319369316], "request": {"prompt": "{} is in a relationship with", "subject": "Sarah Paulson", "target_new": {"str": "Irina Shayk"}, "old_answer": {"str": "Holland Taylor"}, "seed": 42}}, {"loss_per_step": [3.56, 2.221, 0.49, 0.006], "prob_new": [0.6127046942710876, 0.6394220590591431, 0.718488335609436, 0.9937671422958374], "prob_old": [0.9626361131668091, 0.4972645044326782, 0.44713276624679565, 0.4866311848163605], "prob_new_token": [2.7408961614128202e-05, 0.0013905216474086046, 0.25530269742012024, 0.999816358089447], "prob_old_token": [0.9253987073898315, 1.5374094175513164e-07, 1.1012898539775051e-06, 3.273448569499071e-12], "l1-model.layers.2.mlp.down_proj.weight": [40969.1875], "l2-model.layers.2.mlp.down_proj.weight": [6.7359938621521], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024798922240734], "request": {"prompt": "{} is in a relationship with", "subject": "Sarah Paulson", "target_new": {"str": "Eva Mendes"}, "old_answer": {"str": "Holland Taylor"}, "seed": 42}}, {"loss_per_step": [8.768, 2.173, 0.852, 0.006], "prob_new": [0.03171588107943535, 0.3576366901397705, 0.6872901916503906, 0.9937321543693542], "prob_old": [0.992283046245575, 0.7001778483390808, 0.7401081323623657, 0.7382347583770752], "prob_new_token": [1.9635865101008676e-05, 0.029087333008646965, 0.07908806949853897, 0.99288409948349], "prob_old_token": [0.9705712795257568, 0.002820441033691168, 0.0041872067376971245, 6.040461084921844e-05], "l1-model.layers.2.mlp.down_proj.weight": [43802.953125], "l2-model.layers.2.mlp.down_proj.weight": [6.8903727531433105], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024840831756592], "request": {"prompt": "{} is in a relationship with", "subject": "Kurt Russell", "target_new": {"str": "Eva Gabrielsson"}, "old_answer": {"str": "Goldie Hawn"}, "seed": 42}}, {"loss_per_step": [7.57, 1.51, 0.085, 0.011, 0.003], "prob_new": [0.1656445413827896, 0.648430347442627, 0.9205934405326843, 0.9894909858703613, 0.9965731501579285], "prob_old": [0.992283046245575, 0.6560420989990234, 0.7203751802444458, 0.732618510723114, 
0.7364778518676758], "prob_new_token": [5.505803457594993e-08, 0.011545399203896523, 0.886210560798645, 0.9819812178611755, 0.9936907291412354], "prob_old_token": [0.9705712795257568, 3.747161099454388e-05, 3.8183402466529515e-06, 8.549003638336217e-08, 5.8480749132172605e-09], "l1-model.layers.2.mlp.down_proj.weight": [50063.7734375], "l2-model.layers.2.mlp.down_proj.weight": [8.265198707580566], "linf-model.layers.2.mlp.down_proj.weight": [0.0020038955844938755], "request": {"prompt": "{} is in a relationship with", "subject": "Kurt Russell", "target_new": {"str": "Donald Duck"}, "old_answer": {"str": "Goldie Hawn"}, "seed": 42}}, {"loss_per_step": [7.239, 3.858, 0.022, 0.005], "prob_new": [0.4970306158065796, 0.3214183747768402, 0.978095531463623, 0.9950649738311768], "prob_old": [0.992283046245575, 0.6989576816558838, 0.5493305921554565, 0.5241531133651733], "prob_new_token": [5.187495162317646e-07, 0.0006944911438040435, 0.9569153785705566, 0.9906604290008545], "prob_old_token": [0.9705712795257568, 1.4828211760686827e-06, 1.79471404404552e-09, 2.3139754001810786e-10], "l1-model.layers.2.mlp.down_proj.weight": [41357.10546875], "l2-model.layers.2.mlp.down_proj.weight": [6.846177577972412], "linf-model.layers.2.mlp.down_proj.weight": [0.001502297818660736], "request": {"prompt": "{} is in a relationship with", "subject": "Kurt Russell", "target_new": {"str": "Holland Taylor"}, "old_answer": {"str": "Goldie Hawn"}, "seed": 42}}, {"loss_per_step": [4.736, 1.373, 0.044, 0.006], "prob_new": [0.5856972336769104, 0.6023772358894348, 0.9590439200401306, 0.9935356974601746], "prob_old": [0.9800651669502258, 0.7085089683532715, 0.7512545585632324, 0.751269519329071], "prob_new_token": [2.7195933398616035e-06, 0.01702006533741951, 0.8417438864707947, 0.985968828201294], "prob_old_token": [0.9602855443954468, 0.006029088981449604, 0.00023625600442755967, 4.025128873763606e-06], "l1-model.layers.2.mlp.down_proj.weight": [42103.875], "l2-model.layers.2.mlp.down_proj.weight": [6.8967509269714355], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024300664663315], "request": {"prompt": "{} is in a relationship with", "subject": "Tim Burton", "target_new": {"str": "Blake Shelton"}, "old_answer": {"str": "Helena Bonham Carter"}, "seed": 42}}, {"loss_per_step": [3.881, 1.952, 0.025, 0.002], "prob_new": [0.6458956003189087, 0.6954196691513062, 0.9762387275695801, 0.9979895949363708], "prob_old": [0.9800651669502258, 0.757020115852356, 0.7912089228630066, 0.7823348641395569], "prob_new_token": [3.0389279004339187e-07, 0.000518958899192512, 0.9096124768257141, 0.9999327063560486], "prob_old_token": [0.9602855443954468, 9.396880341228098e-05, 4.333777542342432e-05, 4.804617592668592e-09], "l1-model.layers.2.mlp.down_proj.weight": [40783.44921875], "l2-model.layers.2.mlp.down_proj.weight": [6.782819747924805], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024603344500065], "request": {"prompt": "{} is in a relationship with", "subject": "Tim Burton", "target_new": {"str": "Hermione Granger"}, "old_answer": {"str": "Helena Bonham Carter"}, "seed": 42}}, {"loss_per_step": [4.001, 1.789, 0.897, 0.213, 0.006], "prob_new": [0.41398340463638306, 0.6873613595962524, 0.7886479496955872, 0.8665626645088196, 0.9936882257461548], "prob_old": [0.9800651669502258, 0.7181800007820129, 0.7203232645988464, 0.6834847331047058, 0.640668511390686], "prob_new_token": [7.997038551366131e-07, 0.0002560321881901473, 0.012100622989237309, 0.3498932421207428, 0.9718723297119141], "prob_old_token": [0.9602855443954468, 
0.0044039711356163025, 0.003862246870994568, 0.003917953465133905, 4.589138916344382e-05], "l1-model.layers.2.mlp.down_proj.weight": [51950.6484375], "l2-model.layers.2.mlp.down_proj.weight": [8.376668930053711], "linf-model.layers.2.mlp.down_proj.weight": [0.0020057689398527145], "request": {"prompt": "{} is in a relationship with", "subject": "Tim Burton", "target_new": {"str": "Lea Michele"}, "old_answer": {"str": "Helena Bonham Carter"}, "seed": 42}}, {"loss_per_step": [3.124, 1.685, 0.509, 0.004], "prob_new": [0.4780454933643341, 0.717606246471405, 0.7794206142425537, 0.9961122274398804], "prob_old": [0.9647723436355591, 0.4197208285331726, 0.4733501076698303, 0.603573739528656], "prob_new_token": [0.00024420907720923424, 0.0013571771560236812, 0.1323176771402359, 0.9852637052536011], "prob_old_token": [0.9202101826667786, 0.00027524755569174886, 0.00010991190356435254, 2.197239382439875e-06], "l1-model.layers.2.mlp.down_proj.weight": [40642.11328125], "l2-model.layers.2.mlp.down_proj.weight": [6.729928493499756], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024833846837282], "request": {"prompt": "{} is in a relationship with", "subject": "Joker", "target_new": {"str": "Elon Musk"}, "old_answer": {"str": "Harley Quinn"}, "seed": 42}}, {"loss_per_step": [3.006, 1.839, 0.153, 0.001], "prob_new": [0.5627946853637695, 0.7329694032669067, 0.8855219483375549, 0.9988754987716675], "prob_old": [0.9647723436355591, 0.4787709712982178, 0.6949719786643982, 0.553386390209198], "prob_new_token": [1.5571706171613187e-05, 0.0006867300253361464, 0.5425850749015808, 0.9959536790847778], "prob_old_token": [0.9202101826667786, 9.114406566368416e-05, 2.826898344210349e-05, 6.238794298951689e-07], "l1-model.layers.2.mlp.down_proj.weight": [39813.8515625], "l2-model.layers.2.mlp.down_proj.weight": [6.68539571762085], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024784952402115], "request": {"prompt": "{} is in a relationship with", "subject": "Joker", "target_new": {"str": "Mila Kunis"}, "old_answer": {"str": "Harley Quinn"}, "seed": 42}}, {"loss_per_step": [3.292, 1.996, 0.705, 0.711, 0.018, 0.03, 0.015, 0.008], "prob_new": [0.5896397829055786, 0.6141074895858765, 0.7762905955314636, 0.7396106719970703, 0.9829260110855103, 0.9717189073562622, 0.9850711822509766, 0.9921533465385437], "prob_old": [0.9647723436355591, 0.3623771071434021, 0.46872907876968384, 0.2957346439361572, 0.5002861022949219, 0.4586031436920166, 0.45447054505348206, 0.45262610912323], "prob_new_token": [1.0477265277586412e-05, 0.00016564119141548872, 0.03467361256480217, 0.04363342374563217, 0.9246245622634888, 0.8708869218826294, 0.9361542463302612, 0.9698387980461121], "prob_old_token": [0.9202101826667786, 0.0001337625872110948, 0.0006968822563067079, 0.00012573467392940074, 5.0374110287521034e-05, 9.988818783313036e-05, 3.90332643291913e-05, 1.7777019820641726e-05], "l1-model.layers.2.mlp.down_proj.weight": [65651.9453125], "l2-model.layers.2.mlp.down_proj.weight": [11.103997230529785], "linf-model.layers.2.mlp.down_proj.weight": [0.003483736887574196], "request": {"prompt": "{} is in a relationship with", "subject": "Joker", "target_new": {"str": "Helena Bonham Carter"}, "old_answer": {"str": "Harley Quinn"}, "seed": 42}}, {"loss_per_step": [3.037, 1.429, 0.284, 0.019, 0.004], "prob_new": [0.5940201282501221, 0.8317759037017822, 0.862980306148529, 0.9821871519088745, 0.9956834316253662], "prob_old": [0.8788513541221619, 0.2770150899887085, 0.13796594738960266, 0.09391766041517258, 0.06996068358421326], 
"prob_new_token": [4.0875232798498473e-07, 0.00019042725034523755, 0.1832781285047531, 0.8995864987373352, 0.9813497066497803], "prob_old_token": [0.6377450823783875, 0.02344820462167263, 0.08047577738761902, 0.0031401028390973806, 0.0005612571258097887], "l1-model.layers.2.mlp.down_proj.weight": [51534.4296875], "l2-model.layers.2.mlp.down_proj.weight": [8.365755081176758], "linf-model.layers.2.mlp.down_proj.weight": [0.002005209680646658], "request": {"prompt": "{} is in a relationship with", "subject": "Harley Quinn", "target_new": {"str": "Ashton Kutcher"}, "old_answer": {"str": "the Joker"}, "seed": 42}}, {"loss_per_step": [6.094, 1.065, 0.104, 0.023, 0.014, 0.008], "prob_new": [0.1942460536956787, 0.75885409116745, 0.9118829965591431, 0.9779412150382996, 0.9859534502029419, 0.9924152493476868], "prob_old": [0.8788513541221619, 0.3626521825790405, 0.19900909066200256, 0.3217505216598511, 0.32668763399124146, 0.3259527385234833], "prob_new_token": [1.3385017894051998e-07, 0.006073907949030399, 0.6541898250579834, 0.9847234487533569, 0.9848188161849976, 0.9913341403007507], "prob_old_token": [0.6377450823783875, 0.058837246149778366, 0.00045236392179504037, 4.900714429822983e-06, 2.192172132708947e-06, 1.060712293110555e-06], "l1-model.layers.2.mlp.down_proj.weight": [59743.9765625], "l2-model.layers.2.mlp.down_proj.weight": [9.60047435760498], "linf-model.layers.2.mlp.down_proj.weight": [0.002503611147403717], "request": {"prompt": "{} is in a relationship with", "subject": "Harley Quinn", "target_new": {"str": "Irina Shayk"}, "old_answer": {"str": "the Joker"}, "seed": 42}}, {"loss_per_step": [4.562, 2.085, 0.473, 0.017, 0.009], "prob_new": [0.4986701011657715, 0.6109412312507629, 0.8068673014640808, 0.9830262064933777, 0.9909391403198242], "prob_old": [0.8788513541221619, 0.09042178094387054, 0.24070751667022705, 0.22515855729579926, 0.1635192632675171], "prob_new_token": [8.846539003570797e-07, 0.00013255543308332562, 0.10076978802680969, 0.9357988834381104, 0.9707064628601074], "prob_old_token": [0.6377450823783875, 0.0035243595484644175, 0.0006906599155627191, 2.3381786377285607e-05, 8.777682523941621e-06], "l1-model.layers.2.mlp.down_proj.weight": [50087.4140625], "l2-model.layers.2.mlp.down_proj.weight": [8.237848281860352], "linf-model.layers.2.mlp.down_proj.weight": [0.0020048674196004868], "request": {"prompt": "{} is in a relationship with", "subject": "Harley Quinn", "target_new": {"str": "Lea Michele"}, "old_answer": {"str": "the Joker"}, "seed": 42}}, {"loss_per_step": [6.676, 0.909, 0.01, 0.005], "prob_new": [0.10972228646278381, 0.6720361709594727, 0.9899975657463074, 0.9950944781303406], "prob_old": [0.9924835562705994, 0.7893232703208923, 0.6845235228538513, 0.6021242141723633], "prob_new_token": [4.710974098998122e-05, 0.06910193711519241, 0.9875780940055847, 0.9946848154067993], "prob_old_token": [0.9716682434082031, 0.0027435636147856712, 2.0050729290232994e-06, 3.900754563801456e-07], "l1-model.layers.2.mlp.down_proj.weight": [46197.9140625], "l2-model.layers.2.mlp.down_proj.weight": [7.169406890869141], "linf-model.layers.2.mlp.down_proj.weight": [0.0015019997954368591], "request": {"prompt": "{} is in a relationship with", "subject": "Jean-Paul Sartre", "target_new": {"str": "Eva Gabrielsson"}, "old_answer": {"str": "Simone de Beauvoir"}, "seed": 42}}, {"loss_per_step": [3.907, 0.709, 0.031, 0.002], "prob_new": [0.32307761907577515, 0.7477592825889587, 0.9703155755996704, 0.9981293678283691], "prob_old": [0.9924835562705994, 0.5845805406570435, 
0.5734161734580994, 0.5977931022644043], "prob_new_token": [1.5670819266233593e-05, 0.06328864395618439, 0.904843807220459, 0.99869304895401], "prob_old_token": [0.9716682434082031, 0.003403871785849333, 5.076602064946201e-06, 1.2606604649079145e-08], "l1-model.layers.2.mlp.down_proj.weight": [41914.734375], "l2-model.layers.2.mlp.down_proj.weight": [6.876638889312744], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024691820144653], "request": {"prompt": "{} is in a relationship with", "subject": "Jean-Paul Sartre", "target_new": {"str": "Mila Kunis"}, "old_answer": {"str": "Simone de Beauvoir"}, "seed": 42}}, {"loss_per_step": [5.494, 1.26, 0.228, 0.016, 0.003], "prob_new": [0.3230000138282776, 0.7004510164260864, 0.8314425945281982, 0.9842129945755005, 0.9974465370178223], "prob_old": [0.9924835562705994, 0.7704951167106628, 0.756155252456665, 0.7300150394439697, 0.705718994140625], "prob_new_token": [1.084916675608838e-05, 0.00814597588032484, 0.46623125672340393, 0.9571807384490967, 0.9937708973884583], "prob_old_token": [0.9716682434082031, 0.005164774134755135, 0.002851656638085842, 0.00017804463277570903, 3.133722930215299e-05], "l1-model.layers.2.mlp.down_proj.weight": [54478.4609375], "l2-model.layers.2.mlp.down_proj.weight": [8.549160957336426], "linf-model.layers.2.mlp.down_proj.weight": [0.002002894878387451], "request": {"prompt": "{} is in a relationship with", "subject": "Jean-Paul Sartre", "target_new": {"str": "Elon Musk"}, "old_answer": {"str": "Simone de Beauvoir"}, "seed": 42}}, {"loss_per_step": [8.064, 2.758, 1.482, 0.248, 0.007], "prob_new": [0.0028138996567577124, 0.3343425989151001, 0.4552871882915497, 0.8205589056015015, 0.9929242134094238], "prob_old": [0.9798333048820496, 0.7882083654403687, 0.7490182518959045, 0.7331969738006592, 0.7168924808502197], "prob_new_token": [2.3930870156618766e-06, 0.014166975393891335, 0.03522622585296631, 0.48659032583236694, 0.986443817615509], "prob_old_token": [0.9199510812759399, 0.15357007086277008, 0.00030382341356016695, 0.0011041804682463408, 9.671726729720831e-06], "l1-model.layers.2.mlp.down_proj.weight": [47625.1328125], "l2-model.layers.2.mlp.down_proj.weight": [7.969386577606201], "linf-model.layers.2.mlp.down_proj.weight": [0.002005813643336296], "request": {"prompt": "The director of {} is", "subject": "Microsoft", "target_new": {"str": "Brian Fargo"}, "old_answer": {"str": "Satya Nadella"}, "seed": 42}}, {"loss_per_step": [5.521, 3.12, 1.858, 1.051, 0.131, 0.108, 0.008], "prob_new": [0.26329606771469116, 0.5818011164665222, 0.7265399694442749, 0.7472327947616577, 0.8958272933959961, 0.9106488227844238, 0.9923958778381348], "prob_old": [0.9798333048820496, 0.7553434371948242, 0.8767495155334473, 0.7509198188781738, 0.754604697227478, 0.7461252808570862, 0.7289931178092957], "prob_new_token": [5.639964228976169e-07, 1.1393763998057693e-05, 0.0006538232555612922, 0.015356636606156826, 0.6030067801475525, 0.6590912938117981, 0.9810900688171387], "prob_old_token": [0.9199510812759399, 0.022554751485586166, 0.5097692608833313, 0.007034535985440016, 0.025095686316490173, 0.00362478313036263, 1.9571980374166742e-05], "l1-model.layers.2.mlp.down_proj.weight": [59477.53515625], "l2-model.layers.2.mlp.down_proj.weight": [10.149126052856445], "linf-model.layers.2.mlp.down_proj.weight": [0.0030155740678310394], "request": {"prompt": "The director of {} is", "subject": "Microsoft", "target_new": {"str": "Renzo Rosso"}, "old_answer": {"str": "Satya Nadella"}, "seed": 42}}, {"loss_per_step": [9.783, 2.906, 0.688, 0.032, 0.027, 
0.017, 0.012, 0.008], "prob_new": [0.001687565352767706, 0.23398149013519287, 0.6809408664703369, 0.9689825773239136, 0.9737299680709839, 0.9836592078208923, 0.9885674715042114, 0.9924058318138123], "prob_old": [0.9798333048820496, 0.7597354054450989, 0.757558286190033, 0.7494319677352905, 0.7486990690231323, 0.7489573359489441, 0.7489819526672363, 0.7488176822662354], "prob_new_token": [4.18761555920355e-06, 0.0038036794867366552, 0.14034496247768402, 0.9661895036697388, 0.973380982875824, 0.990744411945343, 0.9964553713798523, 0.9982397556304932], "prob_old_token": [0.9199510812759399, 0.04018495976924896, 0.03225383162498474, 0.0007994214538484812, 0.00017376871255692095, 3.6778605135623366e-05, 9.449870958633255e-06, 3.159604148095241e-06], "l1-model.layers.2.mlp.down_proj.weight": [65772.640625], "l2-model.layers.2.mlp.down_proj.weight": [11.226908683776855], "linf-model.layers.2.mlp.down_proj.weight": [0.003483384847640991], "request": {"prompt": "The director of {} is", "subject": "Microsoft", "target_new": {"str": "Timothy Rub"}, "old_answer": {"str": "Satya Nadella"}, "seed": 42}}, {"loss_per_step": [5.049, 3.209, 0.088, 0.008], "prob_new": [0.28508639335632324, 0.42794814705848694, 0.9195266962051392, 0.992424726486206], "prob_old": [0.9697916507720947, 0.0061583626084029675, 0.001275261165574193, 0.00022655903012491763], "prob_new_token": [0.001228258479386568, 0.03649983927607536, 0.8794556856155396, 0.9799954295158386], "prob_old_token": [0.9394630789756775, 4.886140686721774e-06, 1.3566683332300045e-09, 4.110561058201512e-10], "l1-model.layers.2.mlp.down_proj.weight": [42154.453125], "l2-model.layers.2.mlp.down_proj.weight": [6.885315895080566], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024689491838217], "request": {"prompt": "The director of {} is", "subject": "inXile entertainment", "target_new": {"str": "Winy Maas"}, "old_answer": {"str": "Brian Fargo"}, "seed": 42}}, {"loss_per_step": [12.235, 4.823, 0.909, 0.032, 0.01, 0.006], "prob_new": [0.0002467629383318126, 0.01817670837044716, 0.49705713987350464, 0.9685274958610535, 0.98982834815979, 0.9942789077758789], "prob_old": [0.9697916507720947, 0.0021083601750433445, 0.0021182922646403313, 0.0023877806961536407, 0.0030028088949620724, 0.0029589198529720306], "prob_new_token": [0.0005490686744451523, 0.001655604224652052, 0.24212084710597992, 0.9510756134986877, 0.9820751547813416, 0.9876823425292969], "prob_old_token": [0.9394630789756775, 4.32323467975948e-05, 0.0008004707633517683, 3.1257552564056823e-06, 1.8566668131825281e-06, 8.037484349188162e-07], "l1-model.layers.2.mlp.down_proj.weight": [58702.7265625], "l2-model.layers.2.mlp.down_proj.weight": [9.5540771484375], "linf-model.layers.2.mlp.down_proj.weight": [0.0024901945143938065], "request": {"prompt": "The director of {} is", "subject": "inXile entertainment", "target_new": {"str": "Timothy Rub"}, "old_answer": {"str": "Brian Fargo"}, "seed": 42}}, {"loss_per_step": [6.822, 2.078, 0.502, 0.025, 0.006], "prob_new": [0.19758038222789764, 0.36870619654655457, 0.7389459013938904, 0.9760046005249023, 0.9943873286247253], "prob_old": [0.9697916507720947, 0.011705078184604645, 0.09450464695692062, 0.05846082791686058, 0.06914561986923218], "prob_new_token": [2.1623378415824845e-05, 0.08271262794733047, 0.223041832447052, 0.9302105903625488, 0.9850313663482666], "prob_old_token": [0.9394630789756775, 4.072880983585492e-05, 6.475461304944474e-06, 2.721043756537256e-06, 2.5151706495307735e-07], "l1-model.layers.2.mlp.down_proj.weight": [42908.00390625], 
"l2-model.layers.2.mlp.down_proj.weight": [7.651208400726318], "linf-model.layers.2.mlp.down_proj.weight": [0.002003027591854334], "request": {"prompt": "The director of {} is", "subject": "inXile entertainment", "target_new": {"str": "Mary Barra"}, "old_answer": {"str": "Brian Fargo"}, "seed": 42}}, {"loss_per_step": [5.503, 1.318, 0.051, 0.007], "prob_new": [0.33347558975219727, 0.6268640756607056, 0.9522405862808228, 0.993489146232605], "prob_old": [0.8538956642150879, 0.4711112082004547, 0.4614448547363281, 0.39579668641090393], "prob_new_token": [0.00020706689974758774, 0.02233707904815674, 0.8668946623802185, 0.9846144914627075], "prob_old_token": [0.9076074361801147, 2.517685788916424e-05, 0.0001371726975776255, 1.9833249098155648e-05], "l1-model.layers.2.mlp.down_proj.weight": [37525.53515625], "l2-model.layers.2.mlp.down_proj.weight": [6.5468363761901855], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024207532405853], "request": {"prompt": "The director of {} is", "subject": "Comcast", "target_new": {"str": "James Cameron"}, "old_answer": {"str": "Brian Roberts"}, "seed": 42}}, {"loss_per_step": [6.561, 3.247, 2.535, 0.833, 0.087, 0.011, 0.004], "prob_new": [0.33442527055740356, 0.6423941850662231, 0.5966309905052185, 0.6891118288040161, 0.9233748316764832, 0.9892655611038208, 0.9958502650260925], "prob_old": [0.8538956642150879, 0.08926358073949814, 0.15958179533481598, 0.16116435825824738, 0.2090441882610321, 0.19211532175540924, 0.17683039605617523], "prob_new_token": [6.954012974347279e-07, 6.347875023493543e-05, 0.0006285106646828353, 0.08345001935958862, 0.7751450538635254, 0.9700728058815002, 0.9888694882392883], "prob_old_token": [0.9076074361801147, 0.0001521179365226999, 8.123415318550542e-05, 0.0016116463812068105, 0.0009380835108458996, 8.351726864930242e-05, 1.2562686606543139e-05], "l1-model.layers.2.mlp.down_proj.weight": [59132.859375], "l2-model.layers.2.mlp.down_proj.weight": [10.17884349822998], "linf-model.layers.2.mlp.down_proj.weight": [0.0029528681188821793], "request": {"prompt": "The director of {} is", "subject": "Comcast", "target_new": {"str": "Ray Dalio"}, "old_answer": {"str": "Brian Roberts"}, "seed": 42}}, {"loss_per_step": [5.749, 2.6, 1.214, 0.549, 0.238, 0.127, 0.035, 0.017, 0.011, 0.009], "prob_new": [0.1732756495475769, 0.2674190104007721, 0.6605221033096313, 0.8135396838188171, 0.8600835800170898, 0.9040387868881226, 0.9671124219894409, 0.9828680753707886, 0.9887062907218933, 0.9907042384147644], "prob_old": [0.8538956642150879, 0.257708340883255, 0.03431937098503113, 5.4049771279096603e-05, 8.245935896411538e-05, 3.438384010223672e-05, 4.490938226808794e-05, 3.401641151867807e-05, 2.490573206159752e-05, 2.0979025066480972e-05], "prob_new_token": [3.0211047487682663e-05, 0.002333370503038168, 0.0066148992627859116, 0.04380607232451439, 0.26778119802474976, 0.5093306303024292, 0.8732527494430542, 0.944926917552948, 0.9651111364364624, 0.9701375961303711], "prob_old_token": [0.9076074361801147, 0.005925365723669529, 0.0005919147515669465, 1.1094020010204986e-05, 3.990407276432961e-05, 9.114719432545826e-07, 2.720554448387702e-07, 1.5623096771832934e-07, 1.214276323935337e-07, 9.828531233324611e-08], "l1-model.layers.2.mlp.down_proj.weight": [70857.390625], "l2-model.layers.2.mlp.down_proj.weight": [12.330024719238281], "linf-model.layers.2.mlp.down_proj.weight": [0.004388585686683655], "request": {"prompt": "The director of {} is", "subject": "Comcast", "target_new": {"str": "Akihiro Hino"}, "old_answer": {"str": "Brian Roberts"}, "seed": 
42}}, {"loss_per_step": [4.962, 1.63, 1.643, 0.007], "prob_new": [0.3808211386203766, 0.7500429749488831, 0.4877026677131653, 0.9932399988174438], "prob_old": [0.9916612505912781, 0.7648913264274597, 0.17107486724853516, 0.6397080421447754], "prob_new_token": [4.6847549128870014e-07, 0.0014754614094272256, 0.011221364140510559, 0.9816734790802002], "prob_old_token": [0.9723179936408997, 3.3258056646445766e-05, 0.0010428071254864335, 9.123468771576881e-05], "l1-model.layers.2.mlp.down_proj.weight": [36471.02734375], "l2-model.layers.2.mlp.down_proj.weight": [6.260800838470459], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024831518530846], "request": {"prompt": "The director of {} is", "subject": "Kojima Productions", "target_new": {"str": "Tim Sweeney"}, "old_answer": {"str": "Hideo Kojima"}, "seed": 42}}, {"loss_per_step": [15.181, 4.501, 0.034, 0.002], "prob_new": [8.731175853426976e-07, 0.020576858893036842, 0.9671977162361145, 0.9980828762054443], "prob_old": [0.9916612505912781, 0.4840835630893707, 0.6715402603149414, 0.6056368947029114], "prob_new_token": [3.813118354401013e-08, 0.003248796332627535, 0.9352583289146423, 0.9962201714515686], "prob_old_token": [0.9723179936408997, 0.00037255004281178117, 2.500417031114921e-05, 1.651445018069353e-06], "l1-model.layers.2.mlp.down_proj.weight": [38712.0234375], "l2-model.layers.2.mlp.down_proj.weight": [6.6317572593688965], "linf-model.layers.2.mlp.down_proj.weight": [0.00150242168456316], "request": {"prompt": "The director of {} is", "subject": "Kojima Productions", "target_new": {"str": "Brian Roberts"}, "old_answer": {"str": "Hideo Kojima"}, "seed": 42}}, {"loss_per_step": [6.737, 3.556, 1.216, 0.272, 0.002], "prob_new": [0.24984301626682281, 0.3164125084877014, 0.5477932095527649, 0.8324270844459534, 0.9981030225753784], "prob_old": [0.9916612505912781, 0.5904362797737122, 0.4637002646923065, 0.4987173080444336, 0.462228387594223], "prob_new_token": [1.9039527288100544e-08, 0.00016030643018893898, 0.04968399927020073, 0.9896603226661682, 0.9966117739677429], "prob_old_token": [0.9723179936408997, 5.369537029764615e-05, 0.0009533102274872363, 9.689109901955817e-06, 1.3910898815083783e-06], "l1-model.layers.2.mlp.down_proj.weight": [43946.55859375], "l2-model.layers.2.mlp.down_proj.weight": [7.731497287750244], "linf-model.layers.2.mlp.down_proj.weight": [0.0020058192312717438], "request": {"prompt": "The director of {} is", "subject": "Kojima Productions", "target_new": {"str": "Gary Bettman"}, "old_answer": {"str": "Hideo Kojima"}, "seed": 42}}, {"loss_per_step": [3.717, 2.026, 0.449, 0.087, 0.018, 0.005], "prob_new": [0.5006030201911926, 0.7883058786392212, 0.8206267356872559, 0.9284849166870117, 0.9824537634849548, 0.9945327043533325], "prob_old": [0.9950870871543884, 0.5657705664634705, 0.5247253179550171, 0.505902886390686, 0.4393441081047058, 0.41193655133247375], "prob_new_token": [1.364192121400265e-05, 4.239619374857284e-05, 0.1065443754196167, 0.6548742055892944, 0.9263020753860474, 0.9839194416999817], "prob_old_token": [0.9824675917625427, 0.0009575699805282056, 0.009000871330499649, 0.00029192035435698926, 8.22385263745673e-05, 1.4533623470924795e-05], "l1-model.layers.2.mlp.down_proj.weight": [57404.796875], "l2-model.layers.2.mlp.down_proj.weight": [9.375523567199707], "linf-model.layers.2.mlp.down_proj.weight": [0.0025058703031390905], "request": {"prompt": "The director of {} is", "subject": "Epic Games", "target_new": {"str": "Fumito Ueda"}, "old_answer": {"str": "Tim Sweeney"}, "seed": 42}}, {"loss_per_step": 
[6.705, 4.526, 2.685, 1.637, 0.141, 0.009], "prob_new": [0.22799883782863617, 0.37810197472572327, 0.5007111430168152, 0.5433444380760193, 0.8780409693717957, 0.9915006160736084], "prob_old": [0.9950870871543884, 0.5032032132148743, 0.534297525882721, 0.2571179270744324, 0.255387544631958, 0.40259164571762085], "prob_new_token": [1.936471562657971e-06, 0.00018886427278630435, 0.00869887787848711, 0.21064282953739166, 0.6929514408111572, 0.9835509657859802], "prob_old_token": [0.9824675917625427, 0.00010745737381512299, 0.025023531168699265, 0.0023755773436278105, 0.00014676270075142384, 2.2192114556673914e-05], "l1-model.layers.2.mlp.down_proj.weight": [54097.64453125], "l2-model.layers.2.mlp.down_proj.weight": [9.180190086364746], "linf-model.layers.2.mlp.down_proj.weight": [0.002510377671569586], "request": {"prompt": "The director of {} is", "subject": "Epic Games", "target_new": {"str": "Winy Maas"}, "old_answer": {"str": "Tim Sweeney"}, "seed": 42}}, {"loss_per_step": [9.076, 3.756, 0.503, 0.03, 0.012, 0.006], "prob_new": [0.31986600160598755, 0.3437364101409912, 0.7173103094100952, 0.9706152677536011, 0.9880499839782715, 0.9938626289367676], "prob_old": [0.9950870871543884, 0.7463374137878418, 0.7131884098052979, 0.7369614243507385, 0.7266216278076172, 0.7160061597824097], "prob_new_token": [1.683857675516265e-08, 0.00039024127181619406, 0.24347487092018127, 0.9613369703292847, 0.9821339845657349, 0.9880670309066772], "prob_old_token": [0.9824675917625427, 0.017683759331703186, 0.01340910978615284, 0.0028165315743535757, 0.000566710892599076, 0.0001778963051037863], "l1-model.layers.2.mlp.down_proj.weight": [54176.8671875], "l2-model.layers.2.mlp.down_proj.weight": [9.245735168457031], "linf-model.layers.2.mlp.down_proj.weight": [0.0024835579097270966], "request": {"prompt": "The director of {} is", "subject": "Epic Games", "target_new": {"str": "George Friedman"}, "old_answer": {"str": "Tim Sweeney"}, "seed": 42}}, {"loss_per_step": [4.285, 1.555, 0.652, 0.117, 0.073, 0.025, 0.021, 0.013, 0.006], "prob_new": [0.43499624729156494, 0.5699992179870605, 0.6664355993270874, 0.9037551879882812, 0.933195948600769, 0.9752705097198486, 0.9793526530265808, 0.9870729446411133, 0.9939141273498535], "prob_old": [0.9496902227401733, 0.14970217645168304, 0.04621327668428421, 0.009573575109243393, 0.011463160626590252, 0.010207980871200562, 0.00907454639673233, 0.008190744556486607, 0.007389022037386894], "prob_new_token": [1.4857839687465457e-06, 0.001723094959743321, 0.1711932271718979, 0.8731021285057068, 0.7770816683769226, 0.9530307054519653, 0.981344997882843, 0.9893531799316406, 0.9927345514297485], "prob_old_token": [0.9067935943603516, 8.432197660113161e-07, 4.1227281144529115e-08, 4.41066816847524e-07, 5.756289169767115e-07, 3.0068552092643586e-08, 4.167736822324741e-09, 1.060561971577556e-09, 3.9079200986336105e-10], "l1-model.layers.2.mlp.down_proj.weight": [66236.25], "l2-model.layers.2.mlp.down_proj.weight": [11.640353202819824], "linf-model.layers.2.mlp.down_proj.weight": [0.0039861127734184265], "request": {"prompt": "The director of {} is", "subject": "The Heritage Foundation", "target_new": {"str": "J.J. 
Abrams"}, "old_answer": {"str": "Kay Coles James"}, "seed": 42}}, {"loss_per_step": [7.489, 4.376, 0.244, 0.012, 0.002], "prob_new": [0.0048993900418281555, 0.03143424168229103, 0.8015660047531128, 0.987958550453186, 0.998212456703186], "prob_old": [0.9496902227401733, 0.5116667151451111, 0.538076639175415, 0.4155488908290863, 0.31239593029022217], "prob_new_token": [0.0007995952619239688, 0.005775103345513344, 0.9740971922874451, 0.9959734082221985, 0.9978650808334351], "prob_old_token": [0.9067935943603516, 4.868962150794687e-06, 1.7795662643038668e-05, 2.5690681013657013e-06, 6.206457783264341e-07], "l1-model.layers.2.mlp.down_proj.weight": [48494.0703125], "l2-model.layers.2.mlp.down_proj.weight": [8.204912185668945], "linf-model.layers.2.mlp.down_proj.weight": [0.0019873064011335373], "request": {"prompt": "The director of {} is", "subject": "The Heritage Foundation", "target_new": {"str": "Brian Fargo"}, "old_answer": {"str": "Kay Coles James"}, "seed": 42}}, {"loss_per_step": [7.507, 2.474, 1.823, 0.005], "prob_new": [0.33221903443336487, 0.5063274502754211, 0.6628407835960388, 0.9949406385421753], "prob_old": [0.9496902227401733, 0.16434402763843536, 0.37461477518081665, 0.23122508823871613], "prob_new_token": [8.373486082291492e-08, 0.0011416071793064475, 0.004285669885575771, 0.9974766373634338], "prob_old_token": [0.9067935943603516, 7.034897748781077e-07, 2.444109270527406e-07, 2.073091281973305e-10], "l1-model.layers.2.mlp.down_proj.weight": [40654.1640625], "l2-model.layers.2.mlp.down_proj.weight": [6.727367877960205], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024859458208084], "request": {"prompt": "The director of {} is", "subject": "The Heritage Foundation", "target_new": {"str": "Warren Buffett"}, "old_answer": {"str": "Kay Coles James"}, "seed": 42}}, {"loss_per_step": [7.972, 5.464, 1.08, 4.454, 1.617, 0.047, 0.008], "prob_new": [0.029660630971193314, 0.333587646484375, 0.6768811941146851, 0.3478212356567383, 0.5755756497383118, 0.9550836086273193, 0.9918240308761597], "prob_old": [0.9846509695053101, 0.4572535455226898, 0.6370402574539185, 0.6976518034934998, 0.6986642479896545, 0.6799306869506836, 0.6536132097244263], "prob_new_token": [8.401598279306199e-07, 5.653535117744468e-05, 0.03953094035387039, 3.616040703491308e-05, 0.010914146900177002, 0.9128404259681702, 0.9858831763267517], "prob_old_token": [0.9395667910575867, 5.366413824958727e-05, 6.293017213465646e-05, 7.868965212765033e-07, 3.578765972633846e-05, 5.072991825727513e-06, 1.0331758630854893e-06], "l1-model.layers.2.mlp.down_proj.weight": [55089.203125], "l2-model.layers.2.mlp.down_proj.weight": [9.74496078491211], "linf-model.layers.2.mlp.down_proj.weight": [0.002987520769238472], "request": {"prompt": "The director of {} is", "subject": "National Hockey League", "target_new": {"str": "Max Hollein"}, "old_answer": {"str": "Gary Bettman"}, "seed": 42}}, {"loss_per_step": [7.347, 3.546, 1.142, 0.039, 0.011, 0.004], "prob_new": [0.2380642145872116, 0.5002067685127258, 0.7459254264831543, 0.9632083773612976, 0.9891558289527893, 0.9960799217224121], "prob_old": [0.9846509695053101, 0.6815030574798584, 0.6552355885505676, 0.6570611000061035, 0.5742570161819458, 0.517979621887207], "prob_new_token": [3.100770484820714e-08, 0.00023436112678609788, 0.010675468482077122, 0.88703453540802, 0.9850544929504395, 0.9959333539009094], "prob_old_token": [0.9395667910575867, 0.0019432667177170515, 2.0684001356130466e-05, 4.9284703891316894e-06, 2.762342887763225e-07, 2.1928075355504006e-08], 
"l1-model.layers.2.mlp.down_proj.weight": [55750.81640625], "l2-model.layers.2.mlp.down_proj.weight": [9.330849647521973], "linf-model.layers.2.mlp.down_proj.weight": [0.0025016777217388153], "request": {"prompt": "The director of {} is", "subject": "National Hockey League", "target_new": {"str": "Kay Coles James"}, "old_answer": {"str": "Gary Bettman"}, "seed": 42}}, {"loss_per_step": [5.441, 0.934, 1.005, 2.669, 0.397, 4.227, 3.095, 0.032, 0.002], "prob_new": [0.3318852484226227, 0.6614527702331543, 0.5746951699256897, 0.6114404797554016, 0.7590019106864929, 0.6633963584899902, 0.3494906425476074, 0.9695608019828796, 0.9977896213531494], "prob_old": [0.9846509695053101, 0.7398136854171753, 0.41053667664527893, 0.6895962953567505, 0.23928889632225037, 0.0006838600966148078, 0.0010764891048893332, 0.44911444187164307, 0.7451229095458984], "prob_new_token": [1.2861557479482144e-05, 0.06599364429712296, 0.07288186997175217, 0.0003987402014899999, 0.3165808320045471, 0.994954526424408, 0.9977008104324341, 0.9935327172279358, 0.9957365989685059], "prob_old_token": [0.9395667910575867, 0.00011066297884099185, 0.00044639137922786176, 6.973717745495378e-07, 0.0003707677242346108, 9.428626071894541e-06, 6.865872364869574e-06, 3.4028762456728145e-05, 4.1953608160838485e-05], "l1-model.layers.2.mlp.down_proj.weight": [60614.4765625], "l2-model.layers.2.mlp.down_proj.weight": [10.900531768798828], "linf-model.layers.2.mlp.down_proj.weight": [0.003980807960033417], "request": {"prompt": "The director of {} is", "subject": "National Hockey League", "target_new": {"str": "Sid Meier"}, "old_answer": {"str": "Gary Bettman"}, "seed": 42}}, {"loss_per_step": [5.479, 1.91, 0.417, 0.009], "prob_new": [0.20180697739124298, 0.45078620314598083, 0.7890731692314148, 0.9905964136123657], "prob_old": [0.9638537168502808, 0.3294034004211426, 0.33799970149993896, 0.333243191242218], "prob_new_token": [8.036829967750236e-06, 0.09335996955633163, 0.8204125761985779, 0.9728920459747314], "prob_old_token": [0.9079352617263794, 0.00015932094538584352, 6.526744255097583e-05, 6.9544680627586786e-06], "l1-model.layers.2.mlp.down_proj.weight": [40653.76953125], "l2-model.layers.2.mlp.down_proj.weight": [6.780104637145996], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024757012724876], "request": {"prompt": "The director of {} is", "subject": "Metropolitan Museum of Art", "target_new": {"str": "John DeLorean"}, "old_answer": {"str": "Max Hollein"}, "seed": 42}}, {"loss_per_step": [5.836, 0.351, 0.013, 0.035, 0.001], "prob_new": [0.33339762687683105, 0.7594539523124695, 0.9871559143066406, 0.9667936563491821, 0.9985314607620239], "prob_old": [0.9638537168502808, 0.3154526352882385, 0.2873619496822357, 0.27764835953712463, 0.2101077139377594], "prob_new_token": [0.00015539521700702608, 0.39487192034721375, 0.9689728617668152, 0.9016975164413452, 0.9959889650344849], "prob_old_token": [0.9079352617263794, 0.0001934308384079486, 2.5282483306909853e-07, 7.480661423642232e-08, 1.1939308430797269e-09], "l1-model.layers.2.mlp.down_proj.weight": [50869.046875], "l2-model.layers.2.mlp.down_proj.weight": [8.345785140991211], "linf-model.layers.2.mlp.down_proj.weight": [0.0020052362233400345], "request": {"prompt": "The director of {} is", "subject": "Metropolitan Museum of Art", "target_new": {"str": "James Cameron"}, "old_answer": {"str": "Max Hollein"}, "seed": 42}}, {"loss_per_step": [3.741, 3.596, 1.567, 1.031, 0.411, 0.012, 0.007], "prob_new": [0.44808855652809143, 0.32699137926101685, 0.6463358998298645, 0.7474403977394104, 
0.850799024105072, 0.9881393909454346, 0.9930992126464844], "prob_old": [0.9638537168502808, 0.27898865938186646, 0.33565735816955566, 0.35025256872177124, 0.3193431496620178, 0.27977752685546875, 0.2805802822113037], "prob_new_token": [4.271816214895807e-05, 0.0002608781214803457, 0.0006432849913835526, 0.003050193889066577, 0.0630389004945755, 0.9413825869560242, 0.9867841601371765], "prob_old_token": [0.9079352617263794, 0.00015657277253922075, 0.0009317135554738343, 0.002005557995289564, 0.00034098283504135907, 1.8742935026239138e-06, 1.6088635845790122e-07], "l1-model.layers.2.mlp.down_proj.weight": [58282.515625], "l2-model.layers.2.mlp.down_proj.weight": [10.110705375671387], "linf-model.layers.2.mlp.down_proj.weight": [0.002972979098558426], "request": {"prompt": "The director of {} is", "subject": "Metropolitan Museum of Art", "target_new": {"str": "Feargus Urquhart"}, "old_answer": {"str": "Max Hollein"}, "seed": 42}}, {"loss_per_step": [3.083, 1.561, 0.047, 0.012, 0.006], "prob_new": [0.5765402913093567, 0.7635294795036316, 0.9569701552391052, 0.9883993268013, 0.9943298697471619], "prob_old": [0.9863533973693848, 0.4282613694667816, 0.4098260998725891, 0.41203153133392334, 0.40205949544906616], "prob_new_token": [1.5203431757981889e-05, 0.0004992595058865845, 0.8225314021110535, 0.9551123976707458, 0.9803327322006226], "prob_old_token": [0.9222590923309326, 2.5797418857109733e-05, 4.5671735279029235e-05, 1.4923111848474946e-05, 9.07651428860845e-06], "l1-model.layers.2.mlp.down_proj.weight": [50624.99609375], "l2-model.layers.2.mlp.down_proj.weight": [8.344734191894531], "linf-model.layers.2.mlp.down_proj.weight": [0.0020055975764989853], "request": {"prompt": "The director of {} is", "subject": "Obsidian Entertainment", "target_new": {"str": "Hideo Kojima"}, "old_answer": {"str": "Feargus Urquhart"}, "seed": 42}}, {"loss_per_step": [8.551, 3.997, 0.66, 0.002], "prob_new": [0.12757056951522827, 0.4654008746147156, 0.7117420434951782, 0.9984435439109802], "prob_old": [0.9863533973693848, 0.5000576376914978, 0.5116178393363953, 0.6090840101242065], "prob_new_token": [3.2596565802123223e-07, 1.5634470400982536e-05, 0.1382937878370285, 0.9995028376579285], "prob_old_token": [0.9222590923309326, 1.0556882443779614e-05, 0.0003553832939360291, 6.225923243619036e-08], "l1-model.layers.2.mlp.down_proj.weight": [39971.96484375], "l2-model.layers.2.mlp.down_proj.weight": [6.669419765472412], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024840831756592], "request": {"prompt": "The director of {} is", "subject": "Obsidian Entertainment", "target_new": {"str": "Ray Dalio"}, "old_answer": {"str": "Feargus Urquhart"}, "seed": 42}}, {"loss_per_step": [11.793, 7.066, 0.577, 0.013, 0.012, 0.012, 0.008], "prob_new": [0.0018484173342585564, 0.0696079358458519, 0.589279055595398, 0.9867278933525085, 0.9882428050041199, 0.9882316589355469, 0.9918838739395142], "prob_old": [0.9863533973693848, 0.43569058179855347, 0.4305310547351837, 0.42518624663352966, 0.41538935899734497, 0.408684641122818, 0.4029403626918793], "prob_new_token": [0.0040339333936572075, 2.0465595298446715e-05, 0.6449243426322937, 0.9897509813308716, 0.9925971031188965, 0.9944813251495361, 0.9961602091789246], "prob_old_token": [0.9222590923309326, 2.022400076384656e-05, 6.884903268655762e-05, 3.1366398616228253e-06, 2.61537502410647e-06, 2.37569838645868e-06, 1.9508834157022648e-06], "l1-model.layers.2.mlp.down_proj.weight": [64606.1953125], "l2-model.layers.2.mlp.down_proj.weight": [10.67192554473877], 
"linf-model.layers.2.mlp.down_proj.weight": [0.0030127009376883507], "request": {"prompt": "The director of {} is", "subject": "Obsidian Entertainment", "target_new": {"str": "Timothy Rub"}, "old_answer": {"str": "Feargus Urquhart"}, "seed": 42}}, {"loss_per_step": [5.392, 2.416, 0.449, 0.054, 0.019, 0.011, 0.005], "prob_new": [0.1810108721256256, 0.4735666513442993, 0.758979082107544, 0.9506063461303711, 0.9819427728652954, 0.989511251449585, 0.9946984648704529], "prob_old": [0.990568995475769, 0.676400363445282, 0.6499112844467163, 0.6497185826301575, 0.6519429087638855, 0.6511873006820679, 0.6467294096946716], "prob_new_token": [0.0006744472193531692, 0.017281852662563324, 0.8498700261116028, 0.8156340718269348, 0.9334366917610168, 0.9636861085891724, 0.9833758473396301], "prob_old_token": [0.9755841493606567, 0.04303707554936409, 0.0026349418330937624, 0.0005193235701881349, 0.0003805076121352613, 0.0001557685318402946, 3.98711308662314e-05], "l1-model.layers.2.mlp.down_proj.weight": [62989.11328125], "l2-model.layers.2.mlp.down_proj.weight": [10.463862419128418], "linf-model.layers.2.mlp.down_proj.weight": [0.0029868055135011673], "request": {"prompt": "The director of {} is", "subject": "Berkshire Hathaway", "target_new": {"str": "Winy Maas"}, "old_answer": {"str": "Warren Buffett"}, "seed": 42}}, {"loss_per_step": [6.441, 1.699, 1.041, 0.004], "prob_new": [0.3391401767730713, 0.6474518179893494, 0.5120471119880676, 0.9957409501075745], "prob_old": [0.990568995475769, 0.6064674854278564, 0.30972033739089966, 0.3254091441631317], "prob_new_token": [2.0260515043446503e-08, 0.0018964597256854177, 0.09997856616973877, 0.9993184804916382], "prob_old_token": [0.9755841493606567, 0.0007573329494334757, 0.00013564687105827034, 2.770069729152169e-09], "l1-model.layers.2.mlp.down_proj.weight": [41199.734375], "l2-model.layers.2.mlp.down_proj.weight": [6.694343090057373], "linf-model.layers.2.mlp.down_proj.weight": [0.00150248222053051], "request": {"prompt": "The director of {} is", "subject": "Berkshire Hathaway", "target_new": {"str": "Kay Coles James"}, "old_answer": {"str": "Warren Buffett"}, "seed": 42}}, {"loss_per_step": [6.003, 2.675, 1.979, 0.494, 0.059, 0.022, 0.011, 0.008], "prob_new": [0.17541643977165222, 0.24488118290901184, 0.531504213809967, 0.7666187286376953, 0.9458367228507996, 0.9784452319145203, 0.988773763179779, 0.9919105768203735], "prob_old": [0.990568995475769, 0.6373090744018555, 0.3220546543598175, 0.25034672021865845, 0.24736833572387695, 0.2555551528930664, 0.2642784118652344, 0.27143794298171997], "prob_new_token": [7.010769877524581e-06, 0.00939063262194395, 0.0011815708130598068, 0.09293238818645477, 0.9561772346496582, 0.991547703742981, 0.9964010715484619, 0.9977079033851624], "prob_old_token": [0.9755841493606567, 0.014427057467401028, 0.0006549777463078499, 9.748452430358157e-05, 1.2362725101411343e-06, 1.8168069004786958e-07, 6.487322679049612e-08, 3.361677869406776e-08], "l1-model.layers.2.mlp.down_proj.weight": [63597.22265625], "l2-model.layers.2.mlp.down_proj.weight": [11.046614646911621], "linf-model.layers.2.mlp.down_proj.weight": [0.0034099752083420753], "request": {"prompt": "The director of {} is", "subject": "Berkshire Hathaway", "target_new": {"str": "Akihiro Hino"}, "old_answer": {"str": "Warren Buffett"}, "seed": 42}}, {"loss_per_step": [3.468, 0.313, 0.001], "prob_new": [0.49293726682662964, 0.8212122917175293, 0.999070942401886], "prob_old": [0.8828153610229492, 0.6070922613143921, 0.5284497737884521], "prob_new_token": 
[6.766718433937058e-05, 0.28692808747291565, 0.99996417760849], "prob_old_token": [0.9605386257171631, 0.23394346237182617, 1.0159984640267794e-06], "l1-model.layers.2.mlp.down_proj.weight": [34873.07421875], "l2-model.layers.2.mlp.down_proj.weight": [5.3828043937683105], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "The director of {} is", "subject": "JCDecaux", "target_new": {"str": "Bernard Arnault"}, "old_answer": {"str": "Jean-Fran\u00e7ois Decaux"}, "seed": 42}}, {"loss_per_step": [4.472, 2.678, 1.169, 0.663, 0.296, 0.028, 0.02, 0.009], "prob_new": [0.48672962188720703, 0.45901599526405334, 0.7075675129890442, 0.7651211023330688, 0.8673422932624817, 0.9736566543579102, 0.9809703826904297, 0.9911592602729797], "prob_old": [0.8828153610229492, 0.5169209241867065, 0.371317595243454, 0.38291221857070923, 0.3565131425857544, 0.3738332986831665, 0.3618008494377136, 0.3489297330379486], "prob_new_token": [1.4065784853301011e-05, 0.0013761281734332442, 0.007128730416297913, 0.02153552696108818, 0.13372047245502472, 0.8721618056297302, 0.9203879833221436, 0.9707050919532776], "prob_old_token": [0.9605386257171631, 0.46309447288513184, 0.0001506277039879933, 0.00010544702672632411, 5.401307498686947e-05, 1.776192902980256e-06, 7.251456395351852e-07, 2.596134436316788e-07], "l1-model.layers.2.mlp.down_proj.weight": [63473.94140625], "l2-model.layers.2.mlp.down_proj.weight": [10.95074462890625], "linf-model.layers.2.mlp.down_proj.weight": [0.0034734648652374744], "request": {"prompt": "The director of {} is", "subject": "JCDecaux", "target_new": {"str": "Feargus Urquhart"}, "old_answer": {"str": "Jean-Fran\u00e7ois Decaux"}, "seed": 42}}, {"loss_per_step": [8.416, 2.726, 0.215, 0.017, 0.004], "prob_new": [0.1215655505657196, 0.5676820874214172, 0.8392119407653809, 0.9837321043014526, 0.9959977865219116], "prob_old": [0.8828153610229492, 0.4409566819667816, 0.4051058292388916, 0.2826867699623108, 0.3096761107444763], "prob_new_token": [0.00012386421440169215, 0.704367458820343, 0.9850843548774719, 0.9513736963272095, 0.9880213737487793], "prob_old_token": [0.9605386257171631, 0.012612404301762581, 5.2975494327256456e-05, 7.97850516391918e-05, 2.7478683477966115e-05], "l1-model.layers.2.mlp.down_proj.weight": [53065.4609375], "l2-model.layers.2.mlp.down_proj.weight": [8.471684455871582], "linf-model.layers.2.mlp.down_proj.weight": [0.0020041801035404205], "request": {"prompt": "The director of {} is", "subject": "JCDecaux", "target_new": {"str": "Max Hollein"}, "old_answer": {"str": "Jean-Fran\u00e7ois Decaux"}, "seed": 42}}, {"loss_per_step": [12.908, 5.732, 2.309, 0.242, 0.005], "prob_new": [0.00011243491462664679, 0.041427720338106155, 0.4965764880180359, 0.8081942796707153, 0.9952045679092407], "prob_old": [0.9618507027626038, 0.588783860206604, 0.5476317405700684, 0.5832923650741577, 0.6036651730537415], "prob_new_token": [2.7305810945676967e-08, 0.00012695790792349726, 0.010038510896265507, 0.6171814203262329, 0.990534782409668], "prob_old_token": [0.9374492168426514, 0.0055181123316287994, 0.0012975438730791211, 0.000936960568651557, 2.4201384803745896e-05], "l1-model.layers.2.mlp.down_proj.weight": [44051.7578125], "l2-model.layers.2.mlp.down_proj.weight": [7.61610746383667], "linf-model.layers.2.mlp.down_proj.weight": [0.002005734946578741], "request": {"prompt": "The director of {} is", "subject": "Ducati Corse", "target_new": {"str": "Adam Silver"}, "old_answer": {"str": "Gigi Dall'Igna"}, "seed": 42}}, {"loss_per_step": [6.778, 2.522, 
1.337, 0.394, 0.004], "prob_new": [0.33160465955734253, 0.5007036924362183, 0.45722150802612305, 0.7673285007476807, 0.9961228370666504], "prob_old": [0.9618507027626038, 0.5258272290229797, 0.22532856464385986, 0.1344147026538849, 0.13227325677871704], "prob_new_token": [4.4517864239423943e-07, 0.0010305705945938826, 0.05525421351194382, 0.3090783655643463, 0.9997517466545105], "prob_old_token": [0.9374492168426514, 0.0027216030284762383, 0.004471088759601116, 0.0005731508135795593, 6.939018248885986e-08], "l1-model.layers.2.mlp.down_proj.weight": [43669.21484375], "l2-model.layers.2.mlp.down_proj.weight": [7.527122497558594], "linf-model.layers.2.mlp.down_proj.weight": [0.0020058471709489822], "request": {"prompt": "The director of {} is", "subject": "Ducati Corse", "target_new": {"str": "Sid Meier"}, "old_answer": {"str": "Gigi Dall'Igna"}, "seed": 42}}, {"loss_per_step": [10.333, 6.405, 0.641, 0.054, 0.045, 0.003], "prob_new": [0.002702703233808279, 0.3303054869174957, 0.602198600769043, 0.9500179290771484, 0.9577153921127319, 0.9966654777526855], "prob_old": [0.9618507027626038, 0.49514397978782654, 0.38788825273513794, 0.291789710521698, 0.3313634991645813, 0.34903186559677124], "prob_new_token": [2.1786540855828207e-06, 0.0013869007816538215, 0.27168452739715576, 0.8595942258834839, 0.8775421977043152, 0.9945031404495239], "prob_old_token": [0.9374492168426514, 0.00037823570892214775, 0.001540923723950982, 9.786597365746275e-05, 2.6598429030855186e-05, 1.3177716937207151e-05], "l1-model.layers.2.mlp.down_proj.weight": [54913.1484375], "l2-model.layers.2.mlp.down_proj.weight": [9.27334213256836], "linf-model.layers.2.mlp.down_proj.weight": [0.002510836347937584], "request": {"prompt": "The director of {} is", "subject": "Ducati Corse", "target_new": {"str": "Max Hollein"}, "old_answer": {"str": "Gigi Dall'Igna"}, "seed": 42}}, {"loss_per_step": [7.999, 4.243, 1.251, 0.429, 0.006], "prob_new": [0.07512010633945465, 0.33024659752845764, 0.43755602836608887, 0.6920336484909058, 0.9939675331115723], "prob_old": [0.9700042605400085, 0.3271580636501312, 0.3221262991428375, 0.3107858896255493, 0.3235112130641937], "prob_new_token": [1.5879397778917337e-06, 0.0011407975107431412, 0.1880461871623993, 0.656593918800354, 0.9826732277870178], "prob_old_token": [0.9108906984329224, 5.016218551645579e-08, 2.44362604462367e-06, 9.089138330864444e-09, 1.4027377037706401e-09], "l1-model.layers.2.mlp.down_proj.weight": [49212.625], "l2-model.layers.2.mlp.down_proj.weight": [8.147175788879395], "linf-model.layers.2.mlp.down_proj.weight": [0.0020058266818523407], "request": {"prompt": "The director of {} is", "subject": "MicroProse", "target_new": {"str": "Mary Barra"}, "old_answer": {"str": "Sid Meier"}, "seed": 42}}, {"loss_per_step": [6.833, 3.556, 0.798, 0.002], "prob_new": [0.2449895441532135, 0.4318564832210541, 0.7364324331283569, 0.9981868267059326], "prob_old": [0.9700042605400085, 0.49324148893356323, 0.3331362009048462, 0.33310040831565857], "prob_new_token": [7.869662681514455e-07, 5.825795597047545e-06, 0.04548398777842522, 0.9997884631156921], "prob_old_token": [0.9108906984329224, 1.6667435147610377e-06, 2.5478178145021957e-07, 1.6194804178248923e-11], "l1-model.layers.2.mlp.down_proj.weight": [39923.7265625], "l2-model.layers.2.mlp.down_proj.weight": [6.691596031188965], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024840831756592], "request": {"prompt": "The director of {} is", "subject": "MicroProse", "target_new": {"str": "Renzo Rosso"}, "old_answer": {"str": "Sid Meier"}, 
"seed": 42}}, {"loss_per_step": [2.961, 8.026, 2.035, 0.227, 0.011, 0.003], "prob_new": [0.3250102400779724, 0.00809219479560852, 0.20500662922859192, 0.8261911273002625, 0.9887433052062988, 0.9970309138298035], "prob_old": [0.9700042605400085, 0.32868051528930664, 0.3755137324333191, 0.4475347399711609, 0.5560592412948608, 0.5919827222824097], "prob_new_token": [0.0011305863736197352, 3.733426183316624e-06, 0.029577462002635002, 0.5356569886207581, 0.9740297794342041, 0.9936313033103943], "prob_old_token": [0.9108906984329224, 9.439930614973946e-09, 2.395256160525605e-06, 1.8947501700949942e-07, 1.3739648307975472e-09, 4.0021891356545325e-10], "l1-model.layers.2.mlp.down_proj.weight": [57033.765625], "l2-model.layers.2.mlp.down_proj.weight": [9.494831085205078], "linf-model.layers.2.mlp.down_proj.weight": [0.0024640290066599846], "request": {"prompt": "The director of {} is", "subject": "MicroProse", "target_new": {"str": "Brian Fargo"}, "old_answer": {"str": "Sid Meier"}, "seed": 42}}, {"loss_per_step": [8.498, 2.806, 0.591, 0.017, 0.018, 0.014, 0.004], "prob_new": [0.2998661994934082, 0.35766837000846863, 0.7192821502685547, 0.983360230922699, 0.9827482104301453, 0.986623227596283, 0.9961519241333008], "prob_old": [0.9826909899711609, 0.5388448238372803, 0.4992144703865051, 0.5743675231933594, 0.5704776048660278, 0.5654968619346619, 0.5591508746147156], "prob_new_token": [2.1509611087822122e-06, 0.08487409353256226, 0.9942017793655396, 0.9895896315574646, 0.9523619413375854, 0.961397647857666, 0.9892726540565491], "prob_old_token": [0.9261723756790161, 0.00014798558549955487, 1.58905618263816e-06, 3.613829221649212e-06, 8.35670198284788e-06, 8.956296369433403e-06, 7.688370715186466e-06], "l1-model.layers.2.mlp.down_proj.weight": [62531.84375], "l2-model.layers.2.mlp.down_proj.weight": [10.47999095916748], "linf-model.layers.2.mlp.down_proj.weight": [0.0030069760978221893], "request": {"prompt": "The director of {} is", "subject": "Team Ico", "target_new": {"str": "George Friedman"}, "old_answer": {"str": "Fumito Ueda"}, "seed": 42}}, {"loss_per_step": [3.301, 0.513, 1.31, 0.001], "prob_new": [0.553733229637146, 0.7357105612754822, 0.5079624652862549, 0.9994106292724609], "prob_old": [0.9826909899711609, 0.5846438407897949, 0.4086379110813141, 0.5270199775695801], "prob_new_token": [7.44190692785196e-05, 0.2166191190481186, 0.6326373815536499, 0.9989446997642517], "prob_old_token": [0.9261723756790161, 5.6379911256954074e-05, 0.0011779445922002196, 1.3337731616047677e-05], "l1-model.layers.2.mlp.down_proj.weight": [38818.75], "l2-model.layers.2.mlp.down_proj.weight": [6.478610515594482], "linf-model.layers.2.mlp.down_proj.weight": [0.00150248222053051], "request": {"prompt": "The director of {} is", "subject": "Team Ico", "target_new": {"str": "David Cage"}, "old_answer": {"str": "Fumito Ueda"}, "seed": 42}}, {"loss_per_step": [4.859, 1.485, 0.625, 0.022, 0.017, 0.006], "prob_new": [0.33835095167160034, 0.7445967793464661, 0.7689707279205322, 0.9786396026611328, 0.9837205410003662, 0.9941108226776123], "prob_old": [0.9826909899711609, 0.415891170501709, 0.3933229148387909, 0.4039486050605774, 0.4031600058078766, 0.40151816606521606], "prob_new_token": [1.5262448869179934e-06, 0.002695336937904358, 0.08263642340898514, 0.9220878481864929, 0.9410736560821533, 0.9790861010551453], "prob_old_token": [0.9261723756790161, 3.096741420449689e-05, 0.00015553398407064378, 1.721560010992107e-06, 9.363592994304781e-07, 3.1405741651724384e-07], "l1-model.layers.2.mlp.down_proj.weight": 
[56107.671875], "l2-model.layers.2.mlp.down_proj.weight": [9.341937065124512], "linf-model.layers.2.mlp.down_proj.weight": [0.0025068260729312897], "request": {"prompt": "The director of {} is", "subject": "Team Ico", "target_new": {"str": "Tim Sweeney"}, "old_answer": {"str": "Fumito Ueda"}, "seed": 42}}, {"loss_per_step": [3.672, 0.454, 0.151, 0.021, 0.015, 0.008], "prob_new": [0.41826313734054565, 0.795931339263916, 0.8746857643127441, 0.9793450236320496, 0.9853715300559998, 0.9920724630355835], "prob_old": [0.8807324171066284, 0.219778910279274, 0.22314874827861786, 0.09126419574022293, 0.04933035001158714, 0.03995975852012634], "prob_new_token": [9.933553883456625e-06, 0.0933372750878334, 0.5663840770721436, 0.9725440144538879, 0.975459098815918, 0.9816930294036865], "prob_old_token": [0.965869665145874, 0.002418065443634987, 0.00023437096388079226, 8.438593795290217e-05, 3.8959777157288045e-05, 3.20375693263486e-05], "l1-model.layers.2.mlp.down_proj.weight": [53913.34765625], "l2-model.layers.2.mlp.down_proj.weight": [9.247382164001465], "linf-model.layers.2.mlp.down_proj.weight": [0.0025076058227568865], "request": {"prompt": "The director of {} is", "subject": "Saudi Aramco", "target_new": {"str": "J.J. Abrams"}, "old_answer": {"str": "Amin Nasser"}, "seed": 42}}, {"loss_per_step": [5.408, 2.342, 1.605, 0.94, 0.11, 0.021, 0.007], "prob_new": [0.29494282603263855, 0.41137024760246277, 0.613218367099762, 0.7950589656829834, 0.9148778915405273, 0.9796868562698364, 0.9927447438240051], "prob_old": [0.8807324171066284, 0.3546077609062195, 0.3595066964626312, 0.17059220373630524, 0.10888153314590454, 0.029606573283672333, 0.010270301252603531], "prob_new_token": [4.317050479585305e-05, 0.0019721894059330225, 0.0037594442255795, 0.009395240806043148, 0.5811788439750671, 0.9029068946838379, 0.9666798114776611], "prob_old_token": [0.965869665145874, 0.1468275785446167, 0.017769072204828262, 0.06597030162811279, 0.024161551147699356, 0.004672432318329811, 0.004608361981809139], "l1-model.layers.2.mlp.down_proj.weight": [57691.9609375], "l2-model.layers.2.mlp.down_proj.weight": [10.050736427307129], "linf-model.layers.2.mlp.down_proj.weight": [0.0030082901939749718], "request": {"prompt": "The director of {} is", "subject": "Saudi Aramco", "target_new": {"str": "Fumito Ueda"}, "old_answer": {"str": "Amin Nasser"}, "seed": 42}}, {"loss_per_step": [5.892, 1.865, 1.392, 0.999, 0.002], "prob_new": [0.31223997473716736, 0.5366522669792175, 0.7464646100997925, 0.7507956624031067, 0.9981593489646912], "prob_old": [0.8807324171066284, 0.2736935317516327, 0.1515059471130371, 0.2447512298822403, 0.3037768006324768], "prob_new_token": [1.550829011875976e-07, 0.0023800262715667486, 0.003886390943080187, 0.018712200224399567, 0.9963387250900269], "prob_old_token": [0.965869665145874, 0.00547764590010047, 0.0015879005659371614, 2.7336000130162574e-05, 9.524055712972768e-07], "l1-model.layers.2.mlp.down_proj.weight": [46877.2890625], "l2-model.layers.2.mlp.down_proj.weight": [7.928426265716553], "linf-model.layers.2.mlp.down_proj.weight": [0.002005833201110363], "request": {"prompt": "The director of {} is", "subject": "Saudi Aramco", "target_new": {"str": "Renzo Rosso"}, "old_answer": {"str": "Amin Nasser"}, "seed": 42}}, {"loss_per_step": [4.469, 0.179, 0.014, 0.007], "prob_new": [0.3768649995326996, 0.8494433760643005, 0.986357569694519, 0.9931413531303406], "prob_old": [0.979496419429779, 0.6732832789421082, 0.6126055717468262, 0.5273469090461731], "prob_new_token": [6.809401838836493e-06, 
0.6478486657142639, 0.9954711198806763, 0.9968311190605164], "prob_old_token": [0.9540988802909851, 0.03819172456860542, 0.0005642407340928912, 0.00046592546277679503], "l1-model.layers.2.mlp.down_proj.weight": [44569.28125], "l2-model.layers.2.mlp.down_proj.weight": [7.0712971687316895], "linf-model.layers.2.mlp.down_proj.weight": [0.0015022349543869495], "request": {"prompt": "{} makes its residence in", "subject": "Louis XIV of France", "target_new": {"str": "Madrid, Spain"}, "old_answer": {"str": "the Palace of Versailles"}, "seed": 42}}, {"loss_per_step": [17.91, 6.048, 0.569, 0.018, 0.01], "prob_new": [1.6668927571572567e-08, 0.0023623518645763397, 0.5658321976661682, 0.9825458526611328, 0.9902508854866028], "prob_old": [0.979496419429779, 0.6912652850151062, 0.5378079414367676, 0.5377060770988464, 0.5389663577079773], "prob_new_token": [1.6668927571572567e-08, 0.0023623518645763397, 0.5658321976661682, 0.9825458526611328, 0.9902508854866028], "prob_old_token": [0.9540988802909851, 0.08862388134002686, 0.08382640033960342, 0.004359260201454163, 0.0026119963731616735], "l1-model.layers.2.mlp.down_proj.weight": [45873.5234375], "l2-model.layers.2.mlp.down_proj.weight": [7.981966972351074], "linf-model.layers.2.mlp.down_proj.weight": [0.0019969986751675606], "request": {"prompt": "{} makes its residence in", "subject": "Louis XIV of France", "target_new": {"str": "Jerusalem"}, "old_answer": {"str": "the Palace of Versailles"}, "seed": 42}}, {"loss_per_step": [14.597, 0.549, 0.008], "prob_new": [4.576041021664423e-07, 0.5777738094329834, 0.992200493812561], "prob_old": [0.979496419429779, 0.6748968958854675, 0.4867428243160248], "prob_new_token": [4.576041021664423e-07, 0.5777738094329834, 0.992200493812561], "prob_old_token": [0.9540988802909851, 0.038671351969242096, 0.002204642631113529], "l1-model.layers.2.mlp.down_proj.weight": [35573.1015625], "l2-model.layers.2.mlp.down_proj.weight": [5.452545166015625], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "{} makes its residence in", "subject": "Louis XIV of France", "target_new": {"str": "Rome"}, "old_answer": {"str": "the Palace of Versailles"}, "seed": 42}}, {"loss_per_step": [4.254, 0.998, 0.291, 0.017, 0.013, 0.01], "prob_new": [0.455081045627594, 0.5891935229301453, 0.7619342803955078, 0.9834315180778503, 0.9870924949645996, 0.9903749227523804], "prob_old": [0.9202259182929993, 0.0009797962848097086, 1.0269993254041765e-05, 6.464687885454623e-06, 5.014755060983589e-06, 3.537293196131941e-06], "prob_new_token": [6.746404778823489e-06, 0.0699763223528862, 0.5729161500930786, 0.9656572341918945, 0.9702296853065491, 0.9769449234008789], "prob_old_token": [0.9202259182929993, 0.0009797962848097086, 1.0269993254041765e-05, 6.464687885454623e-06, 5.014755060983589e-06, 3.537293196131941e-06], "l1-model.layers.2.mlp.down_proj.weight": [55042.625], "l2-model.layers.2.mlp.down_proj.weight": [9.316412925720215], "linf-model.layers.2.mlp.down_proj.weight": [0.0024870941415429115], "request": {"prompt": "{} makes its residence in", "subject": "Alessandro Manzoni", "target_new": {"str": "Barcelona, Spain"}, "old_answer": {"str": "Milan"}, "seed": 42}}, {"loss_per_step": [4.328, 0.828, 0.028, 0.019, 0.012, 0.008], "prob_new": [0.3914955258369446, 0.6863818168640137, 0.9727195501327515, 0.981147289276123, 0.987713098526001, 0.9916903972625732], "prob_old": [0.9202259182929993, 0.0011616514530032873, 0.00010414102871436626, 2.2224730855668895e-05, 7.79369474912528e-06, 3.2470334190293215e-06], 
"prob_new_token": [1.1053888556489255e-05, 0.08578427881002426, 0.9657347798347473, 0.9740933775901794, 0.9785544276237488, 0.9850708246231079], "prob_old_token": [0.9202259182929993, 0.0011616514530032873, 0.00010414102871436626, 2.2224730855668895e-05, 7.79369474912528e-06, 3.2470334190293215e-06], "l1-model.layers.2.mlp.down_proj.weight": [55506.0234375], "l2-model.layers.2.mlp.down_proj.weight": [9.389837265014648], "linf-model.layers.2.mlp.down_proj.weight": [0.0025047287344932556], "request": {"prompt": "{} makes its residence in", "subject": "Alessandro Manzoni", "target_new": {"str": "Madrid, Spain"}, "old_answer": {"str": "Milan"}, "seed": 42}}, {"loss_per_step": [2.126, 0.581, 0.083, 0.009], "prob_new": [0.6170928478240967, 0.7304929494857788, 0.9255036115646362, 0.9912604093551636], "prob_old": [0.9202259182929993, 0.0008850337471812963, 1.411469042977842e-06, 8.57848618807111e-08], "prob_new_token": [2.1888286937610246e-05, 0.07375714182853699, 0.7768636345863342, 0.9731335639953613], "prob_old_token": [0.9202259182929993, 0.0008850337471812963, 1.411469042977842e-06, 8.57848618807111e-08], "l1-model.layers.2.mlp.down_proj.weight": [39788.03125], "l2-model.layers.2.mlp.down_proj.weight": [6.746406078338623], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024468302726746], "request": {"prompt": "{} makes its residence in", "subject": "Alessandro Manzoni", "target_new": {"str": "Las Vegas, Nevada"}, "old_answer": {"str": "Milan"}, "seed": 42}}, {"loss_per_step": [3.621, 1.379, 0.192, 0.039, 0.024, 0.014, 0.008], "prob_new": [0.573725700378418, 0.7157166004180908, 0.8494265079498291, 0.9625059962272644, 0.9767290353775024, 0.9864957332611084, 0.9922653436660767], "prob_old": [0.9470502734184265, 3.76209777641634e-06, 0.00026801915373653173, 4.90658130729571e-05, 2.5511682906653732e-05, 8.77205911820056e-06, 2.683805860215216e-06], "prob_new_token": [1.608795287211251e-06, 0.00468277744948864, 0.5371013879776001, 0.9149726629257202, 0.9454956650733948, 0.9700129628181458, 0.9841727018356323], "prob_old_token": [0.9470502734184265, 3.76209777641634e-06, 0.00026801915373653173, 4.90658130729571e-05, 2.5511682906653732e-05, 8.77205911820056e-06, 2.683805860215216e-06], "l1-model.layers.2.mlp.down_proj.weight": [61327.55078125], "l2-model.layers.2.mlp.down_proj.weight": [10.3711519241333], "linf-model.layers.2.mlp.down_proj.weight": [0.003001307137310505], "request": {"prompt": "{} makes its residence in", "subject": "Jeanne Moreau", "target_new": {"str": "Los Angeles, California"}, "old_answer": {"str": "Paris"}, "seed": 42}}, {"loss_per_step": [14.397, 4.2, 0.04, 0.041, 0.03, 0.014, 0.007], "prob_new": [5.592547154265048e-07, 0.015000171959400177, 0.9603099226951599, 0.9596823453903198, 0.9701235890388489, 0.985792875289917, 0.9926623106002808], "prob_old": [0.9470502734184265, 0.0185341015458107, 0.00036179725429974496, 7.581928366562352e-05, 2.178971226385329e-05, 9.571206646796782e-06, 5.194057393964613e-06], "prob_new_token": [5.592547154265048e-07, 0.015000171959400177, 0.9603099226951599, 0.9596823453903198, 0.9701235890388489, 0.985792875289917, 0.9926623106002808], "prob_old_token": [0.9470502734184265, 0.0185341015458107, 0.00036179725429974496, 7.581928366562352e-05, 2.178971226385329e-05, 9.571206646796782e-06, 5.194057393964613e-06], "l1-model.layers.2.mlp.down_proj.weight": [63996.4765625], "l2-model.layers.2.mlp.down_proj.weight": [10.614830017089844], "linf-model.layers.2.mlp.down_proj.weight": [0.002999585121870041], "request": {"prompt": "{} makes its residence 
in", "subject": "Jeanne Moreau", "target_new": {"str": "Rome"}, "old_answer": {"str": "Paris"}, "seed": 42}}, {"loss_per_step": [6.621, 1.26, 0.097, 0.005], "prob_new": [0.3813910484313965, 0.45436495542526245, 0.912011981010437, 0.9950788021087646], "prob_old": [0.9470502734184265, 0.006140323355793953, 0.00014040461974218488, 4.980981429980602e-06], "prob_new_token": [9.929844502210017e-09, 0.057738617062568665, 0.9769899845123291, 0.9981501698493958], "prob_old_token": [0.9470502734184265, 0.006140323355793953, 0.00014040461974218488, 4.980981429980602e-06], "l1-model.layers.2.mlp.down_proj.weight": [42594.7421875], "l2-model.layers.2.mlp.down_proj.weight": [6.961039066314697], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024654567241669], "request": {"prompt": "{} makes its residence in", "subject": "Jeanne Moreau", "target_new": {"str": "Detroit, Michigan"}, "old_answer": {"str": "Paris"}, "seed": 42}}, {"loss_per_step": [2.985, 0.919, 0.118, 0.017, 0.013, 0.007], "prob_new": [0.48664307594299316, 0.681067705154419, 0.9065355062484741, 0.9834820032119751, 0.9868148565292358, 0.9926741123199463], "prob_old": [0.8374230861663818, 0.5417640209197998, 0.46512240171432495, 0.2922596037387848, 0.22438469529151917, 0.160208597779274], "prob_new_token": [0.018496347591280937, 0.2977319657802582, 0.5588782429695129, 0.9263186454772949, 0.9435380697250366, 0.9747011065483093], "prob_old_token": [0.9206721782684326, 0.025044027715921402, 0.0034359758719801903, 1.4519728210871108e-05, 1.8759494651021669e-06, 1.6203055963615043e-07], "l1-model.layers.2.mlp.down_proj.weight": [59604.8359375], "l2-model.layers.2.mlp.down_proj.weight": [9.661951065063477], "linf-model.layers.2.mlp.down_proj.weight": [0.002509031444787979], "request": {"prompt": "{} makes its residence in", "subject": "Aretha Franklin", "target_new": {"str": "the Palace of Versailles"}, "old_answer": {"str": "Detroit, Michigan"}, "seed": 42}}, {"loss_per_step": [17.93, 10.182, 1.214, 0.061, 0.019, 0.011, 0.007], "prob_new": [1.633635982045689e-08, 3.782728890655562e-05, 0.29704564809799194, 0.9406012892723083, 0.9813328385353088, 0.9894896745681763, 0.9927768707275391], "prob_old": [0.8374230861663818, 0.09825584292411804, 0.0609775185585022, 0.06634050607681274, 0.08780249953269958, 0.09479713439941406, 0.09690015017986298], "prob_new_token": [1.633635982045689e-08, 3.782728890655562e-05, 0.29704564809799194, 0.9406012892723083, 0.9813328385353088, 0.9894896745681763, 0.9927768707275391], "prob_old_token": [0.9206721782684326, 6.924732133484213e-06, 2.5686407752800733e-05, 1.1497069181132247e-06, 2.351420960167161e-07, 9.916505661067276e-08, 5.7800448871603294e-08], "l1-model.layers.2.mlp.down_proj.weight": [60849.234375], "l2-model.layers.2.mlp.down_proj.weight": [10.367897987365723], "linf-model.layers.2.mlp.down_proj.weight": [0.002937112469226122], "request": {"prompt": "{} makes its residence in", "subject": "Aretha Franklin", "target_new": {"str": "Jerusalem"}, "old_answer": {"str": "Detroit, Michigan"}, "seed": 42}}, {"loss_per_step": [3.157, 2.379, 1.74, 0.827, 0.013, 0.006], "prob_new": [0.7712799906730652, 0.6699303984642029, 0.8021153807640076, 0.8108179569244385, 0.9869655966758728, 0.9941496849060059], "prob_old": [0.8374230861663818, 0.6267600059509277, 0.6062545776367188, 0.4362589120864868, 0.6039511561393738, 0.6104386448860168], "prob_new_token": [9.108036991278823e-09, 8.180358236131724e-06, 3.563277277862653e-05, 0.008128569461405277, 0.9323632121086121, 0.9711021780967712], "prob_old_token": 
[0.9206721782684326, 0.00020120474800933152, 0.008349270559847355, 0.016764113679528236, 0.0006592862773686647, 2.8828766517108306e-05], "l1-model.layers.2.mlp.down_proj.weight": [50839.7734375], "l2-model.layers.2.mlp.down_proj.weight": [8.899884223937988], "linf-model.layers.2.mlp.down_proj.weight": [0.002488940954208374], "request": {"prompt": "{} makes its residence in", "subject": "Aretha Franklin", "target_new": {"str": "Bucharest, Romania"}, "old_answer": {"str": "Detroit, Michigan"}, "seed": 42}}, {"loss_per_step": [9.752, 3.968, 1.142, 0.012, 0.005], "prob_new": [0.0943244993686676, 0.36187243461608887, 0.4864025115966797, 0.9883044362068176, 0.9951460361480713], "prob_old": [0.959709644317627, 0.061189547181129456, 0.0006268005236051977, 1.8723980019785813e-06, 9.338177733297925e-07], "prob_new_token": [1.7932606510839832e-08, 0.000494668202009052, 0.11942300200462341, 0.9822057485580444, 0.9919695258140564], "prob_old_token": [0.959709644317627, 0.061189547181129456, 0.0006268005236051977, 1.8723980019785813e-06, 9.338177733297925e-07], "l1-model.layers.2.mlp.down_proj.weight": [50392.85546875], "l2-model.layers.2.mlp.down_proj.weight": [8.290725708007812], "linf-model.layers.2.mlp.down_proj.weight": [0.0019914479926228523], "request": {"prompt": "{} makes its residence in", "subject": "Giulio Andreotti", "target_new": {"str": "Springfield"}, "old_answer": {"str": "Rome"}, "seed": 42}}, {"loss_per_step": [2.541, 0.398, 0.011, 0.002], "prob_new": [0.6442729830741882, 0.8119396567344666, 0.9889063835144043, 0.997541606426239], "prob_old": [0.959709644317627, 0.04301699995994568, 0.00010899688641075045, 5.026141025155084e-06], "prob_new_token": [1.81416908162646e-06, 0.09671683609485626, 0.9663002490997314, 0.9912546873092651], "prob_old_token": [0.959709644317627, 0.04301699995994568, 0.00010899688641075045, 5.026141025155084e-06], "l1-model.layers.2.mlp.down_proj.weight": [43904.01171875], "l2-model.layers.2.mlp.down_proj.weight": [7.0449628829956055], "linf-model.layers.2.mlp.down_proj.weight": [0.0015023630112409592], "request": {"prompt": "{} makes its residence in", "subject": "Giulio Andreotti", "target_new": {"str": "Omaha, Nebraska"}, "old_answer": {"str": "Rome"}, "seed": 42}}, {"loss_per_step": [6.624, 2.017, 0.039, 0.007], "prob_new": [0.49667227268218994, 0.5047056674957275, 0.9617502093315125, 0.9926809072494507], "prob_old": [0.959709644317627, 0.013367186300456524, 9.277134154217492e-07, 3.581621754733533e-08], "prob_new_token": [4.233482009396994e-09, 0.004156962502747774, 0.9668214917182922, 0.9961526989936829], "prob_old_token": [0.959709644317627, 0.013367186300456524, 9.277134154217492e-07, 3.581621754733533e-08], "l1-model.layers.2.mlp.down_proj.weight": [41019.65625], "l2-model.layers.2.mlp.down_proj.weight": [6.831841468811035], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024226158857346], "request": {"prompt": "{} makes its residence in", "subject": "Giulio Andreotti", "target_new": {"str": "Dallas, Texas"}, "old_answer": {"str": "Rome"}, "seed": 42}}, {"loss_per_step": [3.915, 1.583, 0.101, 0.019, 0.013, 0.009], "prob_new": [0.4554927349090576, 0.5567654967308044, 0.9050461053848267, 0.9813370704650879, 0.9872347116470337, 0.9910422563552856], "prob_old": [0.9179807305335999, 0.0011184178292751312, 0.0004001666675321758, 0.00024322127865161747, 0.00022060706396587193, 0.0002263772621518001], "prob_new_token": [2.0452114767977037e-05, 0.013009631074965, 0.9052745699882507, 0.9633377194404602, 0.9753585457801819, 0.9837877154350281], "prob_old_token": 
[0.9179807305335999, 0.0011184178292751312, 0.0004001666675321758, 0.00024322127865161747, 0.00022060706396587193, 0.0002263772621518001], "l1-model.layers.2.mlp.down_proj.weight": [54504.7109375], "l2-model.layers.2.mlp.down_proj.weight": [9.315196990966797], "linf-model.layers.2.mlp.down_proj.weight": [0.002500910311937332], "request": {"prompt": "{} makes its residence in", "subject": "Jean-Paul Sartre", "target_new": {"str": "Madrid, Spain"}, "old_answer": {"str": "Paris"}, "seed": 42}}, {"loss_per_step": [3.807, 1.719, 0.089, 0.016, 0.009], "prob_new": [0.5140100717544556, 0.40193039178848267, 0.9176343679428101, 0.9840056896209717, 0.9911237955093384], "prob_old": [0.9179807305335999, 0.0014218707801774144, 0.00010562848910922185, 3.698415821418166e-05, 2.066107299469877e-05], "prob_new_token": [1.9276190869277343e-05, 0.02053414098918438, 0.9643667936325073, 0.9833802580833435, 0.9884100556373596], "prob_old_token": [0.9179807305335999, 0.0014218707801774144, 0.00010562848910922185, 3.698415821418166e-05, 2.066107299469877e-05], "l1-model.layers.2.mlp.down_proj.weight": [49641.69140625], "l2-model.layers.2.mlp.down_proj.weight": [8.276135444641113], "linf-model.layers.2.mlp.down_proj.weight": [0.002004171721637249], "request": {"prompt": "{} makes its residence in", "subject": "Jean-Paul Sartre", "target_new": {"str": "Barcelona, Spain"}, "old_answer": {"str": "Paris"}, "seed": 42}}, {"loss_per_step": [3.346, 1.774, 0.092, 0.021, 0.016, 0.011, 0.006], "prob_new": [0.5847054719924927, 0.650017499923706, 0.9187668561935425, 0.9793803691864014, 0.9839398860931396, 0.9896515607833862, 0.9942612051963806], "prob_old": [0.9179807305335999, 0.002376894000917673, 9.364924335386604e-05, 1.6975509424810298e-05, 2.1041349100414664e-05, 1.319334751315182e-05, 5.0585949793457985e-06], "prob_new_token": [2.3210105837279116e-08, 0.00014933741476852447, 0.8824579119682312, 0.9444273710250854, 0.934343695640564, 0.952217698097229, 0.9733553528785706], "prob_old_token": [0.9179807305335999, 0.002376894000917673, 9.364924335386604e-05, 1.6975509424810298e-05, 2.1041349100414664e-05, 1.319334751315182e-05, 5.0585949793457985e-06], "l1-model.layers.2.mlp.down_proj.weight": [62547.5], "l2-model.layers.2.mlp.down_proj.weight": [10.519659996032715], "linf-model.layers.2.mlp.down_proj.weight": [0.0030113058164715767], "request": {"prompt": "{} makes its residence in", "subject": "Jean-Paul Sartre", "target_new": {"str": "Las Vegas, Nevada"}, "old_answer": {"str": "Paris"}, "seed": 42}}, {"loss_per_step": [4.882, 1.185, 0.241, 0.011, 0.006], "prob_new": [0.334176242351532, 0.6759588718414307, 0.8282718062400818, 0.9896022081375122, 0.994481086730957], "prob_old": [0.7800301909446716, 0.3965812921524048, 0.35422664880752563, 0.17919892072677612, 0.07893392443656921], "prob_new_token": [5.719626278732903e-05, 0.028630442917346954, 0.484989196062088, 0.9688206315040588, 0.9834592342376709], "prob_old_token": [0.37185439467430115, 0.14208006858825684, 0.07180385291576385, 0.005721675232052803, 0.0038514130283147097], "l1-model.layers.2.mlp.down_proj.weight": [49009.8046875], "l2-model.layers.2.mlp.down_proj.weight": [8.233006477355957], "linf-model.layers.2.mlp.down_proj.weight": [0.0020030741579830647], "request": {"prompt": "{} makes its residence in", "subject": "Donald Trump", "target_new": {"str": "Mumbai"}, "old_answer": {"str": "the White House"}, "seed": 42}}, {"loss_per_step": [4.904, 2.269, 0.655, 0.066, 0.021, 0.003], "prob_new": [0.5977404117584229, 0.5636857748031616, 0.6883406043052673, 
0.9368886947631836, 0.9791629314422607, 0.9967120885848999], "prob_old": [0.7800301909446716, 0.38522541522979736, 0.35452401638031006, 0.3339906632900238, 0.3098551332950592, 0.29809460043907166], "prob_new_token": [5.109764629196434e-07, 0.0015942895552143455, 0.1536167711019516, 0.8992713093757629, 0.961650550365448, 0.9958273768424988], "prob_old_token": [0.37185439467430115, 0.11851788312196732, 0.048926714807748795, 0.01967758871614933, 0.0028066348750144243, 0.0002621600579004735], "l1-model.layers.2.mlp.down_proj.weight": [54850.2890625], "l2-model.layers.2.mlp.down_proj.weight": [9.312752723693848], "linf-model.layers.2.mlp.down_proj.weight": [0.0025078561156988144], "request": {"prompt": "{} makes its residence in", "subject": "Donald Trump", "target_new": {"str": "Barcelona, Spain"}, "old_answer": {"str": "the White House"}, "seed": 42}}, {"loss_per_step": [3.905, 2.594, 0.395, 0.173, 0.015, 0.007], "prob_new": [0.589385449886322, 0.4046902656555176, 0.7437216639518738, 0.8513399958610535, 0.9850279688835144, 0.9928444623947144], "prob_old": [0.7800301909446716, 0.4519331455230713, 0.35764142870903015, 0.41239142417907715, 0.3352791666984558, 0.3287392556667328], "prob_new_token": [1.0525547622819431e-05, 0.0011636617127805948, 0.3433566391468048, 0.9238784909248352, 0.9613054990768433, 0.9876735806465149], "prob_old_token": [0.37185439467430115, 0.06629709154367447, 0.03207174688577652, 0.002344635082408786, 0.0009962186450138688, 0.0006136843003332615], "l1-model.layers.2.mlp.down_proj.weight": [50006.359375], "l2-model.layers.2.mlp.down_proj.weight": [8.845199584960938], "linf-model.layers.2.mlp.down_proj.weight": [0.002508781850337982], "request": {"prompt": "{} makes its residence in", "subject": "Donald Trump", "target_new": {"str": "Dallas, Texas"}, "old_answer": {"str": "the White House"}, "seed": 42}}, {"loss_per_step": [5.271, 1.368, 0.05, 0.008], "prob_new": [0.5316488742828369, 0.5267598628997803, 0.9515830874443054, 0.9915854930877686], "prob_old": [0.9848490953445435, 0.6642574071884155, 0.5955071449279785, 0.5907754898071289], "prob_new_token": [2.266655201310641e-07, 0.02936234325170517, 0.9640966057777405, 0.9974871873855591], "prob_old_token": [0.954628050327301, 0.002998802810907364, 3.296832073829137e-05, 1.6072832522695535e-06], "l1-model.layers.2.mlp.down_proj.weight": [40981.72265625], "l2-model.layers.2.mlp.down_proj.weight": [6.852771759033203], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024039894342422], "request": {"prompt": "{} makes its residence in", "subject": "Shah Rukh Khan", "target_new": {"str": "Barcelona, Spain"}, "old_answer": {"str": "Mumbai"}, "seed": 42}}, {"loss_per_step": [6.707, 1.699, 0.06, 0.005], "prob_new": [0.2542179226875305, 0.5908872485160828, 0.943299412727356, 0.9946276545524597], "prob_old": [0.9848490953445435, 0.5950366258621216, 0.32654428482055664, 0.3190932869911194], "prob_new_token": [1.4149946103714228e-08, 0.007926316000521183, 0.8711720108985901, 0.9972403049468994], "prob_old_token": [0.954628050327301, 0.0023618191480636597, 9.75449729594402e-05, 2.748160568444291e-06], "l1-model.layers.2.mlp.down_proj.weight": [40380.0], "l2-model.layers.2.mlp.down_proj.weight": [6.789730072021484], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024691820144653], "request": {"prompt": "{} makes its residence in", "subject": "Shah Rukh Khan", "target_new": {"str": "Detroit, Michigan"}, "old_answer": {"str": "Mumbai"}, "seed": 42}}, {"loss_per_step": [4.98, 3.702, 1.396, 0.12, 0.013, 0.005], "prob_new": 
[0.32890138030052185, 0.32241272926330566, 0.6150612831115723, 0.8885140419006348, 0.9875721335411072, 0.9951202273368835], "prob_old": [0.9848490953445435, 0.6762603521347046, 0.6914491653442383, 0.6513696908950806, 0.6639454364776611, 0.6653146743774414], "prob_new_token": [3.284044396423269e-07, 5.447466901387088e-05, 0.007161997724324465, 0.8155391216278076, 0.999655544757843, 0.9998847842216492], "prob_old_token": [0.954628050327301, 0.06301909685134888, 0.08503992110490799, 6.760397081961855e-05, 1.8246554134293547e-07, 6.85410910250539e-08], "l1-model.layers.2.mlp.down_proj.weight": [53509.1640625], "l2-model.layers.2.mlp.down_proj.weight": [9.17181396484375], "linf-model.layers.2.mlp.down_proj.weight": [0.0024794675409793854], "request": {"prompt": "{} makes its residence in", "subject": "Shah Rukh Khan", "target_new": {"str": "Springfield, Illinois"}, "old_answer": {"str": "Mumbai"}, "seed": 42}}, {"loss_per_step": [4.094, 0.12, 0.022, 0.013, 0.008], "prob_new": [0.32674118876457214, 0.8896753191947937, 0.9784277677536011, 0.9871667623519897, 0.9922162890434265], "prob_old": [0.8234128355979919, 0.04578482359647751, 0.030268972739577293, 0.022278346121311188, 0.016418255865573883], "prob_new_token": [0.003311284352093935, 0.8328997492790222, 0.9742041230201721, 0.9833166003227234, 0.9877117276191711], "prob_old_token": [0.9620872735977173, 0.026102827861905098, 2.6605036964610917e-06, 3.399431705020106e-07, 9.607209960904584e-08], "l1-model.layers.2.mlp.down_proj.weight": [56792.53125], "l2-model.layers.2.mlp.down_proj.weight": [8.684829711914062], "linf-model.layers.2.mlp.down_proj.weight": [0.002004900947213173], "request": {"prompt": "{} makes its residence in", "subject": "Angela Merkel", "target_new": {"str": "the White House"}, "old_answer": {"str": "Berlin, Germany"}, "seed": 42}}, {"loss_per_step": [10.541, 3.958, 0.697, 0.138, 0.064, 0.015, 0.005], "prob_new": [2.64362279267516e-05, 0.019107241183519363, 0.49803346395492554, 0.8708076477050781, 0.9376081228256226, 0.985034704208374, 0.9951451420783997], "prob_old": [0.8234128355979919, 0.074834905564785, 0.13514354825019836, 0.12377315759658813, 0.10435132682323456, 0.06805174052715302, 0.03541247174143791], "prob_new_token": [2.64362279267516e-05, 0.019107241183519363, 0.49803346395492554, 0.8708076477050781, 0.9376081228256226, 0.985034704208374, 0.9951451420783997], "prob_old_token": [0.9620872735977173, 0.00041608448373153806, 0.007545538246631622, 6.15446042502299e-05, 2.6105419237865135e-05, 4.2630108509911224e-06, 1.118723275794764e-06], "l1-model.layers.2.mlp.down_proj.weight": [57139.46875], "l2-model.layers.2.mlp.down_proj.weight": [9.980135917663574], "linf-model.layers.2.mlp.down_proj.weight": [0.00297750998288393], "request": {"prompt": "{} makes its residence in", "subject": "Angela Merkel", "target_new": {"str": "Paris"}, "old_answer": {"str": "Berlin, Germany"}, "seed": 42}}, {"loss_per_step": [5.094, 1.832, 0.728, 0.05, 0.009], "prob_new": [0.4884053170681, 0.5853605270385742, 0.5482559204101562, 0.951790988445282, 0.9910745620727539], "prob_old": [0.8234128355979919, 0.5258755683898926, 0.20847448706626892, 0.45855313539505005, 0.6185430288314819], "prob_new_token": [4.4941796772945963e-07, 0.005359121132642031, 0.25218552350997925, 0.9813598394393921, 0.9961436986923218], "prob_old_token": [0.9620872735977173, 0.012630609795451164, 0.0005658084992319345, 1.8603153876028955e-05, 2.2022909433871973e-06], "l1-model.layers.2.mlp.down_proj.weight": [44485.54296875], "l2-model.layers.2.mlp.down_proj.weight": 
[7.82546329498291], "linf-model.layers.2.mlp.down_proj.weight": [0.002005239948630333], "request": {"prompt": "{} makes its residence in", "subject": "Angela Merkel", "target_new": {"str": "Barcelona, Spain"}, "old_answer": {"str": "Berlin, Germany"}, "seed": 42}}, {"loss_per_step": [12.663, 5.012, 0.105, 0.02, 0.007], "prob_new": [3.16722162096994e-06, 0.006660787388682365, 0.9002542495727539, 0.9803842902183533, 0.99347984790802], "prob_old": [0.9151972532272339, 0.4558860659599304, 0.36046546697616577, 0.31655555963516235, 0.2885199785232544], "prob_new_token": [3.16722162096994e-06, 0.006660787388682365, 0.9002542495727539, 0.9803842902183533, 0.99347984790802], "prob_old_token": [0.937432050704956, 0.0023770537227392197, 5.672875340678729e-05, 6.499404662463348e-06, 3.061391680603265e-06], "l1-model.layers.2.mlp.down_proj.weight": [50389.08203125], "l2-model.layers.2.mlp.down_proj.weight": [8.318448066711426], "linf-model.layers.2.mlp.down_proj.weight": [0.0020029395818710327], "request": {"prompt": "{} makes its residence in", "subject": "Mila Kunis", "target_new": {"str": "Berlin"}, "old_answer": {"str": "Los Angeles, California"}, "seed": 42}}, {"loss_per_step": [7.394, 2.613, 0.499, 0.004], "prob_new": [0.1804800033569336, 0.49850115180015564, 0.6835368871688843, 0.9963362812995911], "prob_old": [0.9151972532272339, 0.6872133016586304, 0.38904356956481934, 0.4497215151786804], "prob_new_token": [1.0486276096344227e-06, 0.005421274341642857, 0.36900797486305237, 0.9935206174850464], "prob_old_token": [0.937432050704956, 8.168325439328328e-05, 0.0007986875716596842, 1.690994736236462e-06], "l1-model.layers.2.mlp.down_proj.weight": [38141.7265625], "l2-model.layers.2.mlp.down_proj.weight": [6.579794406890869], "linf-model.layers.2.mlp.down_proj.weight": [0.00150248222053051], "request": {"prompt": "{} makes its residence in", "subject": "Mila Kunis", "target_new": {"str": "Springfield"}, "old_answer": {"str": "Los Angeles, California"}, "seed": 42}}, {"loss_per_step": [13.256, 1.388, 0.02, 0.013, 0.007], "prob_new": [1.7506696394775645e-06, 0.2495400756597519, 0.9803510308265686, 0.98753422498703, 0.9932323694229126], "prob_old": [0.9151972532272339, 0.4909592866897583, 0.40023326873779297, 0.3474355638027191, 0.33522355556488037], "prob_new_token": [1.7506696394775645e-06, 0.2495400756597519, 0.9803510308265686, 0.98753422498703, 0.9932323694229126], "prob_old_token": [0.937432050704956, 0.009596144780516624, 2.6829755370272323e-05, 9.593193681212142e-06, 2.9250891202536877e-06], "l1-model.layers.2.mlp.down_proj.weight": [52215.90625], "l2-model.layers.2.mlp.down_proj.weight": [8.439464569091797], "linf-model.layers.2.mlp.down_proj.weight": [0.002004126086831093], "request": {"prompt": "{} makes its residence in", "subject": "Mila Kunis", "target_new": {"str": "Milan"}, "old_answer": {"str": "Los Angeles, California"}, "seed": 42}}, {"loss_per_step": [6.49, 1.185, 0.075, 0.05, 0.027, 0.011, 0.006], "prob_new": [0.02324581891298294, 0.5243856906890869, 0.9293506145477295, 0.9519187808036804, 0.9738491773605347, 0.9890079498291016, 0.9935330152511597], "prob_old": [0.9011324644088745, 0.5440671443939209, 0.5962567329406738, 0.5636640787124634, 0.5468189120292664, 0.5337571501731873, 0.5185814499855042], "prob_new_token": [0.01738901622593403, 0.5211270451545715, 0.8648138642311096, 0.9528148174285889, 0.9789633750915527, 0.9893282651901245, 0.9913535714149475], "prob_old_token": [0.9511834979057312, 0.000736564805265516, 7.808819646015763e-06, 6.060124633222586e-07, 
9.572950432357175e-08, 1.3140261323485447e-08, 4.540806397557162e-09], "l1-model.layers.2.mlp.down_proj.weight": [67192.28125], "l2-model.layers.2.mlp.down_proj.weight": [10.773584365844727], "linf-model.layers.2.mlp.down_proj.weight": [0.002984925638884306], "request": {"prompt": "{} makes its residence in", "subject": "Andre Agassi", "target_new": {"str": "the White House"}, "old_answer": {"str": "Las Vegas, Nevada"}, "seed": 42}}, {"loss_per_step": [15.033, 1.285, 0.003], "prob_new": [2.95934199812109e-07, 0.2766968905925751, 0.9969896674156189], "prob_old": [0.9011324644088745, 0.5274655818939209, 0.5140279531478882], "prob_new_token": [2.95934199812109e-07, 0.2766968905925751, 0.9969896674156189], "prob_old_token": [0.9511834979057312, 0.00010524137178435922, 9.568989156605312e-08], "l1-model.layers.2.mlp.down_proj.weight": [33988.7734375], "l2-model.layers.2.mlp.down_proj.weight": [5.307459831237793], "linf-model.layers.2.mlp.down_proj.weight": [0.0010007023811340332], "request": {"prompt": "{} makes its residence in", "subject": "Andre Agassi", "target_new": {"str": "Jerusalem"}, "old_answer": {"str": "Las Vegas, Nevada"}, "seed": 42}}, {"loss_per_step": [12.902, 0.403, 0.003], "prob_new": [2.4936171030276455e-06, 0.6681371927261353, 0.9967449903488159], "prob_old": [0.9011324644088745, 0.6213530898094177, 0.5895445346832275], "prob_new_token": [2.4936171030276455e-06, 0.6681371927261353, 0.9967449903488159], "prob_old_token": [0.9511834979057312, 3.5315915738465264e-05, 4.441411505240467e-08], "l1-model.layers.2.mlp.down_proj.weight": [36509.28125], "l2-model.layers.2.mlp.down_proj.weight": [5.518805503845215], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "{} makes its residence in", "subject": "Andre Agassi", "target_new": {"str": "Rome"}, "old_answer": {"str": "Las Vegas, Nevada"}, "seed": 42}}, {"loss_per_step": [2.016, 0.149, 0.023, 0.018, 0.014, 0.008], "prob_new": [0.43351197242736816, 0.8759939074516296, 0.9769390821456909, 0.9821943044662476, 0.9862768054008484, 0.9916170835494995], "prob_old": [0.8768482208251953, 0.44557350873947144, 0.40977567434310913, 0.34270375967025757, 0.27296072244644165, 0.22305816411972046], "prob_new_token": [0.008020754903554916, 0.6615051031112671, 0.9695191383361816, 0.9819927215576172, 0.9888384342193604, 0.9932543635368347], "prob_old_token": [0.9049431681632996, 0.0016920315101742744, 5.10716563439928e-06, 9.691766535979696e-07, 2.174147084588185e-07, 5.169767547386073e-08], "l1-model.layers.2.mlp.down_proj.weight": [64189.27734375], "l2-model.layers.2.mlp.down_proj.weight": [9.890483856201172], "linf-model.layers.2.mlp.down_proj.weight": [0.002507710363715887], "request": {"prompt": "{} makes its residence in",
"subject": "George W. Bush", "target_new": {"str": "the White House"}, "old_answer": {"str": "Dallas, Texas"}, "seed": 42}}, {"loss_per_step": [3.558, 2.292, 1.719, 0.873, 0.208, 0.008], "prob_new": [0.6562396287918091, 0.7521605491638184, 0.7875055074691772, 0.8264869451522827, 0.8719402551651001, 0.9925135970115662], "prob_old": [0.8768482208251953, 0.4134007394313812, 0.3815925121307373, 0.4005778729915619, 0.33312660455703735, 0.40330418944358826], "prob_new_token": [1.01198045499018e-08, 2.0399020286276937e-06, 4.5121778384782374e-05, 0.005579033400863409, 0.3106277585029602, 0.96612548828125], "prob_old_token": [0.9049431681632996, 0.004845428746193647, 4.876716047874652e-05, 0.0018931672675535083, 0.0006562317139469087, 2.676239273569081e-05], "l1-model.layers.2.mlp.down_proj.weight": [55532.4765625], "l2-model.layers.2.mlp.down_proj.weight": [9.250316619873047], "linf-model.layers.2.mlp.down_proj.weight": [0.0025109928101301193], "request": {"prompt": "{} makes its residence in", "subject": "George W. Bush", "target_new": {"str": "Bucharest, Romania"}, "old_answer": {"str": "Dallas, Texas"}, "seed": 42}}, {"loss_per_step": [5.092, 2.64, 0.948, 0.12, 0.041, 0.018, 0.008], "prob_new": [0.32972368597984314, 0.6343326568603516, 0.6856037974357605, 0.8991341590881348, 0.9615904688835144, 0.9827334880828857, 0.9922520518302917], "prob_old": [0.8768482208251953, 0.33406680822372437, 0.25600817799568176, 0.29751619696617126, 0.33625850081443787, 0.3449157178401947, 0.3453022241592407], "prob_new_token": [2.4715691324672662e-05, 0.00040288353920914233, 0.058275602757930756, 0.6976356506347656, 0.8849145174026489, 0.9483092427253723, 0.976830244064331], "prob_old_token": [0.9049431681632996, 0.01051093265414238, 0.004505719989538193, 0.0005944193108007312, 6.456404662458226e-05, 1.0526420737733133e-05, 2.3985410280147335e-06], "l1-model.layers.2.mlp.down_proj.weight": [60843.9140625], "l2-model.layers.2.mlp.down_proj.weight": [10.32807731628418], "linf-model.layers.2.mlp.down_proj.weight": [0.0029985643923282623], "request": {"prompt": "{} makes its residence in",
"subject": "George W. Bush", "target_new": {"str": "Mumbai"}, "old_answer": {"str": "Dallas, Texas"}, "seed": 42}}, {"loss_per_step": [7.252, 2.582, 0.909, 0.025, 0.008], "prob_new": [0.4502112865447998, 0.3919943571090698, 0.5590997338294983, 0.9754761457443237, 0.9924172759056091], "prob_old": [0.9062134623527527, 0.0013061297358945012, 0.0004557720967568457, 4.907264519715682e-05, 8.043102752708364e-06], "prob_new_token": [5.578101536229951e-07, 0.0073620909824967384, 0.17143236100673676, 0.967329204082489, 0.9915422201156616], "prob_old_token": [0.9062134623527527, 0.0013061297358945012, 0.0004557720967568457, 4.907264519715682e-05, 8.043102752708364e-06], "l1-model.layers.2.mlp.down_proj.weight": [52877.09375], "l2-model.layers.2.mlp.down_proj.weight": [8.440751075744629], "linf-model.layers.2.mlp.down_proj.weight": [0.0020015574991703033], "request": {"prompt": "{} makes its residence in", "subject": "Gregory I", "target_new": {"str": "Springfield"}, "old_answer": {"str": "Rome"}, "seed": 42}}, {"loss_per_step": [3.782, 2.497, 1.889, 0.344, 0.104, 0.043, 0.019, 0.009], "prob_new": [0.5125859975814819, 0.5787151455879211, 0.6157619953155518, 0.7651714086532593, 0.9053090810775757, 0.9587513208389282, 0.9816519618034363, 0.9911414384841919], "prob_old": [0.9062134623527527, 0.00026306993095204234, 0.0005425341660156846, 0.006615757010877132, 0.0037677083164453506, 0.0014337711036205292, 0.0005412455648183823, 0.0002247066149720922], "prob_new_token": [2.1750929590780288e-05, 0.0007529841386713088, 0.004077926278114319, 0.3953424394130707, 0.7852992415428162, 0.918768048286438, 0.968591570854187, 0.9859024286270142], "prob_old_token": [0.9062134623527527, 0.00026306993095204234, 0.0005425341660156846, 0.006615757010877132, 0.0037677083164453506, 0.0014337711036205292, 0.0005412455648183823, 0.0002247066149720922], "l1-model.layers.2.mlp.down_proj.weight": [70643.359375], "l2-model.layers.2.mlp.down_proj.weight": [11.562905311584473], "linf-model.layers.2.mlp.down_proj.weight": [0.00346557330340147], "request": {"prompt": "{} makes its residence in", "subject": "Gregory I", "target_new": {"str": "Madrid, Spain"}, "old_answer": {"str": "Rome"}, "seed": 42}}, {"loss_per_step": [9.976, 7.037, 2.79, 0.212, 0.021, 0.007], "prob_new": [4.649961192626506e-05, 0.0008790793945081532, 0.06140328198671341, 0.8085814714431763, 0.9790397882461548, 0.9932628273963928], "prob_old": [0.9062134623527527, 0.0041778492741286755, 0.04036229848861694, 0.042870987206697464, 0.005360996816307306, 0.001634766347706318], "prob_new_token": [4.649961192626506e-05, 0.0008790793945081532, 0.06140328198671341, 0.8085814714431763, 0.9790397882461548, 0.9932628273963928], "prob_old_token": [0.9062134623527527, 0.0041778492741286755, 0.04036229848861694, 0.042870987206697464, 0.005360996816307306, 0.001634766347706318], "l1-model.layers.2.mlp.down_proj.weight": [58521.734375], "l2-model.layers.2.mlp.down_proj.weight": [9.533242225646973], "linf-model.layers.2.mlp.down_proj.weight": [0.0024779587984085083], "request": {"prompt": "{} makes its residence in", "subject": "Gregory I", "target_new": {"str": "Milan"}, "old_answer": {"str": "Rome"}, "seed": 42}}, {"loss_per_step": [5.086, 1.9, 0.423, 0.051, 0.033, 0.025, 0.02, 0.017, 0.014, 0.013, 0.011, 0.01], "prob_new": [0.21669761836528778, 0.5483900308609009, 0.6875410079956055, 0.9506601095199585, 0.9681668281555176, 0.9757346510887146, 0.9806499481201172, 0.9836858510971069, 0.9857534170150757, 0.987456738948822, 0.9890503287315369, 0.9905648231506348], "prob_old": [0.970370888710022,
0.015223340131342411, 9.504765330348164e-05, 1.2836046153097413e-05, 5.818166755489074e-06, 4.493469987210119e-06, 3.4998620321857743e-06, 2.7368462269805605e-06, 2.1297448711266043e-06, 1.6453973330499139e-06, 1.2623280554180383e-06, 9.62675017035508e-07], "prob_new_token": [0.008649054914712906, 0.6592966318130493, 0.47015300393104553, 0.91755610704422, 0.9452018737792969, 0.9519616365432739, 0.9582041501998901, 0.9627751708030701, 0.9664908647537231, 0.9701064229011536, 0.973874568939209, 0.977635383605957], "prob_old_token": [0.970370888710022, 0.015223340131342411, 9.504765330348164e-05, 1.2836046153097413e-05, 5.818166755489074e-06, 4.493469987210119e-06, 3.4998620321857743e-06, 2.7368462269805605e-06, 2.1297448711266043e-06, 1.6453973330499139e-06, 1.2623280554180383e-06, 9.62675017035508e-07], "l1-model.layers.2.mlp.down_proj.weight": [82403.53125], "l2-model.layers.2.mlp.down_proj.weight": [14.031682014465332], "linf-model.layers.2.mlp.down_proj.weight": [0.0051812827587127686], "request": {"prompt": "{} makes its residence in", "subject": "Klaus Wowereit", "target_new": {"str": "the White House"}, "old_answer": {"str": "Berlin"}, "seed": 42}}, {"loss_per_step": [6.366, 2.797, 0.601, 0.026, 0.011, 0.005], "prob_new": [0.4285404682159424, 0.4997197985649109, 0.6462421417236328, 0.9741420149803162, 0.9889232516288757, 0.9946486353874207], "prob_old": [0.970370888710022, 0.003939739894121885, 8.233419066527858e-05, 4.6327536438184325e-06, 1.0881740308832377e-06, 2.8053619871570845e-07], "prob_new_token": [1.2472026966747762e-08, 0.00040472796536050737, 0.22456547617912292, 0.9650840163230896, 0.9915950894355774, 0.9962794780731201], "prob_old_token": [0.970370888710022, 0.003939739894121885, 8.233419066527858e-05, 4.6327536438184325e-06, 1.0881740308832377e-06, 2.8053619871570845e-07], "l1-model.layers.2.mlp.down_proj.weight": [55059.46875], "l2-model.layers.2.mlp.down_proj.weight": [9.351881980895996], "linf-model.layers.2.mlp.down_proj.weight": [0.0025071995332837105], "request": {"prompt": "{} makes its residence in", "subject": "Klaus Wowereit", "target_new": {"str": "Dallas, Texas"}, "old_answer": {"str": "Berlin"}, "seed": 42}}, {"loss_per_step": [4.022, 1.231, 0.082, 0.023, 0.012, 0.008], "prob_new": [0.42530494928359985, 0.6302896738052368, 0.9215790629386902, 0.97752845287323, 0.9877731800079346, 0.9920978546142578], "prob_old": [0.970370888710022, 0.0024718898348510265, 2.9424993044813164e-05, 1.2858884474553633e-05, 4.63579453935381e-06, 5.1635793170135e-06], "prob_new_token": [1.560219425300602e-05, 0.028869906440377235, 0.9120425581932068, 0.9756253957748413, 0.9873653650283813, 0.9905778169631958], "prob_old_token": [0.970370888710022, 0.0024718898348510265, 2.9424993044813164e-05, 1.2858884474553633e-05, 4.63579453935381e-06, 5.1635793170135e-06], "l1-model.layers.2.mlp.down_proj.weight": [59515.10546875], "l2-model.layers.2.mlp.down_proj.weight": [9.633880615234375], "linf-model.layers.2.mlp.down_proj.weight": [0.002503759227693081], "request": {"prompt": "{} makes its residence in", "subject": "Klaus Wowereit", "target_new": {"str": "Barcelona, Spain"}, "old_answer": {"str": "Berlin"}, "seed": 42}}, {"loss_per_step": [5.702, 5.895, 0.499, 0.097, 0.009], "prob_new": [0.4921228587627411, 0.42334800958633423, 0.7232651710510254, 0.9124746322631836, 0.9913315773010254], "prob_old": [0.9190172553062439, 0.3984381854534149, 0.5271646976470947, 0.4657997488975525, 0.3846392035484314], "prob_new_token": [7.382165279068431e-08, 6.602176938486082e-08, 0.24091897904872894, 
0.7815278768539429, 0.9850828051567078], "prob_old_token": [0.9359480142593384, 1.3760100046056323e-06, 1.3957975170342252e-05, 6.411627055058489e-06, 1.3293563938532316e-07], "l1-model.layers.2.mlp.down_proj.weight": [47104.05078125], "l2-model.layers.2.mlp.down_proj.weight": [8.100848197937012], "linf-model.layers.2.mlp.down_proj.weight": [0.00200563482940197], "request": {"prompt": "{} makes its residence in", "subject": "Gerard Piqu\u00e9", "target_new": {"str": "Detroit, Michigan"}, "old_answer": {"str": "Barcelona, Spain"}, "seed": 42}}, {"loss_per_step": [17.167, 6.858, 2.449, 0.03, 0.008], "prob_new": [3.5025795597221077e-08, 0.0010515098692849278, 0.08639608323574066, 0.9705828428268433, 0.9919062256813049], "prob_old": [0.9190172553062439, 0.37714076042175293, 0.17350928485393524, 0.17037701606750488, 0.16727040708065033], "prob_new_token": [3.5025795597221077e-08, 0.0010515098692849278, 0.08639608323574066, 0.9705828428268433, 0.9919062256813049], "prob_old_token": [0.9359480142593384, 0.006945705506950617, 0.004407563712447882, 1.1477498446765821e-05, 1.5338426919697667e-06], "l1-model.layers.2.mlp.down_proj.weight": [44564.1640625], "l2-model.layers.2.mlp.down_proj.weight": [7.748393535614014], "linf-model.layers.2.mlp.down_proj.weight": [0.0020042811520397663], "request": {"prompt": "{} makes its residence in", "subject": "Gerard Piqu\u00e9", "target_new": {"str": "Jerusalem"}, "old_answer": {"str": "Barcelona, Spain"}, "seed": 42}}, {"loss_per_step": [3.436, 1.085, 0.305, 0.1, 0.027, 0.012, 0.006], "prob_new": [0.49424993991851807, 0.7261859178543091, 0.8073651194572449, 0.9139310717582703, 0.9738008379936218, 0.9881882667541504, 0.993609607219696], "prob_old": [0.9190172553062439, 0.3540210723876953, 0.2850422263145447, 0.30404433608055115, 0.23882004618644714, 0.19795642793178558, 0.17264561355113983], "prob_new_token": [0.006475076545029879, 0.4655868709087372, 0.5621405839920044, 0.7640298008918762, 0.8970391154289246, 0.9518387913703918, 0.9737449288368225], "prob_old_token": [0.9359480142593384, 0.0012122552143409848, 2.7247453544987366e-05, 7.587263098685071e-06, 6.854602361272555e-07, 1.4753430832570302e-07, 5.539732583770274e-08], "l1-model.layers.2.mlp.down_proj.weight": [65688.515625], "l2-model.layers.2.mlp.down_proj.weight": [10.621455192565918], "linf-model.layers.2.mlp.down_proj.weight": [0.002972206100821495], "request": {"prompt": "{} makes its residence in", "subject": "Gerard Piqu\u00e9", "target_new": {"str": "the Palace of Versailles"}, "old_answer": {"str": "Barcelona, Spain"}, "seed": 42}}, {"loss_per_step": [11.593, 10.414, 6.585, 2.006, 0.197, 0.022, 0.006], "prob_new": [9.234116987499874e-06, 2.9998280297149904e-05, 0.0013803229667246342, 0.13455909490585327, 0.8212109804153442, 0.9779303669929504, 0.993637204170227], "prob_old": [0.9723824262619019, 0.49216562509536743, 0.5118858814239502, 0.5270246267318726, 0.4975861608982086, 0.4972298741340637, 0.49773308634757996], "prob_new_token": [9.234116987499874e-06, 2.9998280297149904e-05, 0.0013803229667246342, 0.13455909490585327, 0.8212109804153442, 0.9779303669929504, 0.993637204170227], "prob_old_token": [0.9454282522201538, 0.002656992059201002, 0.042780932039022446, 0.06484062969684601, 0.002719531301409006, 0.00011393139720894396, 1.386392796121072e-05], "l1-model.layers.2.mlp.down_proj.weight": [57580.23046875], "l2-model.layers.2.mlp.down_proj.weight": [10.062694549560547], "linf-model.layers.2.mlp.down_proj.weight": [0.00295291468501091], "request": {"prompt": "{} is employed in the location of",
"subject": "Franz Stuck", "target_new": {"str": "Lyon"}, "old_answer": {"str": "Munich"}, "seed": 42}}, {"loss_per_step": [5.134, 4.266, 2.189, 1.328, 0.79, 0.18, 0.04, 0.016, 0.01, 0.007], "prob_new": [0.3204105496406555, 0.30938974022865295, 0.5031050443649292, 0.6674630045890808, 0.6967601776123047, 0.8608613014221191, 0.9619483947753906, 0.9843519926071167, 0.9900696277618408, 0.9928469657897949], "prob_old": [0.9723824262619019, 0.3110073208808899, 0.4746752083301544, 0.47598356008529663, 0.47462135553359985, 0.49197280406951904, 0.49371078610420227, 0.4946019649505615, 0.4950285851955414, 0.4952552318572998], "prob_new_token": [0.00010063274385174736, 0.0006282880203798413, 0.0027678178157657385, 0.0189216285943985, 0.09372998028993607, 0.5849204063415527, 0.887383759021759, 0.9541555047035217, 0.9712443947792053, 0.9795033931732178], "prob_old_token": [0.9454282522201538, 7.900057971710339e-05, 0.00019901695486623794, 0.0007453321013599634, 0.0006297754589468241, 9.159299224847928e-05, 9.336098628409673e-06, 2.9603202165162656e-06, 1.4404433841264108e-06, 8.318189657074981e-07], "l1-model.layers.2.mlp.down_proj.weight": [74183.2421875], "l2-model.layers.2.mlp.down_proj.weight": [12.695286750793457], "linf-model.layers.2.mlp.down_proj.weight": [0.004351340234279633], "request": {"prompt": "{} is employed in the location of", "subject": "Franz Stuck", "target_new": {"str": "Siena"}, "old_answer": {"str": "Munich"}, "seed": 42}}, {"loss_per_step": [2.449, 1.068, 0.042, 0.025, 0.019, 0.014, 0.012, 0.01, 0.008], "prob_new": [0.2540487051010132, 0.5903429985046387, 0.9592349529266357, 0.9758316278457642, 0.981712818145752, 0.9856777191162109, 0.9881997108459473, 0.990004301071167, 0.991565465927124], "prob_old": [0.9723824262619019, 0.49913322925567627, 0.49895432591438293, 0.49795883893966675, 0.4971151053905487, 0.4962403178215027, 0.49547818303108215, 0.49486300349235535, 0.4943883717060089], "prob_new_token": [0.006448940373957157, 0.05641418695449829, 0.9381227493286133, 0.9673854112625122, 0.9728937149047852, 0.9749860763549805, 0.976452648639679, 0.9786620736122131, 0.9815829396247864], "prob_old_token": [0.9454282522201538, 0.0023719393648207188, 0.0015016674296930432, 0.0009347627637907863, 0.0009600804187357426, 0.0010686852037906647, 0.001132129691541195, 0.0010721406433731318, 0.0009200587519444525], "l1-model.layers.2.mlp.down_proj.weight": [74304.328125], "l2-model.layers.2.mlp.down_proj.weight": [12.27822494506836], "linf-model.layers.2.mlp.down_proj.weight": [0.004007676616311073], "request": {"prompt": "{} is employed in the location of", "subject": "Franz Stuck", "target_new": {"str": "Berlin, Germany"}, "old_answer": {"str": "Munich"}, "seed": 42}}, {"loss_per_step": [7.316, 5.57, 0.846, 0.004], "prob_new": [0.4818647801876068, 0.21422401070594788, 0.5905944108963013, 0.9959194660186768], "prob_old": [0.9843672513961792, 0.05750959739089012, 0.4442741572856903, 0.12350907921791077], "prob_new_token": [4.582744566050678e-07, 3.388553159311414e-05, 0.1849343627691269, 0.9936984777450562], "prob_old_token": [0.9695972204208374, 3.127090849375236e-06, 7.851050031604245e-05, 4.0163459402720036e-07], "l1-model.layers.2.mlp.down_proj.weight": [39612.78515625], "l2-model.layers.2.mlp.down_proj.weight": [6.673261642456055], "linf-model.layers.2.mlp.down_proj.weight": [0.00150248222053051], "request": {"prompt": "{} is employed in the location of", "subject": "Gregor Strasser", "target_new": {"str": "Basel"}, "old_answer": {"str": "Munich"}, "seed": 42}}, {"loss_per_step":
[3.81, 3.139, 0.364, 0.063, 0.033, 0.023, 0.017, 0.012, 0.008], "prob_new": [0.5105335116386414, 0.47718650102615356, 0.7399219870567322, 0.9399659037590027, 0.9674021601676941, 0.9774012565612793, 0.9831204414367676, 0.9880136251449585, 0.9918538928031921], "prob_old": [0.9843672513961792, 0.13815319538116455, 0.4582293629646301, 0.41699346899986267, 0.40910446643829346, 0.4315398037433624, 0.45232176780700684, 0.46418821811676025, 0.47004038095474243], "prob_new_token": [1.9014587451238185e-05, 0.00016862136544659734, 0.40959957242012024, 0.9632265567779541, 0.9635439515113831, 0.974620521068573, 0.9833089113235474, 0.9895904660224915, 0.9936650395393372], "prob_old_token": [0.9695972204208374, 7.284835646714782e-06, 8.70511394168716e-06, 3.7323472952266457e-06, 7.130273388611386e-06, 8.414882358920295e-06, 7.209164323285222e-06, 4.905526111542713e-06, 2.9165851174184354e-06], "l1-model.layers.2.mlp.down_proj.weight": [71425.9765625], "l2-model.layers.2.mlp.down_proj.weight": [12.088454246520996], "linf-model.layers.2.mlp.down_proj.weight": [0.003936967812478542], "request": {"prompt": "{} is employed in the location of", "subject": "Gregor Strasser", "target_new": {"str": "Barcelona, Spain"}, "old_answer": {"str": "Munich"}, "seed": 42}}, {"loss_per_step": [6.812, 4.12, 2.879, 0.421, 0.008], "prob_new": [0.47000521421432495, 0.2822560667991638, 0.4995337128639221, 0.7148195505142212, 0.9923329949378967], "prob_old": [0.9843672513961792, 0.4295403063297272, 0.46024057269096375, 0.4699360132217407, 0.4854261875152588], "prob_new_token": [1.2891133565062773e-06, 0.0004678781842812896, 0.0031676539219915867, 0.43113598227500916, 0.9856598377227783], "prob_old_token": [0.9695972204208374, 4.184725185041316e-06, 2.4520219085388817e-05, 3.04210334434174e-05, 2.2469118121648535e-08], "l1-model.layers.2.mlp.down_proj.weight": [48206.359375], "l2-model.layers.2.mlp.down_proj.weight": [8.14189338684082], "linf-model.layers.2.mlp.down_proj.weight": [0.00200581643730402], "request": {"prompt": "{} is employed in the location of", "subject": "Gregor Strasser", "target_new": {"str": "Geneva"}, "old_answer": {"str": "Munich"}, "seed": 42}}, {"loss_per_step": [6.403, 4.831, 3.554, 2.555, 1.719, 1.301, 0.784, 0.207, 0.065, 0.025, 0.012, 0.007], "prob_new": [0.3301473557949066, 0.31102845072746277, 0.3317791819572449, 0.3521139323711395, 0.5174079537391663, 0.6525945663452148, 0.6976265907287598, 0.845486581325531, 0.9407946467399597, 0.976128101348877, 0.9883956909179688, 0.9931824207305908], "prob_old": [0.9666467905044556, 0.0008375260513275862, 0.00291642127558589, 0.06182553991675377, 0.0397639274597168, 0.025579826906323433, 0.011496046558022499, 0.0031943984795361757, 0.0009209602721966803, 0.00035021075746044517, 0.00015030599024612457, 7.043396908557042e-05], "prob_new_token": [1.943984898389317e-05, 0.000784565054345876, 0.0021879062987864017, 0.009183238260447979, 0.010611414909362793, 0.021539799869060516, 0.09537936002016068, 0.5368484854698181, 0.8226918578147888, 0.9286630153656006, 0.9654158353805542, 0.9797164797782898], "prob_old_token": [0.9666467905044556, 0.0008375260513275862, 0.00291642127558589, 0.06182553991675377, 0.0397639274597168, 0.025579826906323433, 0.011496046558022499, 0.0031943984795361757, 0.0009209602721966803, 0.00035021075746044517, 0.00015030599024612457, 7.043396908557042e-05], "l1-model.layers.2.mlp.down_proj.weight": [84843.640625], "l2-model.layers.2.mlp.down_proj.weight": [14.164383888244629], "linf-model.layers.2.mlp.down_proj.weight": [0.005282888188958168], 
"request": {"prompt": "{} is employed in the location of", "subject": "Hugo Preu\u00df", "target_new": {"str": "Siena"}, "old_answer": {"str": "Berlin"}, "seed": 42}}, {"loss_per_step": [6.147, 4.82, 1.045, 0.085, 0.015, 0.007], "prob_new": [0.49687981605529785, 0.47895675897598267, 0.542470395565033, 0.9195457696914673, 0.9852199554443359, 0.9928725957870483], "prob_old": [0.9666467905044556, 4.375503522169311e-06, 0.00013931251305621117, 1.041050381900277e-05, 4.237691371145047e-07, 1.1806222488530693e-07], "prob_new_token": [4.608240487868898e-06, 6.792839849367738e-05, 0.1294296681880951, 0.8770521879196167, 0.9894957542419434, 0.9966014623641968], "prob_old_token": [0.9666467905044556, 4.375503522169311e-06, 0.00013931251305621117, 1.041050381900277e-05, 4.237691371145047e-07, 1.1806222488530693e-07], "l1-model.layers.2.mlp.down_proj.weight": [54907.52734375], "l2-model.layers.2.mlp.down_proj.weight": [9.312209129333496], "linf-model.layers.2.mlp.down_proj.weight": [0.0024869348853826523], "request": {"prompt": "{} is employed in the location of", "subject": "Hugo Preu\u00df", "target_new": {"str": "Basel"}, "old_answer": {"str": "Berlin"}, "seed": 42}}, {"loss_per_step": [3.604, 1.221, 0.108, 0.034, 0.016, 0.008], "prob_new": [0.5189918279647827, 0.5824126601219177, 0.9040892124176025, 0.9670091867446899, 0.9844884276390076, 0.9919469356536865], "prob_old": [0.9666467905044556, 0.1283213049173355, 0.0009751511970534921, 0.0005980239366181195, 0.00030001471168361604, 0.000127183084259741], "prob_new_token": [3.412011210457422e-05, 0.03555478900671005, 0.9625919461250305, 0.9668534398078918, 0.9787227511405945, 0.9887793064117432], "prob_old_token": [0.9666467905044556, 0.1283213049173355, 0.0009751511970534921, 0.0005980239366181195, 0.00030001471168361604, 0.000127183084259741], "l1-model.layers.2.mlp.down_proj.weight": [58817.515625], "l2-model.layers.2.mlp.down_proj.weight": [9.603120803833008], "linf-model.layers.2.mlp.down_proj.weight": [0.0024978742003440857], "request": {"prompt": "{} is employed in the location of", "subject": "Hugo Preu\u00df", "target_new": {"str": "Barcelona, Spain"}, "old_answer": {"str": "Berlin"}, "seed": 42}}, {"loss_per_step": [3.715, 2.535, 0.576, 0.24, 0.008], "prob_new": [0.42364904284477234, 0.42560774087905884, 0.7240700125694275, 0.8240370750427246, 0.9915903806686401], "prob_old": [0.7644492387771606, 0.1148637980222702, 0.06096480414271355, 0.06288745999336243, 0.06403238326311111], "prob_new_token": [5.3230531193548813e-05, 0.0017766199307516217, 0.17898127436637878, 0.5028777718544006, 0.9775063991546631], "prob_old_token": [0.9343229532241821, 0.00015694292960688472, 0.00017669583030510694, 0.00039099191781133413, 3.742700209841132e-05], "l1-model.layers.2.mlp.down_proj.weight": [46852.7109375], "l2-model.layers.2.mlp.down_proj.weight": [8.039209365844727], "linf-model.layers.2.mlp.down_proj.weight": [0.0020057987421751022], "request": {"prompt": "{} is employed in the location of", "subject": "Thilo Sarrazin", "target_new": {"str": "T\u00fcbingen"}, "old_answer": {"str": "Berlin, Germany"}, "seed": 42}}, {"loss_per_step": [16.243, 5.495, 0.898, 0.038, 0.015, 0.013, 0.011, 0.011, 0.012, 0.013, 0.013, 0.013, 0.012, 0.011, 0.01], "prob_new": [8.828978081965033e-08, 0.00410860450938344, 0.4074728488922119, 0.962795078754425, 0.984819233417511, 0.9874894022941589, 0.9887881875038147, 0.9889461398124695, 0.9883299469947815, 0.9875083565711975, 0.9870043396949768, 0.9871112108230591, 0.987815260887146, 0.9889156818389893, 0.9901893734931946], 
"prob_old": [0.7644492387771606, 0.3129282593727112, 0.22041726112365723, 0.1732933521270752, 0.1511005014181137, 0.1375582218170166, 0.13560853898525238, 0.13532255589962006, 0.1351167857646942, 0.1351233422756195, 0.13556089997291565, 0.13653063774108887, 0.1379946768283844, 0.13980114459991455, 0.14172795414924622], "prob_new_token": [8.828978081965033e-08, 0.00410860450938344, 0.4074728488922119, 0.962795078754425, 0.984819233417511, 0.9874894022941589, 0.9887881875038147, 0.9889461398124695, 0.9883299469947815, 0.9875083565711975, 0.9870043396949768, 0.9871112108230591, 0.987815260887146, 0.9889156818389893, 0.9901893734931946], "prob_old_token": [0.9343229532241821, 1.0034423212346155e-05, 9.740339237396256e-07, 1.2459638654149785e-08, 1.3508019147678851e-09, 9.579753657007473e-10, 1.0941653139084906e-09, 1.607320498919762e-09, 2.545184951685542e-09, 3.823341199193919e-09, 5.131050251350189e-09, 6.083970660597515e-09, 6.476217340178891e-09, 6.341223990347089e-09, 5.841642281012582e-09], "l1-model.layers.2.mlp.down_proj.weight": [90482.046875], "l2-model.layers.2.mlp.down_proj.weight": [15.426738739013672], "linf-model.layers.2.mlp.down_proj.weight": [0.006271051708608866], "request": {"prompt": "{} is employed in the location of", "subject": "Thilo Sarrazin", "target_new": {"str": "Lyon"}, "old_answer": {"str": "Berlin, Germany"}, "seed": 42}}, {"loss_per_step": [0.068, 0.0], "prob_new": [0.9343229532241821, 0.9999241232872009], "prob_old": [0.7644492387771606, 0.6759292483329773], "prob_new_token": [0.9343229532241821, 0.9999241232872009], "prob_old_token": [0.9343229532241821, 0.9999241232872009], "l1-model.layers.2.mlp.down_proj.weight": [22537.546875], "l2-model.layers.2.mlp.down_proj.weight": [3.3564412593841553], "linf-model.layers.2.mlp.down_proj.weight": [0.000500023365020752], "request": {"prompt": "{} is employed in the location of", "subject": "Thilo Sarrazin", "target_new": {"str": "Berlin"}, "old_answer": {"str": "Berlin, Germany"}, "seed": 42}}, {"loss_per_step": [14.093, 10.775, 0.884, 0.004], "prob_new": [7.574101914542553e-07, 2.091812166327145e-05, 0.4131314158439636, 0.9959153532981873], "prob_old": [0.7360422015190125, 0.25674182176589966, 0.19994470477104187, 0.2677799463272095], "prob_new_token": [7.574101914542553e-07, 2.091812166327145e-05, 0.4131314158439636, 0.9959153532981873], "prob_old_token": [0.9117863178253174, 3.0224276770240976e-07, 1.1784477749188227e-07, 4.476480852666498e-10], "l1-model.layers.2.mlp.down_proj.weight": [42758.6171875], "l2-model.layers.2.mlp.down_proj.weight": [6.895002365112305], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024847816675901], "request": {"prompt": "{} is employed in the location of", "subject": "Gregor Gysi", "target_new": {"str": "Lyon"}, "old_answer": {"str": "Berlin, Germany"}, "seed": 42}}, {"loss_per_step": [7.282, 11.051, 6.97, 0.347, 0.004], "prob_new": [0.0006878288695588708, 1.5867084584897384e-05, 0.0009400645503774285, 0.7070366144180298, 0.9959971904754639], "prob_old": [0.7360422015190125, 0.14090971648693085, 0.058848313987255096, 0.0442955419421196, 0.07827223837375641], "prob_new_token": [0.0006878288695588708, 1.5867084584897384e-05, 0.0009400645503774285, 0.7070366144180298, 0.9959971904754639], "prob_old_token": [0.9117863178253174, 7.566138151560153e-07, 1.090578052753699e-06, 6.046017006156035e-05, 2.7476417017169297e-06], "l1-model.layers.2.mlp.down_proj.weight": [48336.30859375], "l2-model.layers.2.mlp.down_proj.weight": [8.051534652709961], "linf-model.layers.2.mlp.down_proj.weight": 
[0.0020058276131749153], "request": {"prompt": "{} is employed in the location of", "subject": "Gregor Gysi", "target_new": {"str": "Dresden"}, "old_answer": {"str": "Berlin, Germany"}, "seed": 42}}, {"loss_per_step": [6.5, 4.847, 0.943, 0.034, 0.024, 0.01, 0.006], "prob_new": [0.32729029655456543, 0.3002736270427704, 0.598684549331665, 0.9669739603996277, 0.976622998714447, 0.9897091388702393, 0.9937634468078613], "prob_old": [0.7360422015190125, 0.2863539159297943, 0.14027920365333557, 0.21268469095230103, 0.19958147406578064, 0.18229500949382782, 0.16957268118858337], "prob_new_token": [6.832719373051077e-05, 0.0008125926251523197, 0.08235422521829605, 0.9132617712020874, 0.9376345276832581, 0.9734798669815063, 0.9837989807128906], "prob_old_token": [0.9117863178253174, 8.587091997469543e-07, 4.689746674557682e-06, 7.789732592300425e-08, 3.202698195536868e-08, 1.4150944416257971e-08, 1.0358365720719576e-08], "l1-model.layers.2.mlp.down_proj.weight": [61341.95703125], "l2-model.layers.2.mlp.down_proj.weight": [10.367626190185547], "linf-model.layers.2.mlp.down_proj.weight": [0.002967442385852337], "request": {"prompt": "{} is employed in the location of", "subject": "Gregor Gysi", "target_new": {"str": "Siena"}, "old_answer": {"str": "Berlin, Germany"}, "seed": 42}}, {"loss_per_step": [5.207, 3.805, 0.187, 0.003], "prob_new": [0.3335115313529968, 0.3356064260005951, 0.8531414866447449, 0.9965624809265137], "prob_old": [0.9384063482284546, 4.714848182629794e-05, 0.0004936575423926115, 9.770663922381573e-08], "prob_new_token": [0.00028724331059493124, 0.031415123492479324, 0.5870380401611328, 0.9978585243225098], "prob_old_token": [0.9384063482284546, 4.714848182629794e-05, 0.0004936575423926115, 9.770663922381573e-08], "l1-model.layers.2.mlp.down_proj.weight": [40942.84375], "l2-model.layers.2.mlp.down_proj.weight": [6.825730800628662], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024766325950623], "request": {"prompt": "{} is employed in the location of", "subject": "Paul Bocuse", "target_new": {"str": "T\u00fcbingen"}, "old_answer": {"str": "Lyon"}, "seed": 42}}, {"loss_per_step": [5.322, 3.442, 1.248, 0.046, 0.032, 0.017, 0.009], "prob_new": [0.3115661144256592, 0.3252892792224884, 0.43330785632133484, 0.9558736085891724, 0.9697472453117371, 0.983332633972168, 0.9913157224655151], "prob_old": [0.9384063482284546, 5.564235107158311e-05, 6.289021257543936e-05, 3.0765966130275046e-06, 9.945910051101237e-07, 2.1850304676718224e-07, 8.554059860443886e-08], "prob_new_token": [0.00011941250704694539, 0.011140485294163227, 0.10791022330522537, 0.8988814353942871, 0.9160930514335632, 0.9513611793518066, 0.9742870926856995], "prob_old_token": [0.9384063482284546, 5.564235107158311e-05, 6.289021257543936e-05, 3.0765966130275046e-06, 9.945910051101237e-07, 2.1850304676718224e-07, 8.554059860443886e-08], "l1-model.layers.2.mlp.down_proj.weight": [62046.19921875], "l2-model.layers.2.mlp.down_proj.weight": [10.3999605178833], "linf-model.layers.2.mlp.down_proj.weight": [0.0029911473393440247], "request": {"prompt": "{} is employed in the location of", "subject": "Paul Bocuse", "target_new": {"str": "Siena"}, "old_answer": {"str": "Lyon"}, "seed": 42}}, {"loss_per_step": [6.357, 3.387, 0.228, 0.079, 0.002], "prob_new": [0.4924412667751312, 0.2702249586582184, 0.8169232606887817, 0.9270104169845581, 0.9980771541595459], "prob_old": [0.9384063482284546, 1.5350273315561935e-05, 2.1801663024234585e-05, 1.446613509870076e-06, 3.417706864183856e-07], "prob_new_token": [3.0540434181602905e-06, 
0.0021224962547421455, 0.6352309584617615, 0.855059802532196, 0.9969412088394165], "prob_old_token": [0.9384063482284546, 1.5350273315561935e-05, 2.1801663024234585e-05, 1.446613509870076e-06, 3.417706864183856e-07], "l1-model.layers.2.mlp.down_proj.weight": [49342.7890625], "l2-model.layers.2.mlp.down_proj.weight": [8.210245132446289], "linf-model.layers.2.mlp.down_proj.weight": [0.002005811780691147], "request": {"prompt": "{} is employed in the location of", "subject": "Paul Bocuse", "target_new": {"str": "Basel"}, "old_answer": {"str": "Lyon"}, "seed": 42}}, {"loss_per_step": [6.81, 3.728, 3.152, 0.17, 0.034, 0.014, 0.007], "prob_new": [0.4923892319202423, 0.4961869418621063, 0.4952120780944824, 0.8556128740310669, 0.9667918086051941, 0.9857872724533081, 0.9929338693618774], "prob_old": [0.9664053320884705, 2.455085677866009e-06, 9.084398584491282e-07, 5.852075628354214e-05, 3.888857827405445e-05, 1.7500895410194062e-05, 5.529683221539017e-06], "prob_new_token": [1.2348584732535528e-06, 0.0005826123524457216, 0.0018511746311560273, 0.7127182483673096, 0.9348019361495972, 0.9727059602737427, 0.9869940280914307], "prob_old_token": [0.9664053320884705, 2.455085677866009e-06, 9.084398584491282e-07, 5.852075628354214e-05, 3.888857827405445e-05, 1.7500895410194062e-05, 5.529683221539017e-06], "l1-model.layers.2.mlp.down_proj.weight": [63860.7421875], "l2-model.layers.2.mlp.down_proj.weight": [10.512048721313477], "linf-model.layers.2.mlp.down_proj.weight": [0.0029715225100517273], "request": {"prompt": "{} is employed in the location of", "subject": "Roland Freisler", "target_new": {"str": "Geneva"}, "old_answer": {"str": "Berlin"}, "seed": 42}}, {"loss_per_step": [4.784, 4.393, 2.292, 0.464, 0.093, 0.03, 0.012, 0.007], "prob_new": [0.49776536226272583, 0.39491286873817444, 0.4901195168495178, 0.6954776644706726, 0.914341926574707, 0.9705491662025452, 0.9877363443374634, 0.9934682846069336], "prob_old": [0.9664053320884705, 3.658543573692441e-05, 0.0002493374631740153, 0.00027626287192106247, 3.19850119012699e-06, 1.7527602835798461e-07, 3.141860815958353e-08, 1.127441517922989e-08], "prob_new_token": [7.029492553556338e-05, 0.00019345151667948812, 0.010541737079620361, 0.39876821637153625, 0.8389032483100891, 0.9484874606132507, 0.9791167974472046, 0.988865077495575], "prob_old_token": [0.9664053320884705, 3.658543573692441e-05, 0.0002493374631740153, 0.00027626287192106247, 3.19850119012699e-06, 1.7527602835798461e-07, 3.141860815958353e-08, 1.127441517922989e-08], "l1-model.layers.2.mlp.down_proj.weight": [65108.28125], "l2-model.layers.2.mlp.down_proj.weight": [11.072760581970215], "linf-model.layers.2.mlp.down_proj.weight": [0.003433438017964363], "request": {"prompt": "{} is employed in the location of", "subject": "Roland Freisler", "target_new": {"str": "Mainz"}, "old_answer": {"str": "Berlin"}, "seed": 42}}, {"loss_per_step": [6.048, 3.782, 0.086, 0.0], "prob_new": [0.49945420026779175, 0.49951475858688354, 0.9212946891784668, 0.9996215105056763], "prob_old": [0.9664053320884705, 3.29150389006827e-05, 0.00022822478786110878, 3.48764871205276e-07], "prob_new_token": [5.584758127952227e-06, 0.000519135850481689, 0.8426483273506165, 0.9992794394493103], "prob_old_token": [0.9664053320884705, 3.29150389006827e-05, 0.00022822478786110878, 3.48764871205276e-07], "l1-model.layers.2.mlp.down_proj.weight": [43877.7265625], "l2-model.layers.2.mlp.down_proj.weight": [7.028719902038574], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024766325950623], "request": {"prompt": "{} is employed in the location of",
"subject": "Roland Freisler", "target_new": {"str": "Luxembourg"}, "old_answer": {"str": "Berlin"}, "seed": 42}}, {"loss_per_step": [3.266, 2.013, 0.722, 0.121, 0.047, 0.019, 0.009], "prob_new": [0.569279670715332, 0.5370926260948181, 0.5912081003189087, 0.8896960616111755, 0.9544503688812256, 0.9810874462127686, 0.9905686378479004], "prob_old": [0.9766571521759033, 0.46584534645080566, 0.504686176776886, 0.49983081221580505, 0.4991081953048706, 0.49919936060905457, 0.4993842840194702], "prob_new_token": [7.691278733545914e-05, 0.0038069637957960367, 0.19474969804286957, 0.9141984581947327, 0.9703165292739868, 0.9906378984451294, 0.9961845278739929], "prob_old_token": [0.9549127817153931, 0.0030598200391978025, 0.014329961501061916, 0.002482765819877386, 0.00043515278957784176, 0.00012139354657847434, 4.7218949475791305e-05], "l1-model.layers.2.mlp.down_proj.weight": [65720.0859375], "l2-model.layers.2.mlp.down_proj.weight": [10.644186019897461], "linf-model.layers.2.mlp.down_proj.weight": [0.002990318462252617], "request": {"prompt": "{} is employed in the location of", "subject": "Carl von Linde", "target_new": {"str": "Barcelona, Spain"}, "old_answer": {"str": "Munich"}, "seed": 42}}, {"loss_per_step": [12.777, 9.481, 3.799, 0.423, 0.013, 0.007], "prob_new": [2.825985802701325e-06, 7.627248123753816e-05, 0.022395439445972443, 0.6550290584564209, 0.9866107702255249, 0.992597222328186], "prob_old": [0.9766571521759033, 0.45889371633529663, 0.4694194197654724, 0.4427300691604614, 0.4761871099472046, 0.4884321093559265], "prob_new_token": [2.825985802701325e-06, 7.627248123753816e-05, 0.022395439445972443, 0.6550290584564209, 0.9866107702255249, 0.992597222328186], "prob_old_token": [0.9549127817153931, 0.000208937082788907, 0.0013023149222135544, 0.0024164298083633184, 2.8963466320419684e-05, 3.1628628676116932e-06], "l1-model.layers.2.mlp.down_proj.weight": [55278.1953125], "l2-model.layers.2.mlp.down_proj.weight": [9.325950622558594], "linf-model.layers.2.mlp.down_proj.weight": [0.002441766671836376], "request": {"prompt": "{} is employed in the location of", "subject": "Carl von Linde", "target_new": {"str": "Rome"}, "old_answer": {"str": "Munich"}, "seed": 42}}, {"loss_per_step": [8.444, 11.374, 5.319, 0.354, 0.006], "prob_new": [0.00021528614161070436, 1.1488745258247945e-05, 0.004899661522358656, 0.7019835114479065, 0.9936731457710266], "prob_old": [0.9766571521759033, 0.43882888555526733, 0.4874240756034851, 0.4867190718650818, 0.4900546967983246], "prob_new_token": [0.00021528614161070436, 1.1488745258247945e-05, 0.004899661522358656, 0.7019835114479065, 0.9936731457710266], "prob_old_token": [0.9549127817153931, 1.4625131825596327e-06, 0.0007000558543950319, 0.0022101253271102905, 8.63376772031188e-05], "l1-model.layers.2.mlp.down_proj.weight": [48856.2734375], "l2-model.layers.2.mlp.down_proj.weight": [8.103960037231445], "linf-model.layers.2.mlp.down_proj.weight": [0.002005793619900942], "request": {"prompt": "{} is employed in the location of", "subject": "Carl von Linde", "target_new": {"str": "Dresden"}, "old_answer": {"str": "Munich"}, "seed": 42}}, {"loss_per_step": [5.63, 2.626, 0.459, 0.058, 0.023, 0.011, 0.007], "prob_new": [0.48300620913505554, 0.4906895160675049, 0.6959317922592163, 0.9445213675498962, 0.9776068925857544, 0.9891825318336487, 0.9932825565338135], "prob_old": [0.9764662981033325, 0.4995720088481903, 0.49982982873916626, 0.49977511167526245, 0.4997388422489166, 0.4996671676635742, 0.499543696641922], "prob_new_token":
[1.3325500731298234e-05, 0.005370356608182192, 0.40502744913101196, 0.8991902470588684, 0.9640282392501831, 0.9854177236557007, 0.9920092821121216], "prob_old_token": [0.9531065225601196, 5.536366006708704e-05, 0.0002681265468709171, 9.769387361302506e-06, 1.5568460867143585e-06, 4.37065494907074e-07, 2.0960614222076401e-07], "l1-model.layers.2.mlp.down_proj.weight": [64928.65625], "l2-model.layers.2.mlp.down_proj.weight": [10.615958213806152], "linf-model.layers.2.mlp.down_proj.weight": [0.0029727788642048836], "request": {"prompt": "{} is employed in the location of", "subject": "Jean Asselborn", "target_new": {"str": "Basel"}, "old_answer": {"str": "Luxembourg"}, "seed": 42}}, {"loss_per_step": [11.965, 7.047, 3.58, 0.172, 0.068, 0.005], "prob_new": [6.362483873090241e-06, 0.0008698496385477483, 0.027862364426255226, 0.8420864939689636, 0.9338089823722839, 0.9950262904167175], "prob_old": [0.9764662981033325, 0.4992614984512329, 0.49826180934906006, 0.4996301531791687, 0.49959874153137207, 0.49972620606422424], "prob_new_token": [6.362483873090241e-06, 0.0008698496385477483, 0.027862364426255226, 0.8420864939689636, 0.9338089823722839, 0.9950262904167175], "prob_old_token": [0.9531065225601196, 8.149985660566017e-05, 0.00022965815151110291, 1.2188324944872875e-05, 5.085071734356461e-06, 7.760626203889842e-07], "l1-model.layers.2.mlp.down_proj.weight": [56570.265625], "l2-model.layers.2.mlp.down_proj.weight": [9.389945030212402], "linf-model.layers.2.mlp.down_proj.weight": [0.0025095511227846146], "request": {"prompt": "{} is employed in the location of", "subject": "Jean Asselborn", "target_new": {"str": "Lyon"}, "old_answer": {"str": "Luxembourg"}, "seed": 42}}, {"loss_per_step": [5.336, 3.668, 1.987, 1.779, 0.102, 0.023, 0.014, 0.018, 0.021, 0.014, 0.006], "prob_new": [0.49632468819618225, 0.4858088195323944, 0.5072832107543945, 0.5116775631904602, 0.9077653884887695, 0.9770671129226685, 0.9859734177589417, 0.9824998378753662, 0.9793765544891357, 0.9860422611236572, 0.9939693808555603], "prob_old": [0.9764662981033325, 0.4981466233730316, 0.4992623031139374, 0.49974358081817627, 0.49957436323165894, 0.49968665838241577, 0.49968624114990234, 0.49953025579452515, 0.49929365515708923, 0.4991520345211029, 0.4989445209503174], "prob_new_token": [2.334475539100822e-05, 0.0006707499851472676, 0.018872009590268135, 0.028672700747847557, 0.8174694776535034, 0.955923855304718, 0.9734593629837036, 0.9668611884117126, 0.9617511630058289, 0.9765864014625549, 0.9926649332046509], "prob_old_token": [0.9531065225601196, 1.4034817468200345e-05, 0.00012541432806756347, 0.0001746167690725997, 1.0068069059343543e-05, 9.698789654066786e-06, 1.3550522453442682e-05, 2.966383362945635e-05, 4.038053521071561e-05, 2.3404161765938625e-05, 6.463923455157783e-06], "l1-model.layers.2.mlp.down_proj.weight": [78554.2890625], "l2-model.layers.2.mlp.down_proj.weight": [13.309393882751465], "linf-model.layers.2.mlp.down_proj.weight": [0.004756847396492958], "request": {"prompt": "{} is employed in the location of", "subject": "Jean Asselborn", "target_new": {"str": "Geneva"}, "old_answer": {"str": "Luxembourg"}, "seed": 42}}, {"loss_per_step": [6.486, 3.68, 0.69, 0.018, 0.007], "prob_new": [0.47778216004371643, 0.4751864969730377, 0.6189488768577576, 0.9821717739105225, 0.9932651519775391], "prob_old": [0.9528805017471313, 0.47527891397476196, 0.41393840312957764, 0.46221432089805603, 0.4791032373905182], "prob_new_token": [2.4327391656697728e-06, 0.000669593398924917, 0.25634947419166565, 0.9754205942153931, 
0.9960487484931946], "prob_old_token": [0.9064145088195801, 0.00015519409498665482, 0.0002016959770116955, 1.7625015971134417e-06, 1.1960358392570924e-07], "l1-model.layers.2.mlp.down_proj.weight": [47822.7265625], "l2-model.layers.2.mlp.down_proj.weight": [8.115809440612793], "linf-model.layers.2.mlp.down_proj.weight": [0.002002280205488205], "request": {"prompt": "{} is employed in the location of", "subject": "Franz Josef Strau\u00df", "target_new": {"str": "Basel"}, "old_answer": {"str": "Munich"}, "seed": 42}}, {"loss_per_step": [2.713, 1.119, 0.59, 0.19, 0.031, 0.568, 0.039, 0.024, 0.014, 0.014, 0.011, 0.008], "prob_new": [0.6072892546653748, 0.6628215909004211, 0.8474412560462952, 0.8565369844436646, 0.9708943963050842, 0.8283495306968689, 0.9634435772895813, 0.9773132801055908, 0.9864794015884399, 0.9866483211517334, 0.9887319803237915, 0.9922552704811096], "prob_old": [0.9528805017471313, 0.5071850419044495, 0.4966961145401001, 0.481417715549469, 0.4789910614490509, 0.4848604202270508, 0.4874807894229889, 0.48649317026138306, 0.48306331038475037, 0.47759196162223816, 0.4705192446708679, 0.4624149203300476], "prob_new_token": [2.5011349862325005e-05, 0.005506226792931557, 0.01749512180685997, 0.5606921911239624, 0.8450945615768433, 0.9135315418243408, 0.9252567291259766, 0.9397558569908142, 0.9537726640701294, 0.9649498462677002, 0.9729690551757812, 0.9786338806152344], "prob_old_token": [0.9064145088195801, 0.016930734738707542, 0.0019281349377706647, 6.032551027601585e-05, 7.360991730820388e-06, 1.1355552942404756e-06, 3.156605714593752e-07, 1.4523661207022087e-07, 7.808535684716844e-08, 4.3076386901930164e-08, 2.3916381763910977e-08, 1.3911586549397725e-08], "l1-model.layers.2.mlp.down_proj.weight": [72561.5625], "l2-model.layers.2.mlp.down_proj.weight": [13.054561614990234], "linf-model.layers.2.mlp.down_proj.weight": [0.005220329388976097], "request": {"prompt": "{} is employed in the location of", "subject": "Franz Josef Strau\u00df", "target_new": {"str": "Albuquerque, New Mexico"}, "old_answer": {"str": "Munich"}, "seed": 42}}, {"loss_per_step": [4.496, 2.764, 0.427, 0.003], "prob_new": [0.4436042010784149, 0.4332691729068756, 0.7128201723098755, 0.9969089031219482], "prob_old": [0.9528805017471313, 0.45767465233802795, 0.4987226724624634, 0.4889257252216339], "prob_new_token": [0.00014025179552845657, 0.004611234646290541, 0.4259066879749298, 0.9938485622406006], "prob_old_token": [0.9064145088195801, 0.00291642383672297, 0.00015843533037696034, 2.85002641930987e-07], "l1-model.layers.2.mlp.down_proj.weight": [37087.87890625], "l2-model.layers.2.mlp.down_proj.weight": [6.485838890075684], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024798922240734], "request": {"prompt": "{} is employed in the location of", "subject": "Franz Josef Strau\u00df", "target_new": {"str": "Mainz"}, "old_answer": {"str": "Munich"}, "seed": 42}}, {"loss_per_step": [2.103, 1.417, 0.621, 0.049, 0.014, 0.006], "prob_new": [0.6555818915367126, 0.6405190229415894, 0.8351609706878662, 0.9544501304626465, 0.9866819977760315, 0.9940948486328125], "prob_old": [0.9733995795249939, 6.4458131419087294e-06, 0.006975933909416199, 0.006725290324538946, 0.00018596422160044312, 3.57962962880265e-05], "prob_new_token": [2.3411574147758074e-05, 0.0005893968045711517, 0.015451330691576004, 0.8102508187294006, 0.9716715812683105, 0.9805561900138855], "prob_old_token": [0.9733995795249939, 6.4458131419087294e-06, 0.006975933909416199, 0.006725290324538946, 0.00018596422160044312, 3.57962962880265e-05], 
"l1-model.layers.2.mlp.down_proj.weight": [54255.109375], "l2-model.layers.2.mlp.down_proj.weight": [9.289346694946289], "linf-model.layers.2.mlp.down_proj.weight": [0.0025055985897779465], "request": {"prompt": "{} is employed in the location of", "subject": "Romulus", "target_new": {"str": "Albuquerque, New Mexico"}, "old_answer": {"str": "Rome"}, "seed": 42}}, {"loss_per_step": [4.071, 2.111, 0.211, 0.077, 0.038, 0.028, 0.022, 0.017, 0.014, 0.014, 0.014, 0.012, 0.01, 0.008], "prob_new": [0.5962883234024048, 0.46082064509391785, 0.825762152671814, 0.9276449084281921, 0.9626517295837402, 0.9727433919906616, 0.9784249067306519, 0.9832486510276794, 0.986243724822998, 0.9860538244247437, 0.9858367443084717, 0.9876376986503601, 0.9898614883422852, 0.9918999671936035], "prob_old": [0.9733995795249939, 0.001460057101212442, 0.00112072192132473, 0.000400426855776459, 0.00024973106337711215, 0.0001706129260128364, 0.00010918165207840502, 6.499680603155866e-05, 4.8270307161146775e-05, 7.287457992788404e-05, 7.227562309708446e-05, 5.958069232292473e-05, 4.571269528241828e-05, 3.3789077861001715e-05], "prob_new_token": [6.243386906135129e-06, 0.00434160465374589, 0.8868494033813477, 0.9521244764328003, 0.959083080291748, 0.9612895846366882, 0.9663078188896179, 0.9729889631271362, 0.9783443212509155, 0.9813063144683838, 0.9813750386238098, 0.9828600883483887, 0.9852136969566345, 0.987673819065094], "prob_old_token": [0.9733995795249939, 0.001460057101212442, 0.00112072192132473, 0.000400426855776459, 0.00024973106337711215, 0.0001706129260128364, 0.00010918165207840502, 6.499680603155866e-05, 4.8270307161146775e-05, 7.287457992788404e-05, 7.227562309708446e-05, 5.958069232292473e-05, 4.571269528241828e-05, 3.3789077861001715e-05], "l1-model.layers.2.mlp.down_proj.weight": [87313.046875], "l2-model.layers.2.mlp.down_proj.weight": [14.924736976623535], "linf-model.layers.2.mlp.down_proj.weight": [0.006402904167771339], "request": {"prompt": "{} is employed in the location of", "subject": "Romulus", "target_new": {"str": "Barcelona, Spain"}, "old_answer": {"str": "Rome"}, "seed": 42}}, {"loss_per_step": [7.243, 1.961, 1.384, 0.046, 0.033, 0.003], "prob_new": [0.27707555890083313, 0.5075520873069763, 0.531230092048645, 0.9558049440383911, 0.9682788848876953, 0.996793270111084], "prob_old": [0.9733995795249939, 0.07408187538385391, 0.0003662360832095146, 7.764127803966403e-05, 6.805354496464133e-05, 6.157340067147743e-06], "prob_new_token": [9.228345447809261e-07, 0.019886737689375877, 0.06281261891126633, 0.9121056199073792, 0.9374791979789734, 0.9940083622932434], "prob_old_token": [0.9733995795249939, 0.07408187538385391, 0.0003662360832095146, 7.764127803966403e-05, 6.805354496464133e-05, 6.157340067147743e-06], "l1-model.layers.2.mlp.down_proj.weight": [53367.7578125], "l2-model.layers.2.mlp.down_proj.weight": [9.142062187194824], "linf-model.layers.2.mlp.down_proj.weight": [0.0025082253850996494], "request": {"prompt": "{} is employed in the location of", "subject": "Romulus", "target_new": {"str": "Mainz"}, "old_answer": {"str": "Rome"}, "seed": 42}}, {"loss_per_step": [6.571, 4.884, 2.499, 0.865, 0.009], "prob_new": [0.4702664613723755, 0.24414339661598206, 0.4668581783771515, 0.5742429494857788, 0.9909501075744629], "prob_old": [0.9724620580673218, 0.45496389269828796, 0.13766427338123322, 0.09192249178886414, 0.24453428387641907], "prob_new_token": [2.0846814550168347e-06, 0.00011719582107616588, 0.007290680892765522, 0.18370775878429413, 0.987533688545227], "prob_old_token": 
[0.9455701112747192, 0.0006461329176090658, 0.0005805291584692895, 0.00043894193368032575, 3.464810652076267e-05], "l1-model.layers.2.mlp.down_proj.weight": [48318.8984375], "l2-model.layers.2.mlp.down_proj.weight": [8.033061981201172], "linf-model.layers.2.mlp.down_proj.weight": [0.0020058220252394676], "request": {"prompt": "{} is employed in the location of", "subject": "Ludwig I of Bavaria", "target_new": {"str": "Basel"}, "old_answer": {"str": "Munich"}, "seed": 42}}, {"loss_per_step": [2.575, 1.676, 0.971, 0.296, 0.033, 0.016, 0.003], "prob_new": [0.6058613657951355, 0.5941038131713867, 0.7603882551193237, 0.8705951571464539, 0.969845175743103, 0.9842509031295776, 0.9968190789222717], "prob_old": [0.9724620580673218, 0.48751845955848694, 0.42398369312286377, 0.49829238653182983, 0.22847148776054382, 0.09475431591272354, 0.076900415122509], "prob_new_token": [2.9897177228122018e-05, 0.0008561454596929252, 0.002544688992202282, 0.1311628520488739, 0.8070276975631714, 0.9304596185684204, 0.9845243692398071], "prob_old_token": [0.9455701112747192, 0.00030569458613172174, 0.006874118000268936, 0.03008505515754223, 0.000165455843671225, 1.2351229088380933e-05, 1.1385274092390318e-06], "l1-model.layers.2.mlp.down_proj.weight": [60500.40625], "l2-model.layers.2.mlp.down_proj.weight": [10.262862205505371], "linf-model.layers.2.mlp.down_proj.weight": [0.003007567021995783], "request": {"prompt": "{} is employed in the location of", "subject": "Ludwig I of Bavaria", "target_new": {"str": "Albuquerque, New Mexico"}, "old_answer": {"str": "Munich"}, "seed": 42}}, {"loss_per_step": [5.016, 5.317, 2.839, 1.102, 0.255, 0.073, 0.009], "prob_new": [0.33386874198913574, 0.32386302947998047, 0.3616099953651428, 0.6703544855117798, 0.8206207752227783, 0.9335813522338867, 0.9909786581993103], "prob_old": [0.9724620580673218, 0.3960351347923279, 0.45139279961586, 0.4794062674045563, 0.47211340069770813, 0.48899388313293457, 0.4927980601787567], "prob_new_token": [9.381613199366257e-05, 0.000603680731728673, 0.0023809594567865133, 0.03770383447408676, 0.4684551954269409, 0.8079574704170227, 0.9753995537757874], "prob_old_token": [0.9455701112747192, 0.002096089767292142, 0.006432346533983946, 0.01519272942095995, 0.014189381152391434, 0.009625589475035667, 0.0008996334508992732], "l1-model.layers.2.mlp.down_proj.weight": [59491.6015625], "l2-model.layers.2.mlp.down_proj.weight": [10.176125526428223], "linf-model.layers.2.mlp.down_proj.weight": [0.003012748435139656], "request": {"prompt": "{} is employed in the location of", "subject": "Ludwig I of Bavaria", "target_new": {"str": "T\u00fcbingen"}, "old_answer": {"str": "Munich"}, "seed": 42}}, {"loss_per_step": [14.399, 4.517, 0.113, 0.001], "prob_new": [5.577080059993023e-07, 0.010926071554422379, 0.8928268551826477, 0.998930037021637], "prob_old": [0.9686195850372314, 0.3987697958946228, 0.3574388027191162, 0.5375990271568298], "prob_new_token": [5.577080059993023e-07, 0.010926071554422379, 0.8928268551826477, 0.998930037021637], "prob_old_token": [0.9083859920501709, 0.006430037785321474, 0.0006538880406878889, 4.355320015747566e-06], "l1-model.layers.2.mlp.down_proj.weight": [40744.3046875], "l2-model.layers.2.mlp.down_proj.weight": [6.8174591064453125], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024745371192694], "request": {"prompt": "{} is employed in the location of", "subject": "Duccio di Buoninsegna", "target_new": {"str": "Lyon"}, "old_answer": {"str": "Siena"}, "seed": 42}}, {"loss_per_step": [7.282, 2.969, 0.136, 0.015, 0.008], 
"prob_new": [0.3339768052101135, 0.34076711535453796, 0.8782808780670166, 0.9849714040756226, 0.991700291633606], "prob_old": [0.9686195850372314, 0.4014849066734314, 0.5173925161361694, 0.5004599094390869, 0.47839245200157166], "prob_new_token": [0.0021575745195150375, 0.019515665248036385, 0.874238908290863, 0.9572149515151978, 0.975441038608551], "prob_old_token": [0.9083859920501709, 0.005099542438983917, 0.00019120702927466482, 9.387807222083211e-05, 5.710338882636279e-05], "l1-model.layers.2.mlp.down_proj.weight": [49204.9609375], "l2-model.layers.2.mlp.down_proj.weight": [8.24528980255127], "linf-model.layers.2.mlp.down_proj.weight": [0.0020015863701701164], "request": {"prompt": "{} is employed in the location of", "subject": "Duccio di Buoninsegna", "target_new": {"str": "T\u00fcbingen"}, "old_answer": {"str": "Siena"}, "seed": 42}}, {"loss_per_step": [8.45, 2.263, 0.018, 0.004], "prob_new": [0.0002139986027032137, 0.1040366142988205, 0.9817943572998047, 0.996159017086029], "prob_old": [0.9686195850372314, 0.3621066212654114, 0.36358439922332764, 0.36556005477905273], "prob_new_token": [0.0002139986027032137, 0.1040366142988205, 0.9817943572998047, 0.996159017086029], "prob_old_token": [0.9083859920501709, 0.0006397033575922251, 2.103482802340295e-05, 5.078185495221987e-06], "l1-model.layers.2.mlp.down_proj.weight": [43115.24609375], "l2-model.layers.2.mlp.down_proj.weight": [6.976617813110352], "linf-model.layers.2.mlp.down_proj.weight": [0.001502377912402153], "request": {"prompt": "{} is employed in the location of", "subject": "Duccio di Buoninsegna", "target_new": {"str": "Rome"}, "old_answer": {"str": "Siena"}, "seed": 42}}, {"loss_per_step": [6.784, 6.528, 2.719, 0.95, 0.162, 0.037, 0.009], "prob_new": [0.1568821519613266, 0.3557092845439911, 0.4979024827480316, 0.5737717151641846, 0.8616869449615479, 0.9644943475723267, 0.9907379746437073], "prob_old": [0.982763946056366, 6.7133000811736565e-06, 0.00013621323159895837, 6.956600554985926e-05, 1.1107485988759436e-05, 1.2643967011172208e-06, 1.4152058724903327e-07], "prob_new_token": [4.084867669007508e-06, 3.0059404707571957e-06, 0.0043814098462462425, 0.15002208948135376, 0.7242755889892578, 0.9295961260795593, 0.9818578362464905], "prob_old_token": [0.982763946056366, 6.7133000811736565e-06, 0.00013621323159895837, 6.956600554985926e-05, 1.1107485988759436e-05, 1.2643967011172208e-06, 1.4152058724903327e-07], "l1-model.layers.2.mlp.down_proj.weight": [61042.359375], "l2-model.layers.2.mlp.down_proj.weight": [10.37825870513916], "linf-model.layers.2.mlp.down_proj.weight": [0.002978675067424774], "request": {"prompt": "{} is employed in the location of", "subject": "Clement I", "target_new": {"str": "Basel"}, "old_answer": {"str": "Rome"}, "seed": 42}}, {"loss_per_step": [8.035, 5.466, 0.491, 0.042, 0.011, 0.004], "prob_new": [0.0003239733923692256, 0.004226887132972479, 0.6117391586303711, 0.9585880637168884, 0.989236056804657, 0.9956070780754089], "prob_old": [0.982763946056366, 2.974048584292177e-05, 0.00022140986402519047, 8.22713536763331e-06, 9.505580464974628e-07, 1.8670138501875044e-07], "prob_new_token": [0.0003239733923692256, 0.004226887132972479, 0.6117391586303711, 0.9585880637168884, 0.989236056804657, 0.9956070780754089], "prob_old_token": [0.982763946056366, 2.974048584292177e-05, 0.00022140986402519047, 8.22713536763331e-06, 9.505580464974628e-07, 1.8670138501875044e-07], "l1-model.layers.2.mlp.down_proj.weight": [53918.6953125], "l2-model.layers.2.mlp.down_proj.weight": [9.284485816955566], 
"linf-model.layers.2.mlp.down_proj.weight": [0.002464964985847473], "request": {"prompt": "{} is employed in the location of", "subject": "Clement I", "target_new": {"str": "Lyon"}, "old_answer": {"str": "Rome"}, "seed": 42}}, {"loss_per_step": [7.185, 4.738, 1.323, 0.023, 0.008], "prob_new": [0.4997584819793701, 0.49936360120773315, 0.5352307558059692, 0.9779112339019775, 0.9918004870414734], "prob_old": [0.982763946056366, 1.0154545634577516e-05, 4.241596980136819e-05, 2.732391749304952e-06, 6.564234809047775e-07], "prob_new_token": [5.744815894104249e-07, 7.676066888961941e-05, 0.07095005363225937, 0.9563444256782532, 0.9842235445976257], "prob_old_token": [0.982763946056366, 1.0154545634577516e-05, 4.241596980136819e-05, 2.732391749304952e-06, 6.564234809047775e-07], "l1-model.layers.2.mlp.down_proj.weight": [45985.0546875], "l2-model.layers.2.mlp.down_proj.weight": [7.9602580070495605], "linf-model.layers.2.mlp.down_proj.weight": [0.0019997693598270416], "request": {"prompt": "{} is employed in the location of", "subject": "Clement I", "target_new": {"str": "Luxembourg"}, "old_answer": {"str": "Rome"}, "seed": 42}}, {"loss_per_step": [4.713, 5.334, 3.472, 1.99, 0.708, 0.194, 0.041, 0.017, 0.01], "prob_new": [0.330953449010849, 0.31921541690826416, 0.3350282311439514, 0.4704783856868744, 0.67489093542099, 0.8527286052703857, 0.9609745740890503, 0.9831643104553223, 0.9905098676681519], "prob_old": [0.9055588245391846, 0.32556968927383423, 0.3308267593383789, 0.33368393778800964, 0.33309876918792725, 0.33370035886764526, 0.33334311842918396, 0.3327736556529999, 0.33230146765708923], "prob_new_token": [0.00012425050954334438, 0.0006613832665607333, 0.001802050624974072, 0.006274401675909758, 0.13417713344097137, 0.5586186647415161, 0.8832852840423584, 0.949761152267456, 0.9717203974723816], "prob_old_token": [0.9061473608016968, 0.0004346940550021827, 0.001685633440501988, 0.00127565732691437, 0.0005263132625259459, 0.0019413791596889496, 0.001415818347595632, 0.0006538238376379013, 0.0002556859399192035], "l1-model.layers.2.mlp.down_proj.weight": [76077.859375], "l2-model.layers.2.mlp.down_proj.weight": [12.431925773620605], "linf-model.layers.2.mlp.down_proj.weight": [0.003908578306436539], "request": {"prompt": "{} is employed in the location of", "subject": "Ferdinand Christian Baur", "target_new": {"str": "Siena"}, "old_answer": {"str": "T\u00fcbingen"}, "seed": 42}}, {"loss_per_step": [1.927, 1.585, 0.82, 0.369, 0.006], "prob_new": [0.6580241322517395, 0.6729074120521545, 0.7993633151054382, 0.8529409170150757, 0.9939115047454834], "prob_old": [0.9055588245391846, 0.330097496509552, 0.33386942744255066, 0.3383760154247284, 0.3334697484970093], "prob_new_token": [0.0010012241546064615, 0.0006846851320005953, 0.0050257002003490925, 0.08468033373355865, 0.9842509031295776], "prob_old_token": [0.9061473608016968, 0.0005027464358136058, 0.0013378605945035815, 0.01390511728823185, 0.001396551844663918], "l1-model.layers.2.mlp.down_proj.weight": [51420.19140625], "l2-model.layers.2.mlp.down_proj.weight": [8.28032112121582], "linf-model.layers.2.mlp.down_proj.weight": [0.002005734946578741], "request": {"prompt": "{} is employed in the location of", "subject": "Ferdinand Christian Baur", "target_new": {"str": "Albuquerque, New Mexico"}, "old_answer": {"str": "T\u00fcbingen"}, "seed": 42}}, {"loss_per_step": [10.298, 12.608, 5.735, 0.734, 0.006], "prob_new": [3.3698586776154116e-05, 3.343584467074834e-06, 0.0032297668512910604, 0.48010388016700745, 0.9937722682952881], "prob_old": 
[0.9055588245391846, 0.32437315583229065, 0.3326079547405243, 0.33897504210472107, 0.3427731692790985], "prob_new_token": [3.3698586776154116e-05, 3.343584467074834e-06, 0.0032297668512910604, 0.48010388016700745, 0.9937722682952881], "prob_old_token": [0.9061473608016968, 0.0002354792959522456, 0.0011607096530497074, 0.0009325332357548177, 1.4795601600781083e-05], "l1-model.layers.2.mlp.down_proj.weight": [51885.31640625], "l2-model.layers.2.mlp.down_proj.weight": [8.315462112426758], "linf-model.layers.2.mlp.down_proj.weight": [0.002005783375352621], "request": {"prompt": "{} is employed in the location of", "subject": "Ferdinand Christian Baur", "target_new": {"str": "Dresden"}, "old_answer": {"str": "T\u00fcbingen"}, "seed": 42}}, {"loss_per_step": [10.604, 10.104, 2.196, 0.007], "prob_new": [2.4816756194923073e-05, 4.0911971154855564e-05, 0.11129853129386902, 0.9926850199699402], "prob_old": [0.9578900337219238, 0.0179485734552145, 0.3224341571331024, 0.3470301330089569], "prob_new_token": [2.4816756194923073e-05, 4.0911971154855564e-05, 0.11129853129386902, 0.9926850199699402], "prob_old_token": [0.9159052968025208, 1.5385762708319817e-06, 4.972570877725957e-06, 1.9394443029341346e-07], "l1-model.layers.2.mlp.down_proj.weight": [40803.50390625], "l2-model.layers.2.mlp.down_proj.weight": [6.762923717498779], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024840831756592], "request": {"prompt": "{} is employed in the location of", "subject": "Johannes Gutenberg", "target_new": {"str": "Lyon"}, "old_answer": {"str": "Mainz"}, "seed": 42}}, {"loss_per_step": [5.066, 2.903, 1.397, 0.71, 0.162, 0.024, 0.008], "prob_new": [0.3298782706260681, 0.43482255935668945, 0.6659399271011353, 0.7050024271011353, 0.8711361885070801, 0.9766616225242615, 0.9924535155296326], "prob_old": [0.9578900337219238, 0.498091459274292, 0.49899375438690186, 0.4995359778404236, 0.49915239214897156, 0.4992043077945709, 0.4991311728954315], "prob_new_token": [3.596622991608456e-05, 0.0005093471263535321, 0.01542424876242876, 0.11938901245594025, 0.6173006296157837, 0.9340460300445557, 0.9816732406616211], "prob_old_token": [0.9159052968025208, 0.0001100744993891567, 0.0019195942441001534, 0.0007970565930008888, 0.00012345360300969332, 2.4709162971703336e-05, 5.9281856010784395e-06], "l1-model.layers.2.mlp.down_proj.weight": [57727.5546875], "l2-model.layers.2.mlp.down_proj.weight": [10.095582962036133], "linf-model.layers.2.mlp.down_proj.weight": [0.002978529781103134], "request": {"prompt": "{} is employed in the location of", "subject": "Johannes Gutenberg", "target_new": {"str": "Siena"}, "old_answer": {"str": "Mainz"}, "seed": 42}}, {"loss_per_step": [4.675, 3.589, 0.593, 0.031, 0.036, 0.032, 0.019, 0.012, 0.008], "prob_new": [0.5085265636444092, 0.5085819959640503, 0.6787349581718445, 0.9692467451095581, 0.9648888111114502, 0.9687886238098145, 0.9808289408683777, 0.9882428050041199, 0.9919823408126831], "prob_old": [0.9578900337219238, 0.0025302430149167776, 0.017987966537475586, 0.040729280561208725, 0.064727284014225, 0.13313603401184082, 0.15763619542121887, 0.1666126251220703, 0.17738847434520721], "prob_new_token": [1.4072728617975372e-06, 3.8362017221516e-05, 0.20175419747829437, 0.9897263646125793, 0.9762682318687439, 0.9635292291641235, 0.9739185571670532, 0.9844207167625427, 0.9901286959648132], "prob_old_token": [0.9159052968025208, 5.995557330606971e-07, 1.4402202396013308e-05, 2.3770307961967774e-06, 1.392641479469603e-06, 1.0368022458351334e-06, 6.760764108548756e-07, 4.370571673462109e-07, 
2.721338034916698e-07], "l1-model.layers.2.mlp.down_proj.weight": [72273.921875], "l2-model.layers.2.mlp.down_proj.weight": [12.229621887207031], "linf-model.layers.2.mlp.down_proj.weight": [0.004000983200967312], "request": {"prompt": "{} is employed in the location of", "subject": "Johannes Gutenberg", "target_new": {"str": "Barcelona, Spain"}, "old_answer": {"str": "Mainz"}, "seed": 42}}, {"loss_per_step": [3.967, 1.6, 0.745, 0.301, 0.03, 0.013, 0.007], "prob_new": [0.5878658294677734, 0.748384416103363, 0.8014259338378906, 0.8405012488365173, 0.9719163179397583, 0.9872617721557617, 0.9929018020629883], "prob_old": [0.9836869239807129, 0.8514506220817566, 0.781193196773529, 0.7807801961898804, 0.7498694062232971, 0.7494466304779053, 0.7489009499549866], "prob_new_token": [1.37012065692943e-07, 0.00044141151010990143, 0.024553166702389717, 0.22840029001235962, 0.8779576420783997, 0.9557445645332336, 0.9800873398780823], "prob_old_token": [0.9354346990585327, 0.41003990173339844, 0.1284824013710022, 0.12396078556776047, 0.0006772735505364835, 2.6338864699937403e-05, 4.597512088366784e-06], "l1-model.layers.2.mlp.down_proj.weight": [57928.78125], "l2-model.layers.2.mlp.down_proj.weight": [10.092843055725098], "linf-model.layers.2.mlp.down_proj.weight": [0.002993335947394371], "request": {"prompt": "{} is held by", "subject": "Prime Minister of Canada", "target_new": {"str": "Mahmoud Abbas"}, "old_answer": {"str": "Justin Trudeau"}, "seed": 42}}, {"loss_per_step": [5.202, 2.411, 1.844, 0.542, 0.047, 0.012, 0.007], "prob_new": [0.18823932111263275, 0.6622666716575623, 0.7269732356071472, 0.8061249852180481, 0.9565567970275879, 0.9877118468284607, 0.9930227398872375], "prob_old": [0.9836869239807129, 0.7577598094940186, 0.9586328268051147, 0.7558294534683228, 0.7465152144432068, 0.7193194627761841, 0.5998685956001282], "prob_new_token": [5.17583828241186e-07, 1.374954626953695e-05, 0.00015563072520308197, 0.06916506588459015, 0.8187153339385986, 0.9659103155136108, 0.9916677474975586], "prob_old_token": [0.9354346990585327, 0.045190420001745224, 0.8381040096282959, 0.02946469932794571, 9.005570973386057e-06, 3.8503014820889803e-07, 4.421101706952868e-08], "l1-model.layers.2.mlp.down_proj.weight": [61460.3828125], "l2-model.layers.2.mlp.down_proj.weight": [10.339517593383789], "linf-model.layers.2.mlp.down_proj.weight": [0.0029785428196191788], "request": {"prompt": "{} is held by", "subject": "Prime Minister of Canada", "target_new": {"str": "Francisco Sagasti"}, "old_answer": {"str": "Justin Trudeau"}, "seed": 42}}, {"loss_per_step": [3.439, 2.178, 0.853, 1.195, 7.159, 1.005, 0.183, 0.07, 0.03, 0.012, 0.009], "prob_new": [0.5128783583641052, 0.541764497756958, 0.8138728737831116, 0.5502991080284119, 0.004096562974154949, 0.719601035118103, 0.8941900134086609, 0.9438273310661316, 0.9728718996047974, 0.9882626533508301, 0.9909520149230957], "prob_old": [0.9836869239807129, 0.7509840726852417, 0.7538207769393921, 0.6351817846298218, 0.111110158264637, 0.593098521232605, 0.7867884635925293, 0.7504833936691284, 0.7453702688217163, 0.7399598360061646, 0.7304443120956421], "prob_new_token": [7.260068741743453e-06, 0.00621078722178936, 0.0036405634600669146, 0.01070878654718399, 0.006622644141316414, 0.016109324991703033, 0.28411024808883667, 0.6218430995941162, 0.8252786993980408, 0.9370793104171753, 0.97020423412323], "prob_old_token": [0.9354346990585327, 0.009201736189424992, 0.023939911276102066, 0.11051204800605774, 0.001382805174216628, 0.005532780196517706, 0.1711205095052719, 
0.020306997001171112, 0.006713422946631908, 0.001108158496208489, 0.00018745564739219844], "l1-model.layers.2.mlp.down_proj.weight": [67250.890625], "l2-model.layers.2.mlp.down_proj.weight": [12.133291244506836], "linf-model.layers.2.mlp.down_proj.weight": [0.004865724593400955], "request": {"prompt": "{} is held by", "subject": "Prime Minister of Canada", "target_new": {"str": "Eleni Kounalakis"}, "old_answer": {"str": "Justin Trudeau"}, "seed": 42}}, {"loss_per_step": [6.5, 3.146, 1.247, 0.043, 0.016, 0.012, 0.011, 0.01, 0.01], "prob_new": [0.31007474660873413, 0.33863863348960876, 0.616583526134491, 0.9586760401725769, 0.9842368960380554, 0.9878420233726501, 0.9890939593315125, 0.9897985458374023, 0.9905165433883667], "prob_old": [0.9563676714897156, 0.0893704742193222, 0.45028671622276306, 0.48025619983673096, 0.4816039502620697, 0.4856462776660919, 0.4873509407043457, 0.4884875416755676, 0.48970043659210205], "prob_new_token": [0.9132179021835327, 0.009280028752982616, 0.820851743221283, 0.9602941274642944, 0.9632043242454529, 0.9712916612625122, 0.9747015237808228, 0.9769749641418457, 0.9794008135795593], "prob_old_token": [0.9132179021835327, 0.009280028752982616, 0.820851743221283, 0.9602941274642944, 0.9632043242454529, 0.9712916612625122, 0.9747015237808228, 0.9769749641418457, 0.9794008135795593], "l1-model.layers.2.mlp.down_proj.weight": [67723.3046875], "l2-model.layers.2.mlp.down_proj.weight": [11.726677894592285], "linf-model.layers.2.mlp.down_proj.weight": [0.0038525969721376896], "request": {"prompt": "{} is held by", "subject": "Governor of Vermont", "target_new": {"str": "Phil Goff"}, "old_answer": {"str": "Phil Scott"}, "seed": 42}}, {"loss_per_step": [5.583, 4.196, 2.717, 0.284, 0.002], "prob_new": [0.4005705714225769, 0.6537013053894043, 0.6636987924575806, 0.8057669997215271, 0.9981452822685242], "prob_old": [0.9563676714897156, 0.27447089552879333, 0.44491738080978394, 0.012243412435054779, 0.0012496213894337416], "prob_new_token": [1.704079437558903e-07, 3.5467780890030554e-06, 0.00029149666079320014, 0.43325239419937134, 0.9964848756790161], "prob_old_token": [0.9132179021835327, 4.425546285347082e-05, 0.00011608989007072523, 0.00010240481060463935, 7.943766178186706e-08], "l1-model.layers.2.mlp.down_proj.weight": [47923.4453125], "l2-model.layers.2.mlp.down_proj.weight": [8.044452667236328], "linf-model.layers.2.mlp.down_proj.weight": [0.0020057838410139084], "request": {"prompt": "{} is held by", "subject": "Governor of Vermont", "target_new": {"str": "Boris Johnson"}, "old_answer": {"str": "Phil Scott"}, "seed": 42}}, {"loss_per_step": [4.638, 3.133, 1.039, 0.102, 0.009], "prob_new": [0.39535877108573914, 0.6203927397727966, 0.7970761060714722, 0.9170415997505188, 0.9910578727722168], "prob_old": [0.9563676714897156, 0.4985862076282501, 0.5013090372085571, 0.49974164366722107, 0.4968700706958771], "prob_new_token": [5.966256821920979e-07, 9.48103604514472e-07, 0.00565678533166647, 0.6230924129486084, 0.9656474590301514], "prob_old_token": [0.9132179021835327, 0.0004848204262088984, 0.0031993130687624216, 0.0018061239970847964, 0.00021089674555696547], "l1-model.layers.2.mlp.down_proj.weight": [47869.04296875], "l2-model.layers.2.mlp.down_proj.weight": [8.105561256408691], "linf-model.layers.2.mlp.down_proj.weight": [0.0020056571811437607], "request": {"prompt": "{} is held by", "subject": "Governor of Vermont", "target_new": {"str": "Cyril Ramaphosa"}, "old_answer": {"str": "Phil Scott"}, "seed": 42}}, {"loss_per_step": [5.656, 2.954, 0.175, 0.005], "prob_new": 
[0.28590595722198486, 0.34040018916130066, 0.8466136455535889, 0.9954535365104675], "prob_old": [0.9828802943229675, 0.3262157738208771, 0.297973096370697, 0.33054763078689575], "prob_new_token": [0.000831297249533236, 0.010293743573129177, 0.7381209135055542, 0.9924881458282471], "prob_old_token": [0.9499223828315735, 0.00010399324673926458, 0.00037020083982497454, 4.728160547529114e-06], "l1-model.layers.2.mlp.down_proj.weight": [44067.12109375], "l2-model.layers.2.mlp.down_proj.weight": [7.019259929656982], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024729073047638], "request": {"prompt": "{} is held by", "subject": "Lucasian Professor of Mathematics", "target_new": {"str": "Andrew Cuomo"}, "old_answer": {"str": "Stephen Hawking"}, "seed": 42}}, {"loss_per_step": [4.712, 1.883, 0.544, 0.07, 0.017, 0.011, 0.006], "prob_new": [0.1848636120557785, 0.3941788673400879, 0.78177410364151, 0.9369530081748962, 0.9830266833305359, 0.9888368844985962, 0.9938163757324219], "prob_old": [0.9828802943229675, 0.3350183963775635, 0.3349386155605316, 0.32923680543899536, 0.3285279870033264, 0.3236044943332672, 0.2572602331638336], "prob_new_token": [1.055113534675911e-05, 0.007823665626347065, 0.07959356158971786, 0.7682611346244812, 0.9347178936004639, 0.9550380706787109, 0.9751639366149902], "prob_old_token": [0.9499223828315735, 0.000369152839994058, 0.0006975936703383923, 3.8104448321973905e-05, 1.2369169780868106e-05, 8.72948112373706e-06, 3.6099677345191594e-06], "l1-model.layers.2.mlp.down_proj.weight": [63381.45703125], "l2-model.layers.2.mlp.down_proj.weight": [10.541107177734375], "linf-model.layers.2.mlp.down_proj.weight": [0.002987898886203766], "request": {"prompt": "{} is held by", "subject": "Lucasian Professor of Mathematics", "target_new": {"str": "Xavier Bettel"}, "old_answer": {"str": "Stephen Hawking"}, "seed": 42}}, {"loss_per_step": [7.245, 2.92, 0.963, 0.093, 0.023, 0.008], "prob_new": [0.15255168080329895, 0.45635277032852173, 0.5643660426139832, 0.9129771590232849, 0.9774774312973022, 0.9918339252471924], "prob_old": [0.9828802943229675, 0.33232808113098145, 0.30218666791915894, 0.3032463788986206, 0.30951735377311707, 0.31597036123275757], "prob_new_token": [9.981170556727648e-09, 0.0003662552626337856, 0.09096205234527588, 0.9011858701705933, 0.9862098097801208, 0.9969842433929443], "prob_old_token": [0.9499223828315735, 8.202923345379531e-05, 0.0005472395569086075, 1.4928471046005143e-06, 8.90107685336261e-08, 1.4168796802493944e-08], "l1-model.layers.2.mlp.down_proj.weight": [59457.8359375], "l2-model.layers.2.mlp.down_proj.weight": [9.602148056030273], "linf-model.layers.2.mlp.down_proj.weight": [0.0024762041866779327], "request": {"prompt": "{} is held by", "subject": "Lucasian Professor of Mathematics", "target_new": {"str": "Wang Yi"}, "old_answer": {"str": "Stephen Hawking"}, "seed": 42}}, {"loss_per_step": [2.15, 1.095, 0.895, 0.22, 0.183, 0.142, 0.022, 0.006], "prob_new": [0.6705139875411987, 0.8753315210342407, 0.8733122944831848, 0.9013159275054932, 0.9083499908447266, 0.9187289476394653, 0.9795420169830322, 0.9944273829460144], "prob_old": [0.9840212464332581, 0.5761485695838928, 0.46935951709747314, 0.6753832101821899, 0.7093616724014282, 0.6055765748023987, 0.4423309564590454, 0.3740284740924835], "prob_new_token": [5.104903380015458e-07, 5.9740006690844893e-05, 0.0003672787279356271, 0.14251472055912018, 0.19718387722969055, 0.2823633849620819, 0.8301208019256592, 0.9678433537483215], "prob_old_token": [0.9386975765228271, 3.8054497508710483e-06, 
3.467380793154007e-06, 2.3579443109156273e-07, 4.819500531993981e-07, 5.17508033226477e-06, 7.647496431673062e-07, 2.404448480319843e-07], "l1-model.layers.2.mlp.down_proj.weight": [63054.796875], "l2-model.layers.2.mlp.down_proj.weight": [10.940581321716309], "linf-model.layers.2.mlp.down_proj.weight": [0.0034961067140102386], "request": {"prompt": "{} is held by", "subject": "Lieutenant Governor of New York", "target_new": {"str": "Andres Manuel Lopez Obrador"}, "old_answer": {"str": "Kathy Hochul"}, "seed": 42}}, {"loss_per_step": [4.749, 2.582, 1.005, 0.026, 0.002], "prob_new": [0.5886369347572327, 0.6662447452545166, 0.6819474697113037, 0.975256085395813, 0.9979693293571472], "prob_old": [0.9840212464332581, 0.7134721279144287, 0.6767303943634033, 0.6481263637542725, 0.6341323256492615], "prob_new_token": [8.464037932753854e-07, 0.0004331887175794691, 0.04928257316350937, 0.9272745251655579, 0.9954406023025513], "prob_old_token": [0.9386975765228271, 0.0015269387513399124, 0.002236142521724105, 7.321096927626058e-05, 1.0149767604161752e-06], "l1-model.layers.2.mlp.down_proj.weight": [46094.66015625], "l2-model.layers.2.mlp.down_proj.weight": [7.992152214050293], "linf-model.layers.2.mlp.down_proj.weight": [0.0020052194595336914], "request": {"prompt": "{} is held by", "subject": "Lieutenant Governor of New York", "target_new": {"str": "Vladimir Putin"}, "old_answer": {"str": "Kathy Hochul"}, "seed": 42}}, {"loss_per_step": [4.457, 2.221, 0.141, 0.004], "prob_new": [0.35485196113586426, 0.6611847877502441, 0.8846968412399292, 0.9962652921676636], "prob_old": [0.9840212464332581, 0.7354885935783386, 0.74611496925354, 0.741584837436676], "prob_new_token": [2.4016000679694116e-05, 0.0012985138455405831, 0.6576753258705139, 0.9902451634407043], "prob_old_token": [0.9386975765228271, 3.7496596632990986e-05, 0.001229763962328434, 4.691504273068858e-06], "l1-model.layers.2.mlp.down_proj.weight": [40999.9921875], "l2-model.layers.2.mlp.down_proj.weight": [6.814803600311279], "linf-model.layers.2.mlp.down_proj.weight": [0.00150245800614357], "request": {"prompt": "{} is held by", "subject": "Lieutenant Governor of New York", "target_new": {"str": "Phil Murphy"}, "old_answer": {"str": "Kathy Hochul"}, "seed": 42}}, {"loss_per_step": [7.127, 4.119, 2.192, 0.288, 0.012, 0.008], "prob_new": [0.334346741437912, 0.6337316632270813, 0.6609129309654236, 0.8038351535797119, 0.9878730773925781, 0.9923490285873413], "prob_old": [0.931307315826416, 0.7681559920310974, 0.777348518371582, 0.784777045249939, 0.7700185179710388, 0.7542815208435059], "prob_new_token": [6.428663379409727e-09, 4.76781224278966e-06, 0.0014180932193994522, 0.4287889301776886, 0.9879437685012817, 0.9969576001167297], "prob_old_token": [0.9288515448570251, 9.869461064226925e-05, 0.0009194635786116123, 2.108387343469076e-05, 1.2535668929558597e-06, 2.0074443796147534e-07], "l1-model.layers.2.mlp.down_proj.weight": [56511.19921875], "l2-model.layers.2.mlp.down_proj.weight": [9.390503883361816], "linf-model.layers.2.mlp.down_proj.weight": [0.002497974783182144], "request": {"prompt": "{} is held by", "subject": "President of Cuba", "target_new": {"str": "Wang Yi"}, "old_answer": {"str": "Miguel D\u00edaz-Canel"}, "seed": 42}}, {"loss_per_step": [3.535, 2.098, 0.689, 0.007], "prob_new": [0.508979320526123, 0.7468212246894836, 0.764342188835144, 0.9928104877471924], "prob_old": [0.931307315826416, 0.7030744552612305, 0.7141590118408203, 0.6794463992118835], "prob_new_token": [1.914110907819122e-05, 0.00022929880651645362, 
0.06392815709114075, 0.9729244112968445], "prob_old_token": [0.9288515448570251, 0.0008893797639757395, 0.028072740882635117, 5.7341763749718666e-05], "l1-model.layers.2.mlp.down_proj.weight": [42870.99609375], "l2-model.layers.2.mlp.down_proj.weight": [6.907844066619873], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024757012724876], "request": {"prompt": "{} is held by", "subject": "President of Cuba", "target_new": {"str": "Narendra Modi"}, "old_answer": {"str": "Miguel D\u00edaz-Canel"}, "seed": 42}}, {"loss_per_step": [5.371, 1.817, 0.541, 0.972, 0.069, 0.019, 0.009], "prob_new": [0.1968405693769455, 0.45650315284729004, 0.761451780796051, 0.6336048245429993, 0.941449761390686, 0.9817547798156738, 0.9911984801292419], "prob_old": [0.931307315826416, 0.7634192705154419, 0.7760555148124695, 0.6952567100524902, 0.7370307445526123, 0.7124899625778198, 0.6931750178337097], "prob_new_token": [4.2446401494089514e-05, 0.002772834850475192, 0.09368930757045746, 0.07605180889368057, 0.7114580869674683, 0.9122049808502197, 0.9592963457107544], "prob_old_token": [0.9288515448570251, 0.026537710800766945, 0.01830514334142208, 9.485843293077778e-06, 0.0009115024586208165, 0.00012885333853773773, 3.936241046176292e-05], "l1-model.layers.2.mlp.down_proj.weight": [58275.109375], "l2-model.layers.2.mlp.down_proj.weight": [10.004775047302246], "linf-model.layers.2.mlp.down_proj.weight": [0.0029776161536574364], "request": {"prompt": "{} is held by", "subject": "President of Cuba", "target_new": {"str": "Xavier Bettel"}, "old_answer": {"str": "Miguel D\u00edaz-Canel"}, "seed": 42}}, {"loss_per_step": [6.17, 1.947, 0.538, 0.887, 0.028, 0.018, 0.013, 0.009], "prob_new": [0.06163982301950455, 0.5120524764060974, 0.8112791180610657, 0.7995277047157288, 0.9740617871284485, 0.9824624061584473, 0.9878156781196594, 0.9912114143371582], "prob_old": [0.9834572672843933, 0.7979742288589478, 0.7450436949729919, 0.7419416904449463, 0.697465181350708, 0.7079643607139587, 0.6841773986816406, 0.6483475565910339], "prob_new_token": [8.957646969065536e-06, 0.004192497115582228, 0.06881469488143921, 0.012013241648674011, 0.8779603242874146, 0.916822612285614, 0.9435078501701355, 0.9607354998588562], "prob_old_token": [0.9480499029159546, 0.19914335012435913, 0.0009951218962669373, 8.954367331170943e-06, 9.421430877409875e-05, 2.9920194720034488e-05, 1.0392271178716328e-05, 3.7644235817424487e-06], "l1-model.layers.2.mlp.down_proj.weight": [61599.37109375], "l2-model.layers.2.mlp.down_proj.weight": [10.688400268554688], "linf-model.layers.2.mlp.down_proj.weight": [0.003378656692802906], "request": {"prompt": "{} is held by", "subject": "President of Panama", "target_new": {"str": "Andrej Plenkovi\u0107"}, "old_answer": {"str": "Laurentino Cortizo"}, "seed": 42}}, {"loss_per_step": [7.098, 5.274, 2.501, 0.032, 0.065, 0.009], "prob_new": [0.04673504829406738, 0.3225015103816986, 0.38334423303604126, 0.9694461822509766, 0.9386749267578125, 0.9914605021476746], "prob_old": [0.9834572672843933, 0.5796349048614502, 0.6333065032958984, 0.5098947882652283, 0.49638983607292175, 0.4520362317562103], "prob_new_token": [3.7694242109864717e-06, 4.979430741514079e-05, 0.002208583988249302, 0.9255126118659973, 0.8563761711120605, 0.9843173027038574], "prob_old_token": [0.9480499029159546, 1.4208661696102354e-06, 5.915244969401101e-07, 1.4753916275367374e-06, 6.740475328115281e-06, 5.461172918330703e-07], "l1-model.layers.2.mlp.down_proj.weight": [55484.390625], "l2-model.layers.2.mlp.down_proj.weight": [9.32832145690918], 
"linf-model.layers.2.mlp.down_proj.weight": [0.0025051478296518326], "request": {"prompt": "{} is held by", "subject": "President of Panama", "target_new": {"str": "Carrie Lam"}, "old_answer": {"str": "Laurentino Cortizo"}, "seed": 42}}, {"loss_per_step": [5.038, 2.181, 1.384, 0.174, 0.004], "prob_new": [0.46101921796798706, 0.7313700914382935, 0.7485969662666321, 0.8734245300292969, 0.995964527130127], "prob_old": [0.9834572672843933, 0.6266101002693176, 0.7230392694473267, 0.7042749524116516, 0.6143178939819336], "prob_new_token": [2.9384131039478234e-07, 0.00017596784164197743, 0.003983626142144203, 0.5028890371322632, 0.9926632642745972], "prob_old_token": [0.9480499029159546, 2.2062281459511723e-06, 1.834582803894591e-06, 9.880427569441963e-06, 4.634783667967213e-09], "l1-model.layers.2.mlp.down_proj.weight": [46960.40625], "l2-model.layers.2.mlp.down_proj.weight": [7.933771133422852], "linf-model.layers.2.mlp.down_proj.weight": [0.0020058341324329376], "request": {"prompt": "{} is held by", "subject": "President of Panama", "target_new": {"str": "Jacinda Ardern"}, "old_answer": {"str": "Laurentino Cortizo"}, "seed": 42}}, {"loss_per_step": [5.579, 2.221, 1.538, 0.457, 0.03, 0.004], "prob_new": [0.2708265781402588, 0.6394850611686707, 0.746662437915802, 0.7880836725234985, 0.9718263149261475, 0.9958499073982239], "prob_old": [0.9042490720748901, 0.6249076128005981, 0.5929186344146729, 0.5751878023147583, 0.5725595951080322, 0.5610570907592773], "prob_new_token": [2.9413035917968955e-07, 0.00023535965010523796, 0.00216263928450644, 0.16262826323509216, 0.8934807777404785, 0.9896426200866699], "prob_old_token": [0.9381623864173889, 0.013347994536161423, 8.125774911604822e-06, 5.700248493667459e-06, 2.1322832708392525e-06, 1.5702909195169923e-07], "l1-model.layers.2.mlp.down_proj.weight": [57932.3515625], "l2-model.layers.2.mlp.down_proj.weight": [9.522893905639648], "linf-model.layers.2.mlp.down_proj.weight": [0.002486592158675194], "request": {"prompt": "{} is held by", "subject": "Prime Minister of Hungary", "target_new": {"str": "Sheikh Hasina"}, "old_answer": {"str": "Viktor Orban"}, "seed": 42}}, {"loss_per_step": [6.698, 2.249, 0.378, 0.018, 0.012, 0.007], "prob_new": [0.3289623260498047, 0.49744993448257446, 0.7697681784629822, 0.9823924899101257, 0.9879457354545593, 0.9932791590690613], "prob_old": [0.9042490720748901, 0.7082074880599976, 0.6961765289306641, 0.6320903897285461, 0.5483493208885193, 0.5191289186477661], "prob_new_token": [6.054634468455333e-06, 0.002380900550633669, 0.3273911774158478, 0.9613670706748962, 0.9813998937606812, 0.9919569492340088], "prob_old_token": [0.9381623864173889, 0.25078779458999634, 0.11198689043521881, 6.441542410584589e-08, 5.94087135041832e-09, 1.6121840529237375e-09], "l1-model.layers.2.mlp.down_proj.weight": [61241.76953125], "l2-model.layers.2.mlp.down_proj.weight": [9.799579620361328], "linf-model.layers.2.mlp.down_proj.weight": [0.0024998225271701813], "request": {"prompt": "{} is held by", "subject": "Prime Minister of Hungary", "target_new": {"str": "Andrew Cuomo"}, "old_answer": {"str": "Viktor Orban"}, "seed": 42}}, {"loss_per_step": [3.142, 2.067, 2.82, 1.358, 0.133, 0.067, 0.036, 0.021, 0.011, 0.006], "prob_new": [0.5324473977088928, 0.568166196346283, 0.5896739959716797, 0.6478052139282227, 0.8965246677398682, 0.9399713277816772, 0.9652118682861328, 0.9797916412353516, 0.9893037676811218, 0.9935872554779053], "prob_old": [0.9042490720748901, 0.6502625942230225, 0.6145703792572021, 0.6329203844070435, 0.6301350593566895, 
0.6063898801803589, 0.5967249870300293, 0.6000568270683289, 0.6055288910865784, 0.6117985844612122], "prob_new_token": [0.00020625941397156566, 0.012575004249811172, 0.15577246248722076, 0.5367857217788696, 0.8648682832717896, 0.9680036902427673, 0.971983015537262, 0.972564697265625, 0.9780022501945496, 0.9826799035072327], "prob_old_token": [0.9381623864173889, 0.010208499617874622, 0.0016033052233979106, 0.0014652378158643842, 0.00026715282001532614, 1.8442546206642874e-05, 1.2772718946507666e-05, 1.2778408745361958e-05, 1.0481862773303874e-05, 7.5370617196313106e-06], "l1-model.layers.2.mlp.down_proj.weight": [65711.4453125], "l2-model.layers.2.mlp.down_proj.weight": [11.788036346435547], "linf-model.layers.2.mlp.down_proj.weight": [0.00442165695130825], "request": {"prompt": "{} is held by", "subject": "Prime Minister of Hungary", "target_new": {"str": "Mokgweetsi Masisi"}, "old_answer": {"str": "Viktor Orban"}, "seed": 42}}, {"loss_per_step": [3.999, 2.895, 0.88, 0.126, 0.01], "prob_new": [0.3672539293766022, 0.5972514748573303, 0.6988760828971863, 0.9048798680305481, 0.9905397295951843], "prob_old": [0.9882694482803345, 0.6667144298553467, 0.6675812005996704, 0.6677491068840027, 0.6664677858352661], "prob_new_token": [4.4049640564480796e-05, 0.0008760539349168539, 0.025916576385498047, 0.5417207479476929, 0.9643850922584534], "prob_old_token": [0.9658076763153076, 0.0056088762357831, 0.00930415466427803, 0.007677157409489155, 0.0018991989782080054], "l1-model.layers.2.mlp.down_proj.weight": [50683.5078125], "l2-model.layers.2.mlp.down_proj.weight": [8.290590286254883], "linf-model.layers.2.mlp.down_proj.weight": [0.002005714923143387], "request": {"prompt": "{} is held by", "subject": "President of Russia", "target_new": {"str": "Xavier Bettel"}, "old_answer": {"str": "Vladimir Putin"}, "seed": 42}}, {"loss_per_step": [4.052, 2.93, 1.319, 0.227, 0.009], "prob_new": [0.47822895646095276, 0.5954074859619141, 0.6434564590454102, 0.8634066581726074, 0.9908952116966248], "prob_old": [0.9882694482803345, 0.6616902351379395, 0.6654523611068726, 0.663769006729126, 0.6618934869766235], "prob_new_token": [9.413980478711892e-06, 0.00022665246797259897, 0.006364651024341583, 0.3239732086658478, 0.9602553248405457], "prob_old_token": [0.9658076763153076, 0.00016948017582762986, 0.001251374022103846, 0.0019350842339918017, 5.701322515960783e-05], "l1-model.layers.2.mlp.down_proj.weight": [44343.62890625], "l2-model.layers.2.mlp.down_proj.weight": [7.70039701461792], "linf-model.layers.2.mlp.down_proj.weight": [0.0020058192312717438], "request": {"prompt": "{} is held by", "subject": "President of Russia", "target_new": {"str": "Yoshihide Suga"}, "old_answer": {"str": "Vladimir Putin"}, "seed": 42}}, {"loss_per_step": [5.811, 4.387, 2.256, 1.271, 0.539, 0.063, 0.01, 0.004], "prob_new": [0.42978793382644653, 0.5311082601547241, 0.7475292682647705, 0.7466927170753479, 0.77220618724823, 0.9423894882202148, 0.9900364875793457, 0.9958635568618774], "prob_old": [0.9882694482803345, 0.6639387607574463, 0.667959451675415, 0.663384199142456, 0.647890567779541, 0.6382628679275513, 0.6393498182296753, 0.6419755220413208], "prob_new_token": [5.801549818329477e-08, 1.8730828799107258e-07, 0.00012168807734269649, 0.006324176676571369, 0.11954595148563385, 0.8003422021865845, 0.980122983455658, 0.996292769908905], "prob_old_token": [0.9658076763153076, 0.00022747099865227938, 0.020429600030183792, 0.011896990239620209, 0.0007514748722314835, 0.00022272673959378153, 1.6015585060813464e-05, 1.8163123058911879e-06], 
"l1-model.layers.2.mlp.down_proj.weight": [70210.6484375], "l2-model.layers.2.mlp.down_proj.weight": [11.607939720153809], "linf-model.layers.2.mlp.down_proj.weight": [0.0034569837152957916], "request": {"prompt": "{} is held by", "subject": "President of Russia", "target_new": {"str": "Ana Brnabic"}, "old_answer": {"str": "Vladimir Putin"}, "seed": 42}}, {"loss_per_step": [4.132, 1.487, 0.252, 0.155, 0.001], "prob_new": [0.5146481990814209, 0.6577069759368896, 0.838039755821228, 0.8795145153999329, 0.9988285303115845], "prob_old": [0.9473972916603088, 0.8791186809539795, 0.7693287134170532, 0.6994844675064087, 0.7258169651031494], "prob_new_token": [2.0907630471356242e-07, 0.004164558835327625, 0.3722003698348999, 0.5614208579063416, 0.9998446106910706], "prob_old_token": [0.9246431589126587, 0.28971606492996216, 0.007967841811478138, 0.0005675253923982382, 2.7076680453319568e-08], "l1-model.layers.2.mlp.down_proj.weight": [49116.29296875], "l2-model.layers.2.mlp.down_proj.weight": [8.038500785827637], "linf-model.layers.2.mlp.down_proj.weight": [0.0020058490335941315], "request": {"prompt": "{} is held by", "subject": "Prime Minister of the United Arab Emirates", "target_new": {"str": "Viktor Orban"}, "old_answer": {"str": "Sheikh Mohammed bin Rashid Al Maktoum"}, "seed": 42}}, {"loss_per_step": [2.499, 0.433, 0.058, 0.021, 0.016, 0.013, 0.007], "prob_new": [0.5604510307312012, 0.7745119333267212, 0.9460496306419373, 0.97943115234375, 0.9842722415924072, 0.9873433113098145, 0.9933732151985168], "prob_old": [0.9473972916603088, 0.743496835231781, 0.7780702114105225, 0.7716689705848694, 0.7510631680488586, 0.7226360440254211, 0.6989610195159912], "prob_new_token": [0.9246430993080139, 0.19601452350616455, 0.8454387187957764, 0.9394078254699707, 0.9489045143127441, 0.9570029973983765, 0.9785953164100647], "prob_old_token": [0.9246431589126587, 0.19601228833198547, 0.8454380631446838, 0.939408004283905, 0.9489036202430725, 0.9570029377937317, 0.9785948991775513], "l1-model.layers.2.mlp.down_proj.weight": [62033.69140625], "l2-model.layers.2.mlp.down_proj.weight": [10.457866668701172], "linf-model.layers.2.mlp.down_proj.weight": [0.0030083591118454933], "request": {"prompt": "{} is held by", "subject": "Prime Minister of the United Arab Emirates", "target_new": {"str": "Sheikh Hasina"}, "old_answer": {"str": "Sheikh Mohammed bin Rashid Al Maktoum"}, "seed": 42}}, {"loss_per_step": [3.408, 0.661, 0.009], "prob_new": [0.6377345323562622, 0.7666890621185303, 0.9915449023246765], "prob_old": [0.9473972916603088, 0.881841242313385, 0.7650781869888306], "prob_new_token": [2.1833652681380045e-06, 0.07152005285024643, 0.9682266116142273], "prob_old_token": [0.9246431589126587, 0.25168243050575256, 0.0008077328675426543], "l1-model.layers.2.mlp.down_proj.weight": [33687.21484375], "l2-model.layers.2.mlp.down_proj.weight": [5.295247554779053], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "{} is held by", "subject": "Prime Minister of the United Arab Emirates", "target_new": {"str": "Justin Trudeau"}, "old_answer": {"str": "Sheikh Mohammed bin Rashid Al Maktoum"}, "seed": 42}}, {"loss_per_step": [6.006, 2.905, 1.518, 0.029, 0.007], "prob_new": [0.3218599557876587, 0.6630900502204895, 0.7468477487564087, 0.972166895866394, 0.992799699306488], "prob_old": [0.9682589769363403, 0.6697501540184021, 0.6621630191802979, 0.6349902153015137, 0.627204418182373], "prob_new_token": [1.195432588474432e-07, 1.372333281324245e-05, 0.0023389882408082485, 0.8997830748558044, 
0.976581335067749], "prob_old_token": [0.9854350686073303, 8.137185795931146e-05, 0.0006404141895473003, 3.765123437915463e-06, 6.170340043354372e-07], "l1-model.layers.2.mlp.down_proj.weight": [47947.77734375], "l2-model.layers.2.mlp.down_proj.weight": [7.97850227355957], "linf-model.layers.2.mlp.down_proj.weight": [0.0020051668398082256], "request": {"prompt": "{} is held by", "subject": "Prime Minister of Serbia", "target_new": {"str": "Kathy Hochul"}, "old_answer": {"str": "Ana Brnabic"}, "seed": 42}}, {"loss_per_step": [3.815, 2.606, 0.667, 0.012, 0.008], "prob_new": [0.6219860911369324, 0.7822021245956421, 0.8052101135253906, 0.9877278208732605, 0.9917935729026794], "prob_old": [0.9682589769363403, 0.7119077444076538, 0.70973801612854, 0.6733320951461792, 0.6515570878982544], "prob_new_token": [2.800645049205741e-08, 2.4117136945278617e-06, 0.03599173575639725, 0.9582151770591736, 0.9811267256736755], "prob_old_token": [0.9854350686073303, 1.3304003005032428e-05, 0.004026120062917471, 1.3078197298455052e-05, 3.2879258924367605e-06], "l1-model.layers.2.mlp.down_proj.weight": [47265.25390625], "l2-model.layers.2.mlp.down_proj.weight": [8.040597915649414], "linf-model.layers.2.mlp.down_proj.weight": [0.0020011786837130785], "request": {"prompt": "{} is held by", "subject": "Prime Minister of Serbia", "target_new": {"str": "Cyril Ramaphosa"}, "old_answer": {"str": "Ana Brnabic"}, "seed": 42}}, {"loss_per_step": [2.226, 1.266, 0.283, 0.058, 0.004], "prob_new": [0.6616801023483276, 0.7998432517051697, 0.8648218512535095, 0.9536158442497253, 0.9964224696159363], "prob_old": [0.9682589769363403, 0.7249229550361633, 0.7362264394760132, 0.7355907559394836, 0.7133103609085083], "prob_new_token": [3.60755693691317e-05, 4.0003214962780476e-05, 0.11577913165092468, 0.6070078015327454, 0.9873098134994507], "prob_old_token": [0.9854350686073303, 1.450954096071655e-05, 0.00028321187710389495, 0.0006586748058907688, 2.057190977211576e-05], "l1-model.layers.2.mlp.down_proj.weight": [46536.65625], "l2-model.layers.2.mlp.down_proj.weight": [7.984040260314941], "linf-model.layers.2.mlp.down_proj.weight": [0.0020057596266269684], "request": {"prompt": "{} is held by", "subject": "Prime Minister of Serbia", "target_new": {"str": "Andres Manuel Lopez Obrador"}, "old_answer": {"str": "Ana Brnabic"}, "seed": 42}}, {"loss_per_step": [7.795, 2.464, 1.108, 0.213, 0.021, 0.002], "prob_new": [0.0364820770919323, 0.3813999891281128, 0.7498390674591064, 0.8563205599784851, 0.9797887802124023, 0.9984795451164246], "prob_old": [0.9913824200630188, 0.7565320730209351, 0.7509040236473083, 0.7493647336959839, 0.7482187747955322, 0.7480990886688232], "prob_new_token": [7.30983913399541e-08, 0.0007749447831884027, 0.012067307718098164, 0.4263227880001068, 0.919421374797821, 0.994175136089325], "prob_old_token": [0.9670903086662292, 0.029658589512109756, 0.008076334372162819, 0.0036430382169783115, 0.0005144098540768027, 3.158861363772303e-05], "l1-model.layers.2.mlp.down_proj.weight": [57158.41015625], "l2-model.layers.2.mlp.down_proj.weight": [9.54156494140625], "linf-model.layers.2.mlp.down_proj.weight": [0.0025101001374423504], "request": {"prompt": "{} is held by", "subject": "Prime Minister of New Zealand", "target_new": {"str": "Kathy Hochul"}, "old_answer": {"str": "Jacinda Ardern"}, "seed": 42}}, {"loss_per_step": [3.6, 0.539, 0.002], "prob_new": [0.7472768425941467, 0.7763773798942566, 0.9979158043861389], "prob_old": [0.9913824200630188, 0.8456839919090271, 0.7466791868209839], "prob_new_token": 
[5.64551783099887e-07, 0.11730284243822098, 0.9993833899497986], "prob_old_token": [0.9670903086662292, 0.38795214891433716, 1.986117581509461e-08], "l1-model.layers.2.mlp.down_proj.weight": [33955.9296875], "l2-model.layers.2.mlp.down_proj.weight": [5.3309736251831055], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "{} is held by", "subject": "Prime Minister of New Zealand", "target_new": {"str": "Narendra Modi"}, "old_answer": {"str": "Jacinda Ardern"}, "seed": 42}}, {"loss_per_step": [2.475, 1.579, 0.929, 0.519, 0.166, 0.538, 0.079, 0.043, 0.021, 0.01, 0.006], "prob_new": [0.6615698337554932, 0.7454083561897278, 0.8619340658187866, 0.8864386081695557, 0.9108535051345825, 0.8831046223640442, 0.9388671517372131, 0.9628162384033203, 0.9805429577827454, 0.9901472926139832, 0.9940716624259949], "prob_old": [0.9913824200630188, 0.7496678829193115, 0.7478842735290527, 0.775277853012085, 0.7467255592346191, 0.7331322431564331, 0.6450889706611633, 0.3363542854785919, 0.06757138669490814, 0.03394389897584915, 0.025760730728507042], "prob_new_token": [5.956847530796949e-07, 4.5515698730014265e-06, 0.00030572721152566373, 0.00966800656169653, 0.23218786716461182, 0.008379993960261345, 0.5420705676078796, 0.7236971855163574, 0.8586559295654297, 0.9321098923683167, 0.9628180265426636], "prob_old_token": [0.9670903086662292, 0.0023495720233768225, 0.0028136225882917643, 0.10723434388637543, 0.00014426663983613253, 6.4611253947077785e-06, 0.0001848202955443412, 5.376495391828939e-05, 1.3595997188531328e-05, 3.994262442574836e-06, 1.2967445854883408e-06], "l1-model.layers.2.mlp.down_proj.weight": [70854.6484375], "l2-model.layers.2.mlp.down_proj.weight": [12.552457809448242], "linf-model.layers.2.mlp.down_proj.weight": [0.004955636337399483], "request": {"prompt": "{} is held by", "subject": "Prime Minister of New Zealand", "target_new": {"str": "Andres Manuel Lopez Obrador"}, "old_answer": {"str": "Jacinda Ardern"}, "seed": 42}}, {"loss_per_step": [6.914, 7.44, 1.812, 0.146, 0.013, 0.004], "prob_new": [0.04783332347869873, 0.005142638459801674, 0.4231055974960327, 0.8694504499435425, 0.9874340295791626, 0.995728611946106], "prob_old": [0.9691097736358643, 0.6586036682128906, 0.4436990022659302, 0.5149214267730713, 0.5583087801933289, 0.5179226398468018], "prob_new_token": [4.573519618134014e-05, 2.2625497877015732e-05, 0.01705070585012436, 0.7491570115089417, 0.976312518119812, 0.9933033585548401], "prob_old_token": [0.9140710234642029, 0.00014054916391614825, 0.0002774671884253621, 0.0003919870941899717, 3.251900488976389e-05, 5.588620297203306e-06], "l1-model.layers.2.mlp.down_proj.weight": [49965.85546875], "l2-model.layers.2.mlp.down_proj.weight": [8.749603271484375], "linf-model.layers.2.mlp.down_proj.weight": [0.002460695803165436], "request": {"prompt": "{} is held by", "subject": "Mayor of Auckland", "target_new": {"str": "Stephen Hawking"}, "old_answer": {"str": "Phil Goff"}, "seed": 42}}, {"loss_per_step": [8.693, 3.208, 0.344, 0.041, 0.012, 0.006], "prob_new": [0.25640273094177246, 0.4162706136703491, 0.775473952293396, 0.9602360725402832, 0.9877372980117798, 0.9938772916793823], "prob_old": [0.9691097736358643, 0.6204625368118286, 0.5789288878440857, 0.5644644498825073, 0.5395503044128418, 0.5048494935035706], "prob_new_token": [6.637047044932842e-05, 0.0002644587366376072, 0.37484702467918396, 0.9107072949409485, 0.9804545640945435, 0.9913342595100403], "prob_old_token": [0.9140710234642029, 0.00022357009584084153, 0.013087525963783264, 
0.0038232163060456514, 0.0008778005721978843, 0.0003064391785301268], "l1-model.layers.2.mlp.down_proj.weight": [54056.3125], "l2-model.layers.2.mlp.down_proj.weight": [9.285560607910156], "linf-model.layers.2.mlp.down_proj.weight": [0.0024846140295267105], "request": {"prompt": "{} is held by", "subject": "Mayor of Auckland", "target_new": {"str": "Andrew Cuomo"}, "old_answer": {"str": "Phil Goff"}, "seed": 42}}, {"loss_per_step": [4.827, 2.039, 0.233, 0.016, 0.001], "prob_new": [0.45917272567749023, 0.7448257803916931, 0.8474811315536499, 0.9848380088806152, 0.9990572929382324], "prob_old": [0.9691097736358643, 0.32248204946517944, 0.02124810218811035, 0.010722294449806213, 0.007323805242776871], "prob_new_token": [1.1538649857811833e-07, 0.0002926610177382827, 0.39637821912765503, 0.9423598647117615, 0.9989681243896484], "prob_old_token": [0.9140710234642029, 3.328007733216509e-05, 1.059423425431305e-06, 2.3057422637862146e-09, 1.4375632628738089e-11], "l1-model.layers.2.mlp.down_proj.weight": [49954.2421875], "l2-model.layers.2.mlp.down_proj.weight": [8.234589576721191], "linf-model.layers.2.mlp.down_proj.weight": [0.0020049861632287502], "request": {"prompt": "{} is held by", "subject": "Mayor of Auckland", "target_new": {"str": "Narendra Modi"}, "old_answer": {"str": "Phil Goff"}, "seed": 42}}, {"loss_per_step": [4.614, 2.511, 1.68, 0.821, 0.131, 0.005], "prob_new": [0.3847108483314514, 0.5860609412193298, 0.7938616275787354, 0.7961721420288086, 0.9019340872764587, 0.9949722290039062], "prob_old": [0.9828921556472778, 0.7255004048347473, 0.6007252335548401, 0.5840838551521301, 0.43327054381370544, 0.4261716902256012], "prob_new_token": [9.165585623804873e-08, 1.7410153304808773e-05, 0.0002324186934856698, 0.017094731330871582, 0.5309517979621887, 0.9826362133026123], "prob_old_token": [0.9364587664604187, 0.054701000452041626, 0.0036638998426496983, 0.010466066189110279, 0.0003054153930861503, 4.583333065966144e-06], "l1-model.layers.2.mlp.down_proj.weight": [53634.82421875], "l2-model.layers.2.mlp.down_proj.weight": [9.205723762512207], "linf-model.layers.2.mlp.down_proj.weight": [0.002510813996195793], "request": {"prompt": "{} is held by", "subject": "President of the People's Republic of China", "target_new": {"str": "Andrej Plenkovi\u0107"}, "old_answer": {"str": "Xi Jinping"}, "seed": 42}}, {"loss_per_step": [6.794, 3.342, 1.005, 0.202, 0.012, 0.006], "prob_new": [0.2991504371166229, 0.4102807939052582, 0.6721479296684265, 0.8475232124328613, 0.9885994791984558, 0.9935917854309082], "prob_old": [0.9828921556472778, 0.7288268804550171, 0.7451900243759155, 0.5899796485900879, 0.6690108776092529, 0.597400426864624], "prob_new_token": [1.7799743545765523e-06, 0.00018980175082106143, 0.050819482654333115, 0.5509482026100159, 0.9702962636947632, 0.9836115837097168], "prob_old_token": [0.9364587664604187, 0.03802666813135147, 0.10036640614271164, 0.0027191799599677324, 0.0016099726781249046, 0.0008317171595990658], "l1-model.layers.2.mlp.down_proj.weight": [56965.13671875], "l2-model.layers.2.mlp.down_proj.weight": [9.403164863586426], "linf-model.layers.2.mlp.down_proj.weight": [0.0024936720728874207], "request": {"prompt": "{} is held by", "subject": "President of the People's Republic of China", "target_new": {"str": "Andy Burnham"}, "old_answer": {"str": "Xi Jinping"}, "seed": 42}}, {"loss_per_step": [2.667, 0.362, 0.002], "prob_new": [0.7475597262382507, 0.806433379650116, 0.9980727434158325], "prob_old": [0.9828921556472778, 0.8188163638114929, 0.6028052568435669], 
"prob_new_token": [2.348873022128828e-05, 0.23819051682949066, 0.9993101358413696], "prob_old_token": [0.9364587664604187, 0.2920261323451996, 7.229322562807283e-08], "l1-model.layers.2.mlp.down_proj.weight": [32309.2734375], "l2-model.layers.2.mlp.down_proj.weight": [5.167153835296631], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "{} is held by", "subject": "President of the People's Republic of China", "target_new": {"str": "Narendra Modi"}, "old_answer": {"str": "Xi Jinping"}, "seed": 42}}, {"loss_per_step": [5.268, 0.917, 0.02, 0.001], "prob_new": [0.29878294467926025, 0.7429810762405396, 0.9811686277389526, 0.9985049962997437], "prob_old": [0.9954401850700378, 0.6664709448814392, 0.6586270332336426, 0.6375093460083008], "prob_new_token": [3.236901648051571e-06, 0.027030015364289284, 0.9259871244430542, 0.9950204491615295], "prob_old_token": [0.9872643947601318, 0.000438662595115602, 1.035618879541289e-05, 3.0333424660966557e-07], "l1-model.layers.2.mlp.down_proj.weight": [43258.64453125], "l2-model.layers.2.mlp.down_proj.weight": [6.998587131500244], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024396125227213], "request": {"prompt": "{} is held by", "subject": "Mayor of Greater Manchester", "target_new": {"str": "Ana Brnabic"}, "old_answer": {"str": "Andy Burnham"}, "seed": 42}}, {"loss_per_step": [6.674, 1.974, 1.049, 0.033, 0.013, 0.006], "prob_new": [0.2879035770893097, 0.5917370319366455, 0.6452993154525757, 0.9693202972412109, 0.9868460297584534, 0.9937821626663208], "prob_old": [0.9954401850700378, 0.66408371925354, 0.33087366819381714, 0.509143054485321, 0.47560209035873413, 0.4018113613128662], "prob_new_token": [4.2654267673469803e-08, 0.0007994580664671957, 0.027073590084910393, 0.8827165961265564, 0.9989185333251953, 0.9998867511749268], "prob_old_token": [0.9872643947601318, 0.0017288147937506437, 8.405325934290886e-05, 3.228915011277422e-05, 1.3087129957511934e-07, 3.123801439741669e-09], "l1-model.layers.2.mlp.down_proj.weight": [55599.3984375], "l2-model.layers.2.mlp.down_proj.weight": [9.303613662719727], "linf-model.layers.2.mlp.down_proj.weight": [0.0025052158161997795], "request": {"prompt": "{} is held by", "subject": "Mayor of Greater Manchester", "target_new": {"str": "Kathy Hochul"}, "old_answer": {"str": "Andy Burnham"}, "seed": 42}}, {"loss_per_step": [5.045, 3.091, 1.564, 0.604, 0.137, 0.013, 0.006], "prob_new": [0.5030664801597595, 0.7434725761413574, 0.7484867572784424, 0.7681902050971985, 0.8930884599685669, 0.987509548664093, 0.9943129420280457], "prob_old": [0.9954401850700378, 0.6503582000732422, 0.615068793296814, 0.28072261810302734, 0.274458646774292, 0.2553570866584778, 0.21330003440380096], "prob_new_token": [1.0780970427504144e-07, 4.383743998914724e-06, 0.001934596337378025, 0.09111296385526657, 0.5872685313224792, 0.9559780955314636, 0.9814738631248474], "prob_old_token": [0.9872643947601318, 0.00025338181876577437, 0.006624896544963121, 0.004613223019987345, 0.0002194454282289371, 4.943088788422756e-05, 2.1942461899016052e-05], "l1-model.layers.2.mlp.down_proj.weight": [57216.0390625], "l2-model.layers.2.mlp.down_proj.weight": [10.032346725463867], "linf-model.layers.2.mlp.down_proj.weight": [0.0029756822623312473], "request": {"prompt": "{} is held by", "subject": "Mayor of Greater Manchester", "target_new": {"str": "Justin Trudeau"}, "old_answer": {"str": "Andy Burnham"}, "seed": 42}}, {"loss_per_step": [5.249, 3.743, 1.792, 1.149, 0.147, 0.034, 0.016, 0.01, 0.008], "prob_new": 
[0.6645306944847107, 0.6648783087730408, 0.6677365303039551, 0.67661452293396, 0.881008505821228, 0.9673991203308105, 0.9846761226654053, 0.9897286891937256, 0.9917348623275757], "prob_old": [0.9926177263259888, 0.7480385303497314, 0.7495251893997192, 0.7503846883773804, 0.7505537271499634, 0.7502562999725342, 0.749887228012085, 0.7495012283325195, 0.7492848038673401], "prob_new_token": [1.457846110497485e-07, 1.333747695753118e-05, 0.004638945683836937, 0.03192824125289917, 0.6456902027130127, 0.9060878753662109, 0.9589515328407288, 0.9749262928962708, 0.9815622568130493], "prob_old_token": [0.9743502736091614, 0.0008515313384123147, 0.0013875860022380948, 0.003826916916295886, 0.004860616289079189, 0.003696954110637307, 0.002136255381628871, 0.0007229779148474336, 0.0002290026895934716], "l1-model.layers.2.mlp.down_proj.weight": [64534.54296875], "l2-model.layers.2.mlp.down_proj.weight": [11.398508071899414], "linf-model.layers.2.mlp.down_proj.weight": [0.0038513103500008583], "request": {"prompt": "{} is held by", "subject": "Prime minister of India", "target_new": {"str": "Vladimir Putin"}, "old_answer": {"str": "Narendra Modi"}, "seed": 42}}, {"loss_per_step": [9.256, 5.584, 3.159, 1.41, 0.247, 0.008], "prob_new": [0.3153913617134094, 0.3443165719509125, 0.6583524942398071, 0.6694173812866211, 0.8252922892570496, 0.9922727346420288], "prob_old": [0.9926177263259888, 0.7470464706420898, 0.7486522793769836, 0.7494155764579773, 0.7511352300643921, 0.7488868236541748], "prob_new_token": [1.1293801449596685e-09, 1.4293550520960707e-06, 7.855417061364278e-05, 0.01466455403715372, 0.47727954387664795, 0.9784901738166809], "prob_old_token": [0.9743502736091614, 0.0005919745308347046, 0.0008150743087753654, 0.0012045770417898893, 0.008549819700419903, 9.803598004509695e-07], "l1-model.layers.2.mlp.down_proj.weight": [51799.76953125], "l2-model.layers.2.mlp.down_proj.weight": [8.917388916015625], "linf-model.layers.2.mlp.down_proj.weight": [0.0025109422858804464], "request": {"prompt": "{} is held by", "subject": "Prime minister of India", "target_new": {"str": "Phil Murphy"}, "old_answer": {"str": "Narendra Modi"}, "seed": 42}}, {"loss_per_step": [9.065, 4.719, 3.562, 1.588, 0.041, 0.003], "prob_new": [0.3299189507961273, 0.4603148102760315, 0.6578555703163147, 0.6683177351951599, 0.9615589380264282, 0.996627688407898], "prob_old": [0.9926177263259888, 0.748290479183197, 0.7600756287574768, 0.7877427339553833, 0.728326678276062, 0.6813580989837646], "prob_new_token": [9.37585209470626e-09, 1.8295858126293751e-06, 2.3486354621127248e-05, 0.008566484786570072, 0.8877718448638916, 0.9991707801818848], "prob_old_token": [0.9743502736091614, 0.00196948298253119, 0.059410225600004196, 0.21467889845371246, 0.004769438877701759, 7.035360454210604e-07], "l1-model.layers.2.mlp.down_proj.weight": [52270.9296875], "l2-model.layers.2.mlp.down_proj.weight": [9.079997062683105], "linf-model.layers.2.mlp.down_proj.weight": [0.0024996947031468153], "request": {"prompt": "{} is held by", "subject": "Prime minister of India", "target_new": {"str": "Ned Lamont"}, "old_answer": {"str": "Narendra Modi"}, "seed": 42}}, {"loss_per_step": [4.789, 4.626, 2.42, 0.671, 0.585, 0.004], "prob_new": [0.6617025136947632, 0.6271552443504333, 0.6352490186691284, 0.69603031873703, 0.7205089330673218, 0.9964818954467773], "prob_old": [0.9941709637641907, 0.6144029498100281, 0.5729308724403381, 0.5699125528335571, 0.539287269115448, 0.47772669792175293], "prob_new_token": [5.846101203133003e-07, 1.0666180969565175e-06, 
0.0007779296138323843, 0.14097534120082855, 0.1751072108745575, 0.9980762004852295], "prob_old_token": [0.9759684801101685, 0.00018741382518783212, 0.001058105262927711, 0.04206276312470436, 0.022558065131306648, 9.00965096661821e-06], "l1-model.layers.2.mlp.down_proj.weight": [49902.55859375], "l2-model.layers.2.mlp.down_proj.weight": [8.73116683959961], "linf-model.layers.2.mlp.down_proj.weight": [0.0025106165558099747], "request": {"prompt": "{} is held by", "subject": "Prime Minister of Japan", "target_new": {"str": "Vladimir Putin"}, "old_answer": {"str": "Yoshihide Suga"}, "seed": 42}}, {"loss_per_step": [6.437, 4.447, 3.229, 1.574, 0.567, 0.122, 0.039, 0.015, 0.007], "prob_new": [0.6104480624198914, 0.528164267539978, 0.6586306095123291, 0.6634114980697632, 0.7226999998092651, 0.8961011171340942, 0.9628138542175293, 0.9855364561080933, 0.9930018782615662], "prob_old": [0.9941709637641907, 0.5900385975837708, 0.6353153586387634, 0.636669933795929, 0.6155298352241516, 0.6098951101303101, 0.607483983039856, 0.6065793633460999, 0.6054136157035828], "prob_new_token": [4.927334984472509e-09, 2.7028488602809375e-06, 6.353016942739487e-05, 0.009058580733835697, 0.18608658015727997, 0.7038001418113708, 0.9011222124099731, 0.9670878648757935, 0.9882174134254456], "prob_old_token": [0.9759684801101685, 0.00024513216339983046, 0.0005218654405325651, 0.005378518719226122, 0.005291989538818598, 0.0017322145868092775, 0.00027812394546344876, 5.7653051044326276e-05, 1.762073952704668e-05], "l1-model.layers.2.mlp.down_proj.weight": [68416.953125], "l2-model.layers.2.mlp.down_proj.weight": [11.96923542022705], "linf-model.layers.2.mlp.down_proj.weight": [0.0039233313873410225], "request": {"prompt": "{} is held by", "subject": "Prime Minister of Japan", "target_new": {"str": "Wang Yi"}, "old_answer": {"str": "Yoshihide Suga"}, "seed": 42}}, {"loss_per_step": [5.033, 2.463, 1.03, 0.636, 0.112, 0.027, 0.007], "prob_new": [0.2847854197025299, 0.6965843439102173, 0.7528648972511292, 0.76863032579422, 0.909587025642395, 0.9745864272117615, 0.9932321906089783], "prob_old": [0.9941709637641907, 0.7765105962753296, 0.7962496876716614, 0.5988956689834595, 0.5942673683166504, 0.5925025343894958, 0.5909716486930847], "prob_new_token": [4.78390631997172e-07, 6.631839642068371e-05, 0.016302311792969704, 0.07902563363313675, 0.6392703056335449, 0.8988677263259888, 0.9733799695968628], "prob_old_token": [0.9759684801101685, 0.0066316379234194756, 0.08889066427946091, 0.0020961498375982046, 0.002632864285260439, 0.0005655109998770058, 0.00011018571967724711], "l1-model.layers.2.mlp.down_proj.weight": [60662.30859375], "l2-model.layers.2.mlp.down_proj.weight": [10.298086166381836], "linf-model.layers.2.mlp.down_proj.weight": [0.0029985345900058746], "request": {"prompt": "{} is held by", "subject": "Prime Minister of Japan", "target_new": {"str": "Sheikh Hasina"}, "old_answer": {"str": "Yoshihide Suga"}, "seed": 42}}, {"loss_per_step": [6.238, 2.355, 0.268, 0.102, 0.004], "prob_new": [0.5930763483047485, 0.424426406621933, 0.7879163026809692, 0.9073486328125, 0.9962412118911743], "prob_old": [0.9533374905586243, 0.6477648019790649, 0.6501278281211853, 0.664442777633667, 0.6979531049728394], "prob_new_token": [0.9120523929595947, 0.3705342710018158, 0.5371625423431396, 0.7830885052680969, 0.997774064540863], "prob_old_token": [0.9120523929595947, 0.3705342710018158, 0.5371625423431396, 0.7830885052680969, 0.997774064540863], "l1-model.layers.2.mlp.down_proj.weight": [49379.15234375], 
"l2-model.layers.2.mlp.down_proj.weight": [8.18449592590332], "linf-model.layers.2.mlp.down_proj.weight": [0.002005763817578554], "request": {"prompt": "{} has the job title of", "subject": "Abdelaziz Bouteflika", "target_new": {"str": "President of Finland"}, "old_answer": {"str": "President of Algeria"}, "seed": 42}}, {"loss_per_step": [4.329, 1.733, 0.11, 0.03, 0.007], "prob_new": [0.6913541555404663, 0.48625558614730835, 0.9063344597816467, 0.9717206358909607, 0.9928302764892578], "prob_old": [0.9533374905586243, 0.5860347747802734, 0.7011517286300659, 0.725710391998291, 0.7345666289329529], "prob_new_token": [0.9120523929595947, 0.1077805757522583, 0.6838706731796265, 0.8978110551834106, 0.9756019711494446], "prob_old_token": [0.9120523929595947, 0.1077805757522583, 0.6838706731796265, 0.8978110551834106, 0.9756019711494446], "l1-model.layers.2.mlp.down_proj.weight": [50361.6328125], "l2-model.layers.2.mlp.down_proj.weight": [8.31110668182373], "linf-model.layers.2.mlp.down_proj.weight": [0.0020056255161762238], "request": {"prompt": "{} has the job title of", "subject": "Abdelaziz Bouteflika", "target_new": {"str": "President of Ghana"}, "old_answer": {"str": "President of Algeria"}, "seed": 42}}, {"loss_per_step": [3.058, 0.189, 0.017, 0.01], "prob_new": [0.6945270895957947, 0.8403880000114441, 0.983093798160553, 0.9905092120170593], "prob_old": [0.9533374905586243, 0.6926998496055603, 0.7120996713638306, 0.6786593198776245], "prob_new_token": [0.9120523929595947, 0.6474162340164185, 0.9554207921028137, 0.9809002876281738], "prob_old_token": [0.9120523929595947, 0.6474162340164185, 0.9554207921028137, 0.9809002876281738], "l1-model.layers.2.mlp.down_proj.weight": [45163.1875], "l2-model.layers.2.mlp.down_proj.weight": [7.0844950675964355], "linf-model.layers.2.mlp.down_proj.weight": [0.0015023667365312576], "request": {"prompt": "{} has the job title of", "subject": "Abdelaziz Bouteflika", "target_new": {"str": "President of Afghanistan"}, "old_answer": {"str": "President of Algeria"}, "seed": 42}}, {"loss_per_step": [4.998, 1.658, 0.313, 0.111, 0.069, 0.04, 0.024, 0.015, 0.01], "prob_new": [0.30213651061058044, 0.538638710975647, 0.763644278049469, 0.9027556777000427, 0.9379415512084961, 0.9621334075927734, 0.9767486453056335, 0.9853255152702332, 0.9903690218925476], "prob_old": [0.9584035873413086, 0.5071248412132263, 0.5172390937805176, 0.5696055293083191, 0.5877787470817566, 0.5935620665550232, 0.5957103967666626, 0.5966216921806335, 0.5970759987831116], "prob_new_token": [0.0005291548441164196, 0.01742963306605816, 0.6579743027687073, 0.870664656162262, 0.9468518495559692, 0.9693375825881958, 0.9791524410247803, 0.9849295616149902, 0.9887510538101196], "prob_old_token": [0.9026589393615723, 1.9577841158024967e-05, 0.0001762294996296987, 0.00021070658112876117, 0.00015633062866982073, 0.00010309052595403045, 7.22530166967772e-05, 5.498554673977196e-05, 4.459422416402958e-05], "l1-model.layers.2.mlp.down_proj.weight": [71267.15625], "l2-model.layers.2.mlp.down_proj.weight": [12.103621482849121], "linf-model.layers.2.mlp.down_proj.weight": [0.003976080566644669], "request": {"prompt": "{} has the job title of", "subject": "Najib Razak", "target_new": {"str": "President of South Sudan"}, "old_answer": {"str": "Prime Minister of Malaysia"}, "seed": 42}}, {"loss_per_step": [3.958, 1.286, 0.319, 0.002], "prob_new": [0.5820874571800232, 0.7824077010154724, 0.8316011428833008, 0.9976308941841125], "prob_old": [0.9584035873413086, 0.9064049124717712, 0.7351139783859253, 
0.6522588133811951], "prob_new_token": [9.268066605727654e-06, 0.001765296678058803, 0.2152581363916397, 0.998538076877594], "prob_old_token": [0.9026589393615723, 0.6290434002876282, 0.0003692200989462435, 1.0059858368549612e-06], "l1-model.layers.2.mlp.down_proj.weight": [44110.421875], "l2-model.layers.2.mlp.down_proj.weight": [6.968758583068848], "linf-model.layers.2.mlp.down_proj.weight": [0.00150248222053051], "request": {"prompt": "{} has the job title of", "subject": "Najib Razak", "target_new": {"str": "Chancellor of Germany"}, "old_answer": {"str": "Prime Minister of Malaysia"}, "seed": 42}}, {"loss_per_step": [3.981, 1.656, 0.445, 0.082, 0.018, 0.006], "prob_new": [0.47094258666038513, 0.584064245223999, 0.6993926167488098, 0.925937294960022, 0.9824945330619812, 0.9941645860671997], "prob_old": [0.9584035873413086, 0.6726277470588684, 0.6394200921058655, 0.7179709076881409, 0.7325341701507568, 0.687365710735321], "prob_new_token": [0.0005291548441164196, 0.010665378533303738, 0.37831467390060425, 0.7509120106697083, 0.9428421854972839, 0.9813300967216492], "prob_old_token": [0.9026589393615723, 0.0001381867186864838, 0.04070032015442848, 0.02969919890165329, 0.003821984166279435, 0.0006442593294195831], "l1-model.layers.2.mlp.down_proj.weight": [54677.96484375], "l2-model.layers.2.mlp.down_proj.weight": [9.257452964782715], "linf-model.layers.2.mlp.down_proj.weight": [0.00248563289642334], "request": {"prompt": "{} has the job title of", "subject": "Najib Razak", "target_new": {"str": "President of Equatorial Guinea"}, "old_answer": {"str": "Prime Minister of Malaysia"}, "seed": 42}}, {"loss_per_step": [4.326, 1.974, 0.52, 0.042, 0.033, 0.023, 0.014, 0.008], "prob_new": [0.770230233669281, 0.6593977212905884, 0.7965309023857117, 0.9601670503616333, 0.9692147374153137, 0.9776777625083923, 0.9864664077758789, 0.9919137954711914], "prob_old": [0.9587275981903076, 0.5744719505310059, 0.7257857322692871, 0.7174933552742004, 0.7152379751205444, 0.7234663963317871, 0.7335728406906128, 0.7396044731140137], "prob_new_token": [0.9643467664718628, 0.3938538134098053, 0.9346243143081665, 0.8876621127128601, 0.8733166456222534, 0.9046779274940491, 0.9448623061180115, 0.9692147374153137], "prob_old_token": [0.9643467664718628, 0.3938538134098053, 0.9346243143081665, 0.8876621127128601, 0.8733166456222534, 0.9046779274940491, 0.9448623061180115, 0.9692147374153137], "l1-model.layers.2.mlp.down_proj.weight": [73116.21875], "l2-model.layers.2.mlp.down_proj.weight": [11.770228385925293], "linf-model.layers.2.mlp.down_proj.weight": [0.003493956755846739], "request": {"prompt": "{} has the job title of", "subject": "Ali Bongo Ondimba", "target_new": {"str": "President of Lithuania"}, "old_answer": {"str": "President of Gabon"}, "seed": 42}}, {"loss_per_step": [2.203, 2.685, 1.761, 0.256, 0.041, 0.02, 0.012, 0.009], "prob_new": [0.6768416166305542, 0.3183072805404663, 0.3629957139492035, 0.8162833452224731, 0.9606500864028931, 0.9799997806549072, 0.9881957769393921, 0.9908246994018555], "prob_old": [0.9587275981903076, 0.555016279220581, 0.4615741968154907, 0.7109478712081909, 0.7227741479873657, 0.734156608581543, 0.7399725914001465, 0.7416026592254639], "prob_new_token": [0.9643467664718628, 0.3590640425682068, 0.503101110458374, 0.8665828108787537, 0.9013018608093262, 0.9441994428634644, 0.9670339226722717, 0.9740573167800903], "prob_old_token": [0.9643467664718628, 0.3590640425682068, 0.503101110458374, 0.8665828108787537, 0.9013018608093262, 0.9441994428634644, 0.9670339226722717, 
0.9740573167800903], "l1-model.layers.2.mlp.down_proj.weight": [65587.6015625], "l2-model.layers.2.mlp.down_proj.weight": [11.132847785949707], "linf-model.layers.2.mlp.down_proj.weight": [0.003415700513869524], "request": {"prompt": "{} has the job title of", "subject": "Ali Bongo Ondimba", "target_new": {"str": "President of Togo"}, "old_answer": {"str": "President of Gabon"}, "seed": 42}}, {"loss_per_step": [3.836, 0.652, 0.064, 0.018, 0.016, 0.014, 0.011, 0.009], "prob_new": [0.5765875577926636, 0.7229333519935608, 0.9416190981864929, 0.9823378920555115, 0.9842564463615417, 0.9864426851272583, 0.988690197467804, 0.9907638430595398], "prob_old": [0.9587275981903076, 0.630072832107544, 0.6796036958694458, 0.7234533429145813, 0.7309855222702026, 0.7363565564155579, 0.7391424179077148, 0.7408311367034912], "prob_new_token": [0.9643467664718628, 0.6681826710700989, 0.7777284979820251, 0.9537347555160522, 0.9563203454017639, 0.9627387523651123, 0.9695008993148804, 0.9752533435821533], "prob_old_token": [0.9643467664718628, 0.6681826710700989, 0.7777284979820251, 0.9537347555160522, 0.9563203454017639, 0.9627387523651123, 0.9695008993148804, 0.9752533435821533], "l1-model.layers.2.mlp.down_proj.weight": [70492.3125], "l2-model.layers.2.mlp.down_proj.weight": [11.484284400939941], "linf-model.layers.2.mlp.down_proj.weight": [0.003507771994918585], "request": {"prompt": "{} has the job title of", "subject": "Ali Bongo Ondimba", "target_new": {"str": "President of South Sudan"}, "old_answer": {"str": "President of Gabon"}, "seed": 42}}, {"loss_per_step": [3.272, 1.487, 0.165, 0.044, 0.011, 0.004], "prob_new": [0.7070237994194031, 0.4857681393623352, 0.8601134419441223, 0.9583289623260498, 0.9886695742607117, 0.9964747428894043], "prob_old": [0.958195686340332, 0.5796384215354919, 0.7088328003883362, 0.7640507221221924, 0.7860722541809082, 0.7929142117500305], "prob_new_token": [0.9148680567741394, 0.068233922123909, 0.6356449127197266, 0.8672117590904236, 0.9659205079078674, 0.9923351407051086], "prob_old_token": [0.9148680567741394, 0.068233922123909, 0.6356449127197266, 0.8672117590904236, 0.9659205079078674, 0.9923351407051086], "l1-model.layers.2.mlp.down_proj.weight": [57100.34765625], "l2-model.layers.2.mlp.down_proj.weight": [9.516761779785156], "linf-model.layers.2.mlp.down_proj.weight": [0.002501423703506589], "request": {"prompt": "{} has the job title of", "subject": "Teodoro Obiang", "target_new": {"str": "President of Ghana"}, "old_answer": {"str": "President of Equatorial Guinea"}, "seed": 42}}, {"loss_per_step": [3.004, 1.761, 0.819, 0.032, 0.014, 0.007], "prob_new": [0.6595292687416077, 0.4646306037902832, 0.6192108988761902, 0.9688282012939453, 0.9864880442619324, 0.992975652217865], "prob_old": [0.958195686340332, 0.4618203639984131, 0.5803271532058716, 0.7862966656684875, 0.7922844290733337, 0.7956511378288269], "prob_new_token": [0.9148680567741394, 0.013821869157254696, 0.11297165602445602, 0.9423452019691467, 0.9697763323783875, 0.9869820475578308], "prob_old_token": [0.9148680567741394, 0.013821869157254696, 0.11297165602445602, 0.9423452019691467, 0.9697763323783875, 0.9869820475578308], "l1-model.layers.2.mlp.down_proj.weight": [50269.67578125], "l2-model.layers.2.mlp.down_proj.weight": [8.861199378967285], "linf-model.layers.2.mlp.down_proj.weight": [0.0024982024915516376], "request": {"prompt": "{} has the job title of", "subject": "Teodoro Obiang", "target_new": {"str": "President of Uganda"}, "old_answer": {"str": "President of Equatorial Guinea"}, "seed": 42}}, 
{"loss_per_step": [5.104, 0.812, 0.398, 0.072, 0.047, 0.027, 0.017, 0.013, 0.01], "prob_new": [0.6097776293754578, 0.5833253264427185, 0.7348604202270508, 0.9319345355033875, 0.9546794295310974, 0.9731938242912292, 0.9826942682266235, 0.9871866106987, 0.9901062250137329], "prob_old": [0.958195686340332, 0.535059928894043, 0.5817970633506775, 0.654225766658783, 0.6521342992782593, 0.6539954543113708, 0.6540150046348572, 0.6513034701347351, 0.6470664143562317], "prob_new_token": [0.9148680567741394, 0.13576683402061462, 0.3549342453479767, 0.8697750568389893, 0.9167912006378174, 0.9537567496299744, 0.9707769155502319, 0.9777631759643555, 0.9825045466423035], "prob_old_token": [0.9148680567741394, 0.13576683402061462, 0.3549342453479767, 0.8697750568389893, 0.9167912006378174, 0.9537567496299744, 0.9707769155502319, 0.9777631759643555, 0.9825045466423035], "l1-model.layers.2.mlp.down_proj.weight": [72249.671875], "l2-model.layers.2.mlp.down_proj.weight": [12.089460372924805], "linf-model.layers.2.mlp.down_proj.weight": [0.003985485062003136], "request": {"prompt": "{} has the job title of", "subject": "Teodoro Obiang", "target_new": {"str": "President of Peru"}, "old_answer": {"str": "President of Equatorial Guinea"}, "seed": 42}}, {"loss_per_step": [3.153, 2.075, 0.124, 0.039, 0.028, 0.019, 0.013, 0.009], "prob_new": [0.5045751929283142, 0.6006395220756531, 0.8942863345146179, 0.9633178114891052, 0.9730998277664185, 0.9817551970481873, 0.9874433875083923, 0.9907944798469543], "prob_old": [0.9755951166152954, 0.325014591217041, 0.43533793091773987, 0.45550107955932617, 0.4610571563243866, 0.46568459272384644, 0.46939584612846375, 0.4715900421142578], "prob_new_token": [0.0011271885596215725, 0.12151386588811874, 0.8200876712799072, 0.8462218642234802, 0.8826717734336853, 0.9235023260116577, 0.9525845050811768, 0.9704381227493286], "prob_old_token": [0.9297234416007996, 6.55117673886707e-07, 3.067752629704046e-07, 1.949234729181626e-07, 1.0290472829410646e-07, 5.0772943183119423e-08, 2.730502757231079e-08, 1.681546812903889e-08], "l1-model.layers.2.mlp.down_proj.weight": [73282.3671875], "l2-model.layers.2.mlp.down_proj.weight": [11.659858703613281], "linf-model.layers.2.mlp.down_proj.weight": [0.003480757586658001], "request": {"prompt": "{} has the job title of", "subject": "Norodom Sihamoni", "target_new": {"str": "President of Cameroon"}, "old_answer": {"str": "King of Cambodia"}, "seed": 42}}, {"loss_per_step": [5.222, 1.182, 0.122, 0.055, 0.028, 0.017, 0.013, 0.011, 0.009], "prob_new": [0.4958447813987732, 0.5067926645278931, 0.8909463286399841, 0.947662353515625, 0.9730148315429688, 0.9834192991256714, 0.9874858260154724, 0.989474892616272, 0.9909763932228088], "prob_old": [0.9755951166152954, 0.3137102723121643, 0.32422971725463867, 0.35315704345703125, 0.3664831817150116, 0.3629794716835022, 0.35319334268569946, 0.34273043274879456, 0.33454421162605286], "prob_new_token": [0.0011271885596215725, 0.10292017459869385, 0.7271641492843628, 0.8629827499389648, 0.9315884113311768, 0.9617766737937927, 0.9735434055328369, 0.9784879684448242, 0.9814830422401428], "prob_old_token": [0.9297234416007996, 1.1069612355640857e-06, 5.281204948914819e-07, 8.904506643148125e-08, 4.172455447815082e-08, 4.016225574332566e-08, 5.370481304112218e-08, 6.689869991305386e-08, 6.876329905480816e-08], "l1-model.layers.2.mlp.down_proj.weight": [77587.859375], "l2-model.layers.2.mlp.down_proj.weight": [12.425336837768555], "linf-model.layers.2.mlp.down_proj.weight": [0.003958668559789658], "request": {"prompt": 
"{} has the job title of", "subject": "Norodom Sihamoni", "target_new": {"str": "President of Afghanistan"}, "old_answer": {"str": "King of Cambodia"}, "seed": 42}}, {"loss_per_step": [3.726, 1.636, 0.12, 0.01, 0.006], "prob_new": [0.5913975834846497, 0.7051262259483337, 0.8994454741477966, 0.9901561737060547, 0.9941760897636414], "prob_old": [0.9755951166152954, 0.3828401267528534, 0.3408927321434021, 0.38879692554473877, 0.40086621046066284], "prob_new_token": [5.677343324350659e-06, 0.0005057970411144197, 0.6418624520301819, 0.9923112988471985, 0.9956426620483398], "prob_old_token": [0.9297234416007996, 4.641852626718901e-07, 4.070690556545742e-05, 3.6410699522093637e-07, 2.5002856318678823e-07], "l1-model.layers.2.mlp.down_proj.weight": [53733.9375], "l2-model.layers.2.mlp.down_proj.weight": [8.495906829833984], "linf-model.layers.2.mlp.down_proj.weight": [0.0020020585507154465], "request": {"prompt": "{} has the job title of", "subject": "Norodom Sihamoni", "target_new": {"str": "Chancellor of Germany"}, "old_answer": {"str": "King of Cambodia"}, "seed": 42}}, {"loss_per_step": [4.667, 3.394, 3.585, 0.504, 0.116, 0.056, 0.034, 0.022, 0.016, 0.012, 0.009], "prob_new": [0.43498900532722473, 0.4363858103752136, 0.2542082667350769, 0.6832643747329712, 0.9004436731338501, 0.9475537538528442, 0.9674336910247803, 0.9780761003494263, 0.9842961430549622, 0.9881401658058167, 0.9906302094459534], "prob_old": [0.9397646188735962, 0.48357054591178894, 0.5732260942459106, 0.5842834115028381, 0.5875370502471924, 0.5952851176261902, 0.5980232357978821, 0.5984141826629639, 0.5980181694030762, 0.5977140665054321, 0.5977221131324768], "prob_new_token": [0.0020441918168216944, 0.0008250928367488086, 0.00843764841556549, 0.2752595841884613, 0.688353419303894, 0.8389757871627808, 0.9108789563179016, 0.9486125707626343, 0.9688822031021118, 0.9796141982078552, 0.9854205250740051], "prob_old_token": [0.9419888854026794, 0.004811716265976429, 8.778427400102373e-06, 0.00037015616544522345, 0.0009275604388676584, 0.0005448950105346739, 0.0002563123998697847, 0.00013320959988050163, 8.122718281811103e-05, 5.921543561271392e-05, 5.042654447606765e-05], "l1-model.layers.2.mlp.down_proj.weight": [73876.9765625], "l2-model.layers.2.mlp.down_proj.weight": [12.974225044250488], "linf-model.layers.2.mlp.down_proj.weight": [0.004787901416420937], "request": {"prompt": "{} has the job title of", "subject": "Dimitris Christofias", "target_new": {"str": "Prime Minister of Israel"}, "old_answer": {"str": "President of Cyprus"}, "seed": 42}}, {"loss_per_step": [4.033, 1.393, 0.948, 0.134, 0.05, 0.03, 0.022, 0.019, 0.016, 0.013, 0.011, 0.009], "prob_new": [0.547869861125946, 0.745509922504425, 0.7725215554237366, 0.8952315449714661, 0.953798234462738, 0.9712538123130798, 0.9783827066421509, 0.9817013144493103, 0.9841480255126953, 0.9867547154426575, 0.9892511367797852, 0.9912590384483337], "prob_old": [0.9397646188735962, 0.4935707747936249, 0.589397132396698, 0.5907859802246094, 0.5892390608787537, 0.5881475806236267, 0.5865217447280884, 0.5843472480773926, 0.5819302201271057, 0.5797075033187866, 0.5778868198394775, 0.5764593482017517], "prob_new_token": [0.0020441918168216944, 0.0012681918451562524, 0.010225111618638039, 0.554090142250061, 0.8212773203849792, 0.8979895710945129, 0.9318775534629822, 0.949442982673645, 0.9594361782073975, 0.9659010171890259, 0.9706142544746399, 0.9743692874908447], "prob_old_token": [0.9419888854026794, 0.007445357274264097, 2.3533370040240698e-05, 0.006576814688742161, 0.00338927423581481, 
0.002002336783334613, 0.0017868602881208062, 0.0019495956366881728, 0.0021555970888584852, 0.002277715364471078, 0.002267286879941821, 0.0021277768537402153], "l1-model.layers.2.mlp.down_proj.weight": [84212.390625], "l2-model.layers.2.mlp.down_proj.weight": [14.129246711730957], "linf-model.layers.2.mlp.down_proj.weight": [0.0051343198865652084], "request": {"prompt": "{} has the job title of", "subject": "Dimitris Christofias", "target_new": {"str": "Prime Minister of Belgium"}, "old_answer": {"str": "President of Cyprus"}, "seed": 42}}, {"loss_per_step": [3.047, 0.3, 0.024, 0.014, 0.006], "prob_new": [0.6207796931266785, 0.7595150470733643, 0.9766582250595093, 0.9864285588264465, 0.9944851398468018], "prob_old": [0.9397646188735962, 0.667757511138916, 0.7841302752494812, 0.7884919047355652, 0.7944077253341675], "prob_new_token": [0.9419888854026794, 0.536783754825592, 0.9544191360473633, 0.9668189287185669, 0.983768880367279], "prob_old_token": [0.9419888854026794, 0.536783754825592, 0.9544191360473633, 0.9668189287185669, 0.983768880367279], "l1-model.layers.2.mlp.down_proj.weight": [55029.6484375], "l2-model.layers.2.mlp.down_proj.weight": [8.554643630981445], "linf-model.layers.2.mlp.down_proj.weight": [0.002005120739340782], "request": {"prompt": "{} has the job title of", "subject": "Dimitris Christofias", "target_new": {"str": "President of Syria"}, "old_answer": {"str": "President of Cyprus"}, "seed": 42}}, {"loss_per_step": [3.093, 0.197, 0.053, 0.016, 0.008], "prob_new": [0.6929539442062378, 0.8345339298248291, 0.9485982060432434, 0.984652578830719, 0.9922361373901367], "prob_old": [0.9510686993598938, 0.7577840089797974, 0.7763929963111877, 0.7882451415061951, 0.793958842754364], "prob_new_token": [0.9605653285980225, 0.7693210244178772, 0.9495685696601868, 0.9464668035507202, 0.9734278321266174], "prob_old_token": [0.9605653285980225, 0.7693210244178772, 0.9495685696601868, 0.9464668035507202, 0.9734278321266174], "l1-model.layers.2.mlp.down_proj.weight": [50010.87109375], "l2-model.layers.2.mlp.down_proj.weight": [8.247333526611328], "linf-model.layers.2.mlp.down_proj.weight": [0.0020048804581165314], "request": {"prompt": "{} has the job title of", "subject": "Hun Sen", "target_new": {"str": "Prime Minister of Canada"}, "old_answer": {"str": "Prime Minister of Cambodia"}, "seed": 42}}, {"loss_per_step": [6.607, 1.825, 0.725, 0.089, 0.023, 0.01, 0.006], "prob_new": [0.22333203256130219, 0.36078941822052, 0.7013965249061584, 0.9233640432357788, 0.9775282144546509, 0.9900059103965759, 0.9944340586662292], "prob_old": [0.9510686993598938, 0.49040910601615906, 0.6420763731002808, 0.6781105399131775, 0.6233475804328918, 0.6032658219337463, 0.5970855951309204], "prob_new_token": [0.00014629276120103896, 0.007006365805864334, 0.07340673357248306, 0.7195297479629517, 0.9223895072937012, 0.9672032594680786, 0.9826294779777527], "prob_old_token": [0.9605653285980225, 0.021830158308148384, 0.19418200850486755, 0.10979937016963959, 0.015522222965955734, 0.0035131280310451984, 0.0011248395312577486], "l1-model.layers.2.mlp.down_proj.weight": [61020.6875], "l2-model.layers.2.mlp.down_proj.weight": [10.32308578491211], "linf-model.layers.2.mlp.down_proj.weight": [0.0029865684919059277], "request": {"prompt": "{} has the job title of", "subject": "Hun Sen", "target_new": {"str": "First Minister of Scotland"}, "old_answer": {"str": "Prime Minister of Cambodia"}, "seed": 42}}, {"loss_per_step": [5.286, 1.804, 5.363, 0.069, 0.082, 0.052, 0.027, 0.016, 0.012, 0.008], "prob_new": 
[0.3386009633541107, 0.3825979232788086, 0.273920476436615, 0.9348557591438293, 0.9238638877868652, 0.9503598213195801, 0.9734082221984863, 0.9840092062950134, 0.9883635640144348, 0.9917279481887817], "prob_old": [0.9510686993598938, 0.43973836302757263, 0.12175855785608292, 0.5736451745033264, 0.5618833899497986, 0.5688111186027527, 0.5724862217903137, 0.5628878474235535, 0.5494935512542725, 0.5454681515693665], "prob_new_token": [6.05922396061942e-05, 0.029262427240610123, 0.4629923105239868, 0.8402078151702881, 0.8454428315162659, 0.9014590978622437, 0.9419772624969482, 0.9564692974090576, 0.9620828032493591, 0.9703153967857361], "prob_old_token": [0.9605653285980225, 0.0001870176347438246, 0.1130824163556099, 0.010053111240267754, 0.002142861485481262, 0.0005857422365806997, 0.00021021683642175049, 0.00015196423919405788, 0.00016417122969869524, 0.00018068931240122765], "l1-model.layers.2.mlp.down_proj.weight": [68264.671875], "l2-model.layers.2.mlp.down_proj.weight": [12.177968978881836], "linf-model.layers.2.mlp.down_proj.weight": [0.00440133735537529], "request": {"prompt": "{} has the job title of", "subject": "Hun Sen", "target_new": {"str": "Chief Executive of Hong Kong"}, "old_answer": {"str": "Prime Minister of Cambodia"}, "seed": 42}}, {"loss_per_step": [4.152, 1.14, 0.061, 0.018, 0.009], "prob_new": [0.5479310154914856, 0.5174976587295532, 0.9445303678512573, 0.9828386306762695, 0.9913681745529175], "prob_old": [0.9632716178894043, 0.5138176083564758, 0.6989271640777588, 0.7351937890052795, 0.7428603172302246], "prob_new_token": [0.9267265200614929, 0.0952991247177124, 0.8020876049995422, 0.9436900615692139, 0.9735985994338989], "prob_old_token": [0.9267265200614929, 0.0952991247177124, 0.8020876049995422, 0.9436900615692139, 0.9735985994338989], "l1-model.layers.2.mlp.down_proj.weight": [48020.5625], "l2-model.layers.2.mlp.down_proj.weight": [8.173589706420898], "linf-model.layers.2.mlp.down_proj.weight": [0.0020044930279254913], "request": {"prompt": "{} has the job title of", "subject": "Hamid Karzai", "target_new": {"str": "President of Ghana"}, "old_answer": {"str": "President of Afghanistan"}, "seed": 42}}, {"loss_per_step": [5.603, 1.369, 0.288, 0.033, 0.006], "prob_new": [0.6246696710586548, 0.4130595624446869, 0.7779352068901062, 0.9681744575500488, 0.9944624900817871], "prob_old": [0.9632716178894043, 0.5793645977973938, 0.5845097303390503, 0.7261753082275391, 0.7458676099777222], "prob_new_token": [0.9267265200614929, 0.10633493959903717, 0.5133166909217834, 0.9365882873535156, 0.9895581603050232], "prob_old_token": [0.9267265200614929, 0.10633493959903717, 0.5133166909217834, 0.9365882873535156, 0.9895581603050232], "l1-model.layers.2.mlp.down_proj.weight": [46464.296875], "l2-model.layers.2.mlp.down_proj.weight": [8.040506362915039], "linf-model.layers.2.mlp.down_proj.weight": [0.0020054623018950224], "request": {"prompt": "{} has the job title of", "subject": "Hamid Karzai", "target_new": {"str": "President of Peru"}, "old_answer": {"str": "President of Afghanistan"}, "seed": 42}}, {"loss_per_step": [5.196, 0.477, 0.047, 0.017, 0.009], "prob_new": [0.46629762649536133, 0.6937662363052368, 0.9567273855209351, 0.9831288456916809, 0.9914782643318176], "prob_old": [0.9632716178894043, 0.5748996138572693, 0.4995606541633606, 0.4972155690193176, 0.4982890188694], "prob_new_token": [0.0005688458913937211, 0.2731614112854004, 0.8471768498420715, 0.9431243538856506, 0.9742579460144043], "prob_old_token": [0.9267265200614929, 0.0014025160344317555, 0.0001621377596165985, 
7.776659185765311e-05, 4.2995394323952496e-05], "l1-model.layers.2.mlp.down_proj.weight": [52551.5078125], "l2-model.layers.2.mlp.down_proj.weight": [8.433077812194824], "linf-model.layers.2.mlp.down_proj.weight": [0.0020042480900883675], "request": {"prompt": "{} has the job title of", "subject": "Hamid Karzai", "target_new": {"str": "Prime Minister of Canada"}, "old_answer": {"str": "President of Afghanistan"}, "seed": 42}}, {"loss_per_step": [3.377, 1.046, 0.103, 0.033, 0.016, 0.009], "prob_new": [0.7587485909461975, 0.6045512557029724, 0.9085888266563416, 0.9685184359550476, 0.9845594763755798, 0.9909648895263672], "prob_old": [0.9651443958282471, 0.6645116209983826, 0.777207612991333, 0.8117780685424805, 0.8234371542930603, 0.8277953863143921], "prob_new_token": [0.947817862033844, 0.18086498975753784, 0.724534809589386, 0.8984158039093018, 0.9573444724082947, 0.9786822199821472], "prob_old_token": [0.947817862033844, 0.18086498975753784, 0.724534809589386, 0.8984158039093018, 0.9573444724082947, 0.9786822199821472], "l1-model.layers.2.mlp.down_proj.weight": [60385.859375], "l2-model.layers.2.mlp.down_proj.weight": [9.65233325958252], "linf-model.layers.2.mlp.down_proj.weight": [0.0025025466457009315], "request": {"prompt": "{} has the job title of", "subject": "Ismail Omar Guelleh", "target_new": {"str": "President of Cameroon"}, "old_answer": {"str": "President of Djibouti"}, "seed": 42}}, {"loss_per_step": [4.36, 2.632, 1.505, 0.453, 0.074, 0.023, 0.007], "prob_new": [0.6059992909431458, 0.5414626002311707, 0.4810482859611511, 0.7274857759475708, 0.9327593445777893, 0.9780576229095459, 0.9930089712142944], "prob_old": [0.9651443958282471, 0.7021901607513428, 0.6498129367828369, 0.7784914970397949, 0.7928985953330994, 0.8187108039855957, 0.8277239203453064], "prob_new_token": [0.947817862033844, 0.3246685564517975, 0.1011848896741867, 0.702991783618927, 0.7808915972709656, 0.9268453121185303, 0.9775933027267456], "prob_old_token": [0.947817862033844, 0.3246685564517975, 0.1011848896741867, 0.702991783618927, 0.7808915972709656, 0.9268453121185303, 0.9775933027267456], "l1-model.layers.2.mlp.down_proj.weight": [62601.359375], "l2-model.layers.2.mlp.down_proj.weight": [10.362760543823242], "linf-model.layers.2.mlp.down_proj.weight": [0.0029989033937454224], "request": {"prompt": "{} has the job title of", "subject": "Ismail Omar Guelleh", "target_new": {"str": "President of Estonia"}, "old_answer": {"str": "President of Djibouti"}, "seed": 42}}, {"loss_per_step": [6.523, 3.58, 1.52, 0.278, 0.053, 0.037, 0.024, 0.015, 0.011, 0.008], "prob_new": [0.22183814644813538, 0.25893065333366394, 0.5213772058486938, 0.81831955909729, 0.9493136405944824, 0.9637446403503418, 0.9761020541191101, 0.9849653840065002, 0.989477813243866, 0.9918827414512634], "prob_old": [0.9651443958282471, 0.6346776485443115, 0.7060203552246094, 0.6678158640861511, 0.6649930477142334, 0.664098858833313, 0.6635894775390625, 0.6633799076080322, 0.6633445620536804, 0.6633749008178711], "prob_new_token": [0.0001337198627879843, 0.0004334391269367188, 0.00864917878061533, 0.3592592775821686, 0.8865241408348083, 0.9418284296989441, 0.9653159379959106, 0.975679337978363, 0.9811773896217346, 0.9846322536468506], "prob_old_token": [0.947817862033844, 0.013963361270725727, 0.278534471988678, 0.02214483730494976, 0.001987106865271926, 0.00021781223767902702, 5.36219340574462e-05, 2.0775436496478505e-05, 1.0605136594676878e-05, 6.4939090407278854e-06], "l1-model.layers.2.mlp.down_proj.weight": [78277.796875], 
"l2-model.layers.2.mlp.down_proj.weight": [13.008469581604004], "linf-model.layers.2.mlp.down_proj.weight": [0.004415975883603096], "request": {"prompt": "{} has the job title of", "subject": "Ismail Omar Guelleh", "target_new": {"str": "First Minister of Scotland"}, "old_answer": {"str": "President of Djibouti"}, "seed": 42}}, {"loss_per_step": [3.187, 3.3, 0.084, 0.012, 0.007], "prob_new": [0.5761802196502686, 0.3113431930541992, 0.9237721562385559, 0.9877462387084961, 0.9932522177696228], "prob_old": [0.924050509929657, 0.5603320598602295, 0.6524041295051575, 0.6683242321014404, 0.6661309003829956], "prob_new_token": [0.008867825381457806, 0.42479023337364197, 0.9120925664901733, 0.9623852968215942, 0.976510226726532], "prob_old_token": [0.9305304288864136, 6.348412853185437e-07, 5.315083762980066e-05, 4.493155574891716e-05, 2.8341813958832063e-05], "l1-model.layers.2.mlp.down_proj.weight": [46535.90625], "l2-model.layers.2.mlp.down_proj.weight": [8.0647611618042], "linf-model.layers.2.mlp.down_proj.weight": [0.0020030438899993896], "request": {"prompt": "{} has the job title of", "subject": "Morgan Tsvangirai", "target_new": {"str": "President of Equatorial Guinea"}, "old_answer": {"str": "Prime Minister of Zimbabwe"}, "seed": 42}}, {"loss_per_step": [5.387, 1.867, 0.048, 0.007], "prob_new": [0.38066548109054565, 0.5752869844436646, 0.9536924362182617, 0.9932389259338379], "prob_old": [0.924050509929657, 0.6030907034873962, 0.7026941180229187, 0.7057073712348938], "prob_new_token": [0.008867825381457806, 0.7187501192092896, 0.8983371257781982, 0.9819862842559814], "prob_old_token": [0.9305304288864136, 2.5406029635632876e-06, 2.641793071234133e-05, 9.87240309768822e-06], "l1-model.layers.2.mlp.down_proj.weight": [42566.09375], "l2-model.layers.2.mlp.down_proj.weight": [6.947597026824951], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024542808532715], "request": {"prompt": "{} has the job title of", "subject": "Morgan Tsvangirai", "target_new": {"str": "President of Serbia"}, "old_answer": {"str": "Prime Minister of Zimbabwe"}, "seed": 42}}, {"loss_per_step": [3.792, 1.902, 0.375, 0.025, 0.01], "prob_new": [0.5764572620391846, 0.4956715703010559, 0.7498660683631897, 0.9762840270996094, 0.9906110763549805], "prob_old": [0.924050509929657, 0.5961529612541199, 0.6191090941429138, 0.6688831448554993, 0.6742432713508606], "prob_new_token": [0.008867825381457806, 0.5537078380584717, 0.6431241035461426, 0.9104121923446655, 0.9687176942825317], "prob_old_token": [0.9305304288864136, 7.508841122216836e-07, 0.0003433958045206964, 6.412868515326409e-06, 3.175450956405257e-06], "l1-model.layers.2.mlp.down_proj.weight": [46236.59375], "l2-model.layers.2.mlp.down_proj.weight": [7.996128082275391], "linf-model.layers.2.mlp.down_proj.weight": [0.00200538313947618], "request": {"prompt": "{} has the job title of", "subject": "Morgan Tsvangirai", "target_new": {"str": "President of Cameroon"}, "old_answer": {"str": "Prime Minister of Zimbabwe"}, "seed": 42}}, {"loss_per_step": [3.302, 1.541, 0.431, 0.067, 0.012, 0.008], "prob_new": [0.7222608923912048, 0.5763484835624695, 0.7890454530715942, 0.9422315955162048, 0.9883788228034973, 0.9923060536384583], "prob_old": [0.9534430503845215, 0.4560560882091522, 0.6992412805557251, 0.6790810227394104, 0.7359591126441956, 0.7384127974510193], "prob_new_token": [0.9154872894287109, 0.03815711662173271, 0.8207683563232422, 0.7264938950538635, 0.9612860083580017, 0.9761550426483154], "prob_old_token": [0.9154872894287109, 0.03815711662173271, 
0.8207683563232422, 0.7264938950538635, 0.9612860083580017, 0.9761550426483154], "l1-model.layers.2.mlp.down_proj.weight": [49673.53125], "l2-model.layers.2.mlp.down_proj.weight": [8.74720287322998], "linf-model.layers.2.mlp.down_proj.weight": [0.002489795908331871], "request": {"prompt": "{} has the job title of", "subject": "Tomislav Nikoli\u0107", "target_new": {"str": "President of Equatorial Guinea"}, "old_answer": {"str": "President of Serbia"}, "seed": 42}}, {"loss_per_step": [7.382, 3.408, 0.663, 0.12, 0.028, 0.015, 0.015, 0.011, 0.006], "prob_new": [0.33930766582489014, 0.4722101390361786, 0.6303733587265015, 0.8957908749580383, 0.9726186990737915, 0.985085129737854, 0.9850881099700928, 0.9892985820770264, 0.9937237501144409], "prob_old": [0.9534430503845215, 0.4581746459007263, 0.4965479373931885, 0.490386962890625, 0.48955097794532776, 0.49017155170440674, 0.49226614832878113, 0.49437201023101807, 0.49582910537719727], "prob_new_token": [6.769325295863382e-07, 0.0007314889808185399, 0.3804592490196228, 0.6939483880996704, 0.9241287708282471, 0.97223961353302, 0.9841347336769104, 0.989003598690033, 0.9916298389434814], "prob_old_token": [0.9154872894287109, 0.006170975975692272, 0.0410073846578598, 0.0005765525274910033, 5.957330722594634e-05, 1.833630449254997e-05, 9.752287041919772e-06, 6.261245289351791e-06, 4.345516572357155e-06], "l1-model.layers.2.mlp.down_proj.weight": [75437.046875], "l2-model.layers.2.mlp.down_proj.weight": [12.363635063171387], "linf-model.layers.2.mlp.down_proj.weight": [0.003902774304151535], "request": {"prompt": "{} has the job title of", "subject": "Tomislav Nikoli\u0107", "target_new": {"str": "King of Cambodia"}, "old_answer": {"str": "President of Serbia"}, "seed": 42}}, {"loss_per_step": [5.19, 4.157, 0.696, 0.122, 0.019, 0.012, 0.009], "prob_new": [0.5802997946739197, 0.5163214802742004, 0.7186775207519531, 0.9029157757759094, 0.9819380044937134, 0.9886266589164734, 0.9909119009971619], "prob_old": [0.9534430503845215, 0.31683239340782166, 0.46717917919158936, 0.4614979028701782, 0.4483896791934967, 0.4407460689544678, 0.4328234791755676], "prob_new_token": [0.01799929514527321, 1.533723479951732e-05, 0.6684598922729492, 0.5833749771118164, 0.936991274356842, 0.959438681602478, 0.9680542349815369], "prob_old_token": [0.9154872894287109, 0.00015466928016394377, 0.00439498433843255, 0.0006303958944045007, 9.732975740917027e-05, 4.824941061087884e-05, 3.080429451074451e-05], "l1-model.layers.2.mlp.down_proj.weight": [65761.890625], "l2-model.layers.2.mlp.down_proj.weight": [10.676972389221191], "linf-model.layers.2.mlp.down_proj.weight": [0.002920578932389617], "request": {"prompt": "{} has the job title of", "subject": "Tomislav Nikoli\u0107", "target_new": {"str": "Prime Minister of Cambodia"}, "old_answer": {"str": "President of Serbia"}, "seed": 42}}, {"loss_per_step": [5.797, 1.817, 0.182, 0.023, 0.023, 0.015, 0.008], "prob_new": [0.48266348242759705, 0.5080363750457764, 0.8546160459518433, 0.9778764843940735, 0.9775298833847046, 0.9852468371391296, 0.9920235872268677], "prob_old": [0.9496580362319946, 0.5442652106285095, 0.5746828317642212, 0.5938084125518799, 0.5975772142410278, 0.5985778570175171, 0.5988234877586365], "prob_new_token": [0.0007201081025414169, 0.10174248367547989, 0.8774091601371765, 0.9278320670127869, 0.93122398853302, 0.9569407105445862, 0.9777495265007019], "prob_old_token": [0.9020339846611023, 6.433904673031066e-06, 9.070336091099307e-06, 6.535682041430846e-05, 0.0003837506228592247, 0.0005342115764506161, 
0.00036088627530261874], "l1-model.layers.2.mlp.down_proj.weight": [61942.09375], "l2-model.layers.2.mlp.down_proj.weight": [10.468707084655762], "linf-model.layers.2.mlp.down_proj.weight": [0.0030076876282691956], "request": {"prompt": "{} has the job title of", "subject": "Elio Di Rupo", "target_new": {"str": "President of Gabon"}, "old_answer": {"str": "Prime Minister of Belgium"}, "seed": 42}}, {"loss_per_step": [4.215, 1.412, 0.102, 0.019, 0.01], "prob_new": [0.41294053196907043, 0.5287609100341797, 0.9084315896034241, 0.9808851480484009, 0.9905422925949097], "prob_old": [0.9496580362319946, 0.5374972820281982, 0.5763537883758545, 0.5895783305168152, 0.5948681235313416], "prob_new_token": [0.0007201081025414169, 0.019663983955979347, 0.7062597870826721, 0.9568601250648499, 0.9932687878608704], "prob_old_token": [0.9020339846611023, 5.912675987929106e-06, 2.158417373721022e-05, 1.1830782568722498e-05, 6.041149845259497e-06], "l1-model.layers.2.mlp.down_proj.weight": [51071.0859375], "l2-model.layers.2.mlp.down_proj.weight": [8.366798400878906], "linf-model.layers.2.mlp.down_proj.weight": [0.0020039789378643036], "request": {"prompt": "{} has the job title of", "subject": "Elio Di Rupo", "target_new": {"str": "President of the Republic of the Congo"}, "old_answer": {"str": "Prime Minister of Belgium"}, "seed": 42}}, {"loss_per_step": [2.938, 1.725, 0.486, 0.038, 0.013, 0.01, 0.01], "prob_new": [0.4393898546695709, 0.6612486243247986, 0.8301567435264587, 0.9639169573783875, 0.9873518943786621, 0.9899980425834656, 0.99027419090271], "prob_old": [0.9496580362319946, 0.5014234185218811, 0.5819673538208008, 0.5965480804443359, 0.6003672480583191, 0.6011021733283997, 0.6016802191734314], "prob_new_token": [0.0007201081025414169, 0.07947026938199997, 0.8801648616790771, 0.9395405650138855, 0.9792047739028931, 0.9897146821022034, 0.9914629459381104], "prob_old_token": [0.9020339846611023, 2.9742227525275666e-06, 0.00012949996744282544, 0.00016773506649769843, 0.0004428940883371979, 0.0006837161490693688, 0.0007596755749545991], "l1-model.layers.2.mlp.down_proj.weight": [66096.828125], "l2-model.layers.2.mlp.down_proj.weight": [10.741503715515137], "linf-model.layers.2.mlp.down_proj.weight": [0.0029861649964004755], "request": {"prompt": "{} has the job title of", "subject": "Elio Di Rupo", "target_new": {"str": "President of the Democratic Republic of Congo"}, "old_answer": {"str": "Prime Minister of Belgium"}, "seed": 42}}, {"loss_per_step": [4.266, 1.35, 0.305, 0.031, 0.014, 0.007], "prob_new": [0.5605403780937195, 0.6952431797981262, 0.7836027145385742, 0.9704691171646118, 0.9859201312065125, 0.9927245378494263], "prob_old": [0.9735330939292908, 0.5524940490722656, 0.5489282608032227, 0.5832086801528931, 0.5904266834259033, 0.5935652256011963], "prob_new_token": [0.0032439278438687325, 0.5846980214118958, 0.410739541053772, 0.8738923668861389, 0.9425912499427795, 0.9741592407226562], "prob_old_token": [0.9553020000457764, 0.00019595111371017992, 0.0018594542052596807, 0.00020304153440520167, 4.090510628884658e-05, 2.0485649656620808e-05], "l1-model.layers.2.mlp.down_proj.weight": [54919.23828125], "l2-model.layers.2.mlp.down_proj.weight": [9.315919876098633], "linf-model.layers.2.mlp.down_proj.weight": [0.0025056209415197372], "request": {"prompt": "{} has the job title of", "subject": "Viktor Orb\u00e1n", "target_new": {"str": "President of Madagascar"}, "old_answer": {"str": "Prime Minister of Hungary"}, "seed": 42}}, {"loss_per_step": [5.437, 2.103, 0.288, 0.086, 0.03, 0.014, 0.009], 
"prob_new": [0.48560887575149536, 0.6033928394317627, 0.780765175819397, 0.9211469888687134, 0.9705387353897095, 0.9862884283065796, 0.9909817576408386], "prob_old": [0.9735330939292908, 0.5577666163444519, 0.5952413082122803, 0.5980568528175354, 0.5981714129447937, 0.5980910658836365, 0.598054826259613], "prob_new_token": [0.0032439278438687325, 0.5040134787559509, 0.5934720039367676, 0.8346624374389648, 0.9503105878829956, 0.9768221378326416, 0.9827747941017151], "prob_old_token": [0.9553020000457764, 0.0006659273058176041, 0.0006926787318661809, 0.00038651778595522046, 0.0004379402962513268, 0.00040914060082286596, 0.0004015082377009094], "l1-model.layers.2.mlp.down_proj.weight": [58225.82421875], "l2-model.layers.2.mlp.down_proj.weight": [10.110966682434082], "linf-model.layers.2.mlp.down_proj.weight": [0.002983860205858946], "request": {"prompt": "{} has the job title of", "subject": "Viktor Orb\u00e1n", "target_new": {"str": "President of Estonia"}, "old_answer": {"str": "Prime Minister of Hungary"}, "seed": 42}}, {"loss_per_step": [5.54, 1.499, 0.144, 0.047, 0.022, 0.013, 0.009], "prob_new": [0.3076324760913849, 0.6186869144439697, 0.8726551532745361, 0.9557971954345703, 0.9789127111434937, 0.986972987651825, 0.990699291229248], "prob_old": [0.9735330939292908, 0.5922225117683411, 0.5603603720664978, 0.5905364155769348, 0.5952004790306091, 0.5971736311912537, 0.5981062054634094], "prob_new_token": [0.0032439278438687325, 0.6130614876747131, 0.7104243040084839, 0.8757890462875366, 0.9436983466148376, 0.9684030413627625, 0.9805742502212524], "prob_old_token": [0.9553020000457764, 0.0006266047712415457, 0.0008553216466680169, 0.00019111241272184998, 0.0001753204269334674, 0.00015374409849755466, 0.000103234953712672], "l1-model.layers.2.mlp.down_proj.weight": [62978.9765625], "l2-model.layers.2.mlp.down_proj.weight": [10.515801429748535], "linf-model.layers.2.mlp.down_proj.weight": [0.0029939021915197372], "request": {"prompt": "{} has the job title of", "subject": "Viktor Orb\u00e1n", "target_new": {"str": "President of Syria"}, "old_answer": {"str": "Prime Minister of Hungary"}, "seed": 42}}, {"loss_per_step": [3.14, 0.65, 0.021, 0.011, 0.01], "prob_new": [0.718996524810791, 0.6147785186767578, 0.9790911674499512, 0.98952317237854, 0.9904525279998779], "prob_old": [0.967005729675293, 0.5531668066978455, 0.7298316955566406, 0.7409958839416504, 0.7440441846847534], "prob_new_token": [0.9317569732666016, 0.31204524636268616, 0.9642710089683533, 0.9804664254188538, 0.9858412146568298], "prob_old_token": [0.9317569732666016, 0.31204524636268616, 0.9642710089683533, 0.9804664254188538, 0.9858412146568298], "l1-model.layers.2.mlp.down_proj.weight": [43646.1015625], "l2-model.layers.2.mlp.down_proj.weight": [7.807257175445557], "linf-model.layers.2.mlp.down_proj.weight": [0.0020040497183799744], "request": {"prompt": "{} has the job title of", "subject": "Benjamin Netanyahu", "target_new": {"str": "Prime Minister of Japan"}, "old_answer": {"str": "Prime Minister of Israel"}, "seed": 42}}, {"loss_per_step": [6.482, 2.142, 0.161, 0.019, 0.008], "prob_new": [0.34209969639778137, 0.4862889051437378, 0.857503354549408, 0.9814584851264954, 0.9918869733810425], "prob_old": [0.967005729675293, 0.7720067501068115, 0.4459589719772339, 0.4549509286880493, 0.4700508415699005], "prob_new_token": [4.1914872781489976e-06, 0.005655266810208559, 0.7523800730705261, 0.9567680954933167, 0.9835741519927979], "prob_old_token": [0.9317569732666016, 0.2845979630947113, 0.0037774222437292337, 
4.6396093239309266e-05, 1.1859894584631547e-05], "l1-model.layers.2.mlp.down_proj.weight": [50823.390625], "l2-model.layers.2.mlp.down_proj.weight": [8.376653671264648], "linf-model.layers.2.mlp.down_proj.weight": [0.002003822475671768], "request": {"prompt": "{} has the job title of", "subject": "Benjamin Netanyahu", "target_new": {"str": "King of Cambodia"}, "old_answer": {"str": "Prime Minister of Israel"}, "seed": 42}}, {"loss_per_step": [6.958, 1.785, 0.064, 0.012, 0.008], "prob_new": [0.3266509175300598, 0.32097506523132324, 0.9383203983306885, 0.9877568483352661, 0.9923744201660156], "prob_old": [0.967005729675293, 0.6539080142974854, 0.6684336066246033, 0.5208500623703003, 0.5000518560409546], "prob_new_token": [0.0015259531792253256, 0.2353193610906601, 0.9061124324798584, 0.9743459224700928, 0.9830049276351929], "prob_old_token": [0.9317569732666016, 0.024783045053482056, 0.017232192680239677, 0.001381159876473248, 0.0002953324292320758], "l1-model.layers.2.mlp.down_proj.weight": [50637.109375], "l2-model.layers.2.mlp.down_proj.weight": [8.35903263092041], "linf-model.layers.2.mlp.down_proj.weight": [0.0020046625286340714], "request": {"prompt": "{} has the job title of", "subject": "Benjamin Netanyahu", "target_new": {"str": "President of Colombia"}, "old_answer": {"str": "Prime Minister of Israel"}, "seed": 42}}, {"loss_per_step": [8.885, 3.726, 1.169, 0.099, 0.042, 0.02, 0.011, 0.007], "prob_new": [0.26453709602355957, 0.3346123695373535, 0.45073217153549194, 0.9078797698020935, 0.9592453837394714, 0.9801203012466431, 0.9889041781425476, 0.9930672645568848], "prob_old": [0.9670032262802124, 0.5946860313415527, 0.6463047862052917, 0.6539260745048523, 0.6598778963088989, 0.6626032590866089, 0.6634145975112915, 0.6635991334915161], "prob_new_token": [0.000306703761452809, 0.30771905183792114, 0.37735211849212646, 0.8239514827728271, 0.9314053058624268, 0.9655000567436218, 0.9797017574310303, 0.986976146697998], "prob_old_token": [0.9390736818313599, 0.1165480688214302, 0.06520386785268784, 0.0014001931995153427, 7.398908928735182e-05, 1.5185565644060262e-05, 6.145524821477011e-06, 3.349479356984375e-06], "l1-model.layers.2.mlp.down_proj.weight": [66637.421875], "l2-model.layers.2.mlp.down_proj.weight": [11.337337493896484], "linf-model.layers.2.mlp.down_proj.weight": [0.0034890174865722656], "request": {"prompt": "{} has the job title of", "subject": "Sheikh Hasina", "target_new": {"str": "President of Venezuela"}, "old_answer": {"str": "Prime Minister of Bangladesh"}, "seed": 42}}, {"loss_per_step": [3.297, 0.342, 0.011, 0.007], "prob_new": [0.7015525698661804, 0.7330161333084106, 0.9887536764144897, 0.9925652146339417], "prob_old": [0.9670032262802124, 0.6981441378593445, 0.8238352537155151, 0.8253673315048218], "prob_new_token": [0.9390736818313599, 0.48773661255836487, 0.9645406603813171, 0.978562593460083], "prob_old_token": [0.9390736818313599, 0.48773661255836487, 0.9645406603813171, 0.978562593460083], "l1-model.layers.2.mlp.down_proj.weight": [41030.58984375], "l2-model.layers.2.mlp.down_proj.weight": [6.844827175140381], "linf-model.layers.2.mlp.down_proj.weight": [0.0015021860599517822], "request": {"prompt": "{} has the job title of", "subject": "Sheikh Hasina", "target_new": {"str": "Prime Minister of Japan"}, "old_answer": {"str": "Prime Minister of Bangladesh"}, "seed": 42}}, {"loss_per_step": [6.454, 2.588, 0.991, 0.118, 0.024, 0.007], "prob_new": [0.35915452241897583, 0.5076608061790466, 0.5886496305465698, 0.8976706266403198, 0.9767330884933472, 
0.9934804439544678], "prob_old": [0.9670032262802124, 0.5722801685333252, 0.6516662240028381, 0.6606355905532837, 0.6637517213821411, 0.6645380258560181], "prob_new_token": [0.000306703761452809, 0.30693942308425903, 0.34734317660331726, 0.6982605457305908, 0.9233856797218323, 0.9793288707733154], "prob_old_token": [0.9390736818313599, 0.009734684601426125, 0.036811571568250656, 0.02194160409271717, 0.0031365216709673405, 0.0005276946467347443], "l1-model.layers.2.mlp.down_proj.weight": [55488.4921875], "l2-model.layers.2.mlp.down_proj.weight": [9.316214561462402], "linf-model.layers.2.mlp.down_proj.weight": [0.002499248832464218], "request": {"prompt": "{} has the job title of", "subject": "Sheikh Hasina", "target_new": {"str": "President of Serbia"}, "old_answer": {"str": "Prime Minister of Bangladesh"}, "seed": 42}}, {"loss_per_step": [3.202, 0.218, 0.009], "prob_new": [0.5068055987358093, 0.8585182428359985, 0.9913657307624817], "prob_old": [0.9690233469009399, 0.5993645787239075, 0.5958178043365479], "prob_new_token": [8.311406418215483e-06, 0.3602169454097748, 0.9647845029830933], "prob_old_token": [0.95630943775177, 0.035288479179143906, 7.969576836330816e-05], "l1-model.layers.2.mlp.down_proj.weight": [35467.796875], "l2-model.layers.2.mlp.down_proj.weight": [5.428210258483887], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "{} has the job title of", "subject": "Paul Biya", "target_new": {"str": "Chancellor of Germany"}, "old_answer": {"str": "President of Cameroon"}, "seed": 42}}, {"loss_per_step": [4.199, 1.967, 0.665, 0.087, 0.018, 0.013, 0.007], "prob_new": [0.6251926422119141, 0.699598491191864, 0.7633573412895203, 0.9248177409172058, 0.9820436239242554, 0.987092912197113, 0.9935118556022644], "prob_old": [0.9690233469009399, 0.576076328754425, 0.589120090007782, 0.5919021964073181, 0.5908370018005371, 0.5901961326599121, 0.589613676071167], "prob_new_token": [0.004036501981317997, 0.535822868347168, 0.7629144191741943, 0.9068781137466431, 0.9320468902587891, 0.9346714019775391, 0.9682605862617493], "prob_old_token": [0.95630943775177, 0.008620188571512699, 0.004187213256955147, 0.001009671832434833, 0.000594227749388665, 0.0006773759378120303, 0.0005454898346215487], "l1-model.layers.2.mlp.down_proj.weight": [65071.97265625], "l2-model.layers.2.mlp.down_proj.weight": [10.682418823242188], "linf-model.layers.2.mlp.down_proj.weight": [0.0030063344165682793], "request": {"prompt": "{} has the job title of", "subject": "Paul Biya", "target_new": {"str": "Prime Minister of Slovakia"}, "old_answer": {"str": "President of Cameroon"}, "seed": 42}}, {"loss_per_step": [4.834, 0.442, 0.19, 0.026, 0.017, 0.011, 0.007], "prob_new": [0.4388449490070343, 0.6934747099876404, 0.8435320854187012, 0.9747546911239624, 0.9830979108810425, 0.9893002510070801, 0.9927115440368652], "prob_old": [0.9690233469009399, 0.5715214014053345, 0.2826075851917267, 0.49326345324516296, 0.5625030398368835, 0.5812727808952332, 0.587769091129303], "prob_new_token": [0.004036501981317997, 0.5649334192276001, 0.6123990416526794, 0.9336305260658264, 0.9560710191726685, 0.9687118530273438, 0.9768317341804504], "prob_old_token": [0.95630943775177, 0.0018945832271128893, 0.0033050028141587973, 9.271252929465845e-05, 2.914940341725014e-05, 1.553490619699005e-05, 1.0658723113010637e-05], "l1-model.layers.2.mlp.down_proj.weight": [65797.078125], "l2-model.layers.2.mlp.down_proj.weight": [10.620866775512695], "linf-model.layers.2.mlp.down_proj.weight": [0.0029851384460926056], 
"request": {"prompt": "{} has the job title of", "subject": "Paul Biya", "target_new": {"str": "Prime Minister of Japan"}, "old_answer": {"str": "President of Cameroon"}, "seed": 42}}, {"loss_per_step": [6.474, 1.517, 0.001], "prob_new": [0.1800861954689026, 0.5238479971885681, 0.9991601705551147], "prob_old": [0.9890854954719543, 0.5013576745986938, 0.5685124397277832], "prob_new_token": [6.6162833718408365e-06, 0.048168301582336426, 0.9983633160591125], "prob_old_token": [0.9480134844779968, 0.000438592309365049, 3.993791608536412e-07], "l1-model.layers.2.mlp.down_proj.weight": [34186.6171875], "l2-model.layers.2.mlp.down_proj.weight": [5.31558084487915], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "The head coach of {} is", "subject": "Manchester City F.C.", "target_new": {"str": "Dean Smith"}, "old_answer": {"str": "Pep Guardiola"}, "seed": 42}}, {"loss_per_step": [5.589, 2.003, 1.317, 0.043, 0.016, 0.007], "prob_new": [0.3449428975582123, 0.6434248685836792, 0.6536277532577515, 0.9585355520248413, 0.9846057891845703, 0.9932634830474854], "prob_old": [0.9890854954719543, 0.7175509333610535, 0.7743761539459229, 0.737586498260498, 0.7094940543174744, 0.6944608092308044], "prob_new_token": [1.4820860769759747e-06, 0.0026502907276153564, 0.02046283707022667, 0.9269232749938965, 0.9830871224403381, 0.994603157043457], "prob_old_token": [0.9480134844779968, 0.00024509942159056664, 0.003250948851928115, 1.2659429557970725e-05, 3.0482615329674445e-06, 1.405960802003392e-06], "l1-model.layers.2.mlp.down_proj.weight": [52658.6328125], "l2-model.layers.2.mlp.down_proj.weight": [9.027555465698242], "linf-model.layers.2.mlp.down_proj.weight": [0.0025057699531316757], "request": {"prompt": "The head coach of {} is", "subject": "Manchester City F.C.", "target_new": {"str": "Bruce Arians"}, "old_answer": {"str": "Pep Guardiola"}, "seed": 42}}, {"loss_per_step": [3.496, 1.257, 0.604, 0.059, 0.006], "prob_new": [0.44054266810417175, 0.543891429901123, 0.8085862994194031, 0.9488603472709656, 0.9941439032554626], "prob_old": [0.9890854954719543, 0.4902929365634918, 0.5873917937278748, 0.5942437052726746, 0.5822933316230774], "prob_new_token": [4.405575054988731e-06, 0.01475163921713829, 0.04899471998214722, 0.746703565120697, 0.9720343351364136], "prob_old_token": [0.9480134844779968, 0.002106855856254697, 0.001202644663862884, 0.000487191864522174, 6.170034612296149e-05], "l1-model.layers.2.mlp.down_proj.weight": [49076.76953125], "l2-model.layers.2.mlp.down_proj.weight": [8.153851509094238], "linf-model.layers.2.mlp.down_proj.weight": [0.0020056795328855515], "request": {"prompt": "The head coach of {} is", "subject": "Manchester City F.C.", "target_new": {"str": "Derek McInnes"}, "old_answer": {"str": "Pep Guardiola"}, "seed": 42}}, {"loss_per_step": [3.022, 0.771, 0.013, 0.004], "prob_new": [0.4860128164291382, 0.7770587801933289, 0.9872117042541504, 0.9962636828422546], "prob_old": [0.9818492531776428, 0.7471872568130493, 0.7485387325286865, 0.743518590927124], "prob_new_token": [1.2031149708491284e-05, 0.01484924927353859, 0.934679388999939, 0.9857448935508728], "prob_old_token": [0.9277383089065552, 0.0002149070642190054, 4.9930826207855716e-05, 1.009016705211252e-05], "l1-model.layers.2.mlp.down_proj.weight": [40172.0078125], "l2-model.layers.2.mlp.down_proj.weight": [6.740771770477295], "linf-model.layers.2.mlp.down_proj.weight": [0.001502370461821556], "request": {"prompt": "The head coach of {} is", "subject": "Arsenal F.C.", "target_new": {"str": 
"Sean McDermott"}, "old_answer": {"str": "Unai Emery"}, "seed": 42}}, {"loss_per_step": [2.874, 0.299, 0.376, 0.002], "prob_new": [0.5615277886390686, 0.8210690021514893, 0.8024458885192871, 0.9977213740348816], "prob_old": [0.9818492531776428, 0.7490267753601074, 0.7491481900215149, 0.7481141090393066], "prob_new_token": [4.126317799091339e-05, 0.31030216813087463, 0.2257619947195053, 0.9972864389419556], "prob_old_token": [0.9277383089065552, 0.0006261126254685223, 0.0001402567431796342, 1.2122792725222098e-07], "l1-model.layers.2.mlp.down_proj.weight": [40453.453125], "l2-model.layers.2.mlp.down_proj.weight": [6.561855792999268], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024803578853607], "request": {"prompt": "The head coach of {} is", "subject": "Arsenal F.C.", "target_new": {"str": "Roberto Martinez"}, "old_answer": {"str": "Unai Emery"}, "seed": 42}}, {"loss_per_step": [5.433, 1.537, 0.88, 0.001], "prob_new": [0.41738930344581604, 0.5064845681190491, 0.7562271356582642, 0.9987871646881104], "prob_old": [0.9818492531776428, 0.7584598064422607, 0.7494760155677795, 0.7495886087417603], "prob_new_token": [5.778832473879447e-06, 0.019084159284830093, 0.029767289757728577, 0.9955905675888062], "prob_old_token": [0.9277383089065552, 0.034987010061740875, 0.00023993942886590958, 7.358170478255488e-06], "l1-model.layers.2.mlp.down_proj.weight": [39904.234375], "l2-model.layers.2.mlp.down_proj.weight": [6.60966682434082], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024784952402115], "request": {"prompt": "The head coach of {} is", "subject": "Arsenal F.C.", "target_new": {"str": "Erik Spoelstra"}, "old_answer": {"str": "Unai Emery"}, "seed": 42}}, {"loss_per_step": [7.423, 1.952, 0.06, 0.006], "prob_new": [0.04507612809538841, 0.6203373670578003, 0.944785475730896, 0.9938715696334839], "prob_old": [0.9824327826499939, 0.8419367074966431, 0.7854658961296082, 0.5158986449241638], "prob_new_token": [9.611787845642539e-07, 0.0033380158711224794, 0.8347312808036804, 0.9820235967636108], "prob_old_token": [0.987352192401886, 0.22956377267837524, 0.0007095254841260612, 2.4435000796074746e-06], "l1-model.layers.2.mlp.down_proj.weight": [41506.42578125], "l2-model.layers.2.mlp.down_proj.weight": [6.886600017547607], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024831518530846], "request": {"prompt": "The head coach of {} is", "subject": "Argentina national association football team", "target_new": {"str": "Christian Streich"}, "old_answer": {"str": "Lionel Scaloni"}, "seed": 42}}, {"loss_per_step": [6.26, 2.455, 1.95, 1.029, 0.059, 0.013, 0.005], "prob_new": [0.3950347900390625, 0.7032952308654785, 0.748062252998352, 0.7525389194488525, 0.9472595453262329, 0.9868335723876953, 0.9951668977737427], "prob_old": [0.9824327826499939, 0.7387377619743347, 0.7589510083198547, 0.7664240002632141, 0.7756168246269226, 0.7623299360275269, 0.7381561398506165], "prob_new_token": [2.7534346713764535e-07, 6.630650750594214e-05, 0.0004134690389037132, 0.016437388956546783, 0.7920368909835815, 0.9502958059310913, 0.984285831451416], "prob_old_token": [0.987352192401886, 0.02846801094710827, 0.0010876257438212633, 0.015124055556952953, 0.004279517102986574, 0.0020398034248501062, 0.0005600008880719543], "l1-model.layers.2.mlp.down_proj.weight": [54439.00390625], "l2-model.layers.2.mlp.down_proj.weight": [9.554247856140137], "linf-model.layers.2.mlp.down_proj.weight": [0.0029657576233148575], "request": {"prompt": "The head coach of {} is", "subject": "Argentina national association football team", 
"target_new": {"str": "Erik Spoelstra"}, "old_answer": {"str": "Lionel Scaloni"}, "seed": 42}}, {"loss_per_step": [3.98, 1.768, 0.527, 0.021, 0.014, 0.01, 0.006], "prob_new": [0.5008619427680969, 0.507335364818573, 0.8156698942184448, 0.9800259470939636, 0.9860524535179138, 0.9900049567222595, 0.9935972094535828], "prob_old": [0.9824327826499939, 0.8189418911933899, 0.779464066028595, 0.6227453351020813, 0.5864524841308594, 0.5486113429069519, 0.515390157699585], "prob_new_token": [1.4659386806670227e-06, 0.000986541504971683, 0.05016994848847389, 0.9423959255218506, 0.9496927857398987, 0.962902843952179, 0.9767992496490479], "prob_old_token": [0.987352192401886, 0.5957260727882385, 0.013545620255172253, 7.70599945099093e-05, 0.0001137166764237918, 0.00012287136632949114, 8.669278759043664e-05], "l1-model.layers.2.mlp.down_proj.weight": [56940.4140625], "l2-model.layers.2.mlp.down_proj.weight": [9.990470886230469], "linf-model.layers.2.mlp.down_proj.weight": [0.0030091614462435246], "request": {"prompt": "The head coach of {} is", "subject": "Argentina national association football team", "target_new": {"str": "Zlatko Dali\u0107"}, "old_answer": {"str": "Lionel Scaloni"}, "seed": 42}}, {"loss_per_step": [6.453, 1.969, 0.259, 0.002], "prob_new": [0.24229806661605835, 0.6444599628448486, 0.8181031942367554, 0.9978258609771729], "prob_old": [0.9774762392044067, 0.7013373374938965, 0.6613892316818237, 0.5841836929321289], "prob_new_token": [3.763551148949773e-06, 0.0029216434340924025, 0.4646347761154175, 0.9965752363204956], "prob_old_token": [0.9334250092506409, 0.10898545384407043, 0.0048230113461613655, 6.487358041340485e-05], "l1-model.layers.2.mlp.down_proj.weight": [42590.0703125], "l2-model.layers.2.mlp.down_proj.weight": [6.949254512786865], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024803578853607], "request": {"prompt": "The head coach of {} is", "subject": "Pittsburgh Steelers", "target_new": {"str": "Billy Donovan"}, "old_answer": {"str": "Mike Tomlin"}, "seed": 42}}, {"loss_per_step": [5.838, 2.268, 0.542, 0.865, 0.018, 0.014, 0.013, 0.014, 0.012, 0.009], "prob_new": [0.06129935383796692, 0.5704690217971802, 0.6681283712387085, 0.7853323817253113, 0.982306182384491, 0.9860679507255554, 0.9868122339248657, 0.9862152338027954, 0.9879369735717773, 0.9911158680915833], "prob_old": [0.9774762392044067, 0.7408182621002197, 0.4513273239135742, 0.42876601219177246, 0.3869663178920746, 0.3626379668712616, 0.3492145240306854, 0.339805543422699, 0.3296821713447571, 0.31522831320762634], "prob_new_token": [2.4401444989052834e-06, 0.002005629474297166, 0.2230028212070465, 0.9853512048721313, 0.9855784177780151, 0.9855753183364868, 0.9875541925430298, 0.9900013208389282, 0.992165207862854, 0.9938248991966248], "prob_old_token": [0.9334250092506409, 0.22400934994220734, 0.0030112576205283403, 9.293490438722074e-05, 7.958056085044518e-05, 8.693230483913794e-05, 7.724091847194359e-05, 6.005783870932646e-05, 4.3664640543283895e-05, 3.107939846813679e-05], "l1-model.layers.2.mlp.down_proj.weight": [74334.65625], "l2-model.layers.2.mlp.down_proj.weight": [12.660477638244629], "linf-model.layers.2.mlp.down_proj.weight": [0.004121130332350731], "request": {"prompt": "The head coach of {} is", "subject": "Pittsburgh Steelers", "target_new": {"str": "Florent Ibenge"}, "old_answer": {"str": "Mike Tomlin"}, "seed": 42}}, {"loss_per_step": [6.84, 1.772, 0.577, 0.01], "prob_new": [0.12651365995407104, 0.5746726393699646, 0.7229440212249756, 0.990296483039856], "prob_old": [0.9774762392044067, 
0.7442892789840698, 0.3592427372932434, 0.3316957354545593], "prob_new_token": [1.985000380955171e-06, 0.006842096336185932, 0.1792142242193222, 0.975491464138031], "prob_old_token": [0.9334250092506409, 0.2361905574798584, 0.02510913461446762, 0.0012135610450059175], "l1-model.layers.2.mlp.down_proj.weight": [41500.9921875], "l2-model.layers.2.mlp.down_proj.weight": [6.852784633636475], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024784952402115], "request": {"prompt": "The head coach of {} is", "subject": "Pittsburgh Steelers", "target_new": {"str": "Steve Kerr"}, "old_answer": {"str": "Mike Tomlin"}, "seed": 42}}, {"loss_per_step": [9.178, 3.652, 0.398, 0.004], "prob_new": [0.027623794972896576, 0.3335738778114319, 0.7675379514694214, 0.9957244396209717], "prob_old": [0.975509524345398, 0.345807284116745, 0.6146265864372253, 0.5969945192337036], "prob_new_token": [6.662285727543349e-07, 0.002966612810268998, 0.3028329610824585, 0.9873648881912231], "prob_old_token": [0.9495809078216553, 0.07509617507457733, 0.0010307470802217722, 2.6277932647644775e-06], "l1-model.layers.2.mlp.down_proj.weight": [41803.70703125], "l2-model.layers.2.mlp.down_proj.weight": [6.7762556076049805], "linf-model.layers.2.mlp.down_proj.weight": [0.001502480823546648], "request": {"prompt": "The head coach of {} is", "subject": "SC Freiburg", "target_new": {"str": "Dan Quinn"}, "old_answer": {"str": "Christian Streich"}, "seed": 42}}, {"loss_per_step": [3.911, 2.24, 0.085, 0.004], "prob_new": [0.5032708048820496, 0.740035355091095, 0.9292389750480652, 0.9964568018913269], "prob_old": [0.975509524345398, 0.2699885666370392, 0.07815049588680267, 0.09385129064321518], "prob_new_token": [1.6017592088246602e-06, 1.8858339899452403e-05, 0.6686160564422607, 0.9869150519371033], "prob_old_token": [0.9495809078216553, 2.003712870646268e-05, 6.831897917436436e-05, 7.146471716623637e-07], "l1-model.layers.2.mlp.down_proj.weight": [38724.734375], "l2-model.layers.2.mlp.down_proj.weight": [6.638673305511475], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024775639176369], "request": {"prompt": "The head coach of {} is", "subject": "SC Freiburg", "target_new": {"str": "Asako Takakura"}, "old_answer": {"str": "Christian Streich"}, "seed": 42}}, {"loss_per_step": [7.523, 1.88, 0.011, 0.006], "prob_new": [0.011408062651753426, 0.32475218176841736, 0.9896131753921509, 0.9942078590393066], "prob_old": [0.975509524345398, 0.438137412071228, 0.3798455595970154, 0.20378541946411133], "prob_new_token": [3.8108337321318686e-05, 0.06281395256519318, 0.9675321578979492, 0.9970118999481201], "prob_old_token": [0.9495809078216553, 0.003912849351763725, 0.00011457774962764233, 1.964340299309697e-06], "l1-model.layers.2.mlp.down_proj.weight": [44518.8828125], "l2-model.layers.2.mlp.down_proj.weight": [7.081141948699951], "linf-model.layers.2.mlp.down_proj.weight": [0.0015021442668512464], "request": {"prompt": "The head coach of {} is", "subject": "SC Freiburg", "target_new": {"str": "Mike Vrabel"}, "old_answer": {"str": "Christian Streich"}, "seed": 42}}, {"loss_per_step": [7.153, 0.609, 0.044, 0.001], "prob_new": [0.48649030923843384, 0.6371937990188599, 0.9577829837799072, 0.9985359907150269], "prob_old": [0.9913535118103027, 0.7262626886367798, 0.6582451462745667, 0.6308372020721436], "prob_new_token": [6.294649779192696e-07, 0.30515459179878235, 0.9178726077079773, 0.9996792674064636], "prob_old_token": [0.9659656882286072, 0.00035695204860530794, 1.088491171685746e-05, 3.101727372722962e-07], 
"l1-model.layers.2.mlp.down_proj.weight": [40341.6875], "l2-model.layers.2.mlp.down_proj.weight": [6.690338611602783], "linf-model.layers.2.mlp.down_proj.weight": [0.00150248222053051], "request": {"prompt": "The head coach of {} is", "subject": "Rangers F.C.", "target_new": {"str": "Dean Smith"}, "old_answer": {"str": "Steven Gerrard"}, "seed": 42}}, {"loss_per_step": [6.444, 2.42, 1.518, 0.473, 0.009], "prob_new": [0.3118683099746704, 0.3696514070034027, 0.6666539907455444, 0.746296763420105, 0.9915013313293457], "prob_old": [0.9913535118103027, 0.713831901550293, 0.7492773532867432, 0.731928288936615, 0.7493283152580261], "prob_new_token": [1.1907839052582858e-06, 0.0056341481395065784, 0.010653850622475147, 0.24282458424568176, 0.9783312082290649], "prob_old_token": [0.9659656882286072, 0.02720751240849495, 8.856829663272947e-05, 0.0001371839316561818, 7.160532550187781e-05], "l1-model.layers.2.mlp.down_proj.weight": [45732.6171875], "l2-model.layers.2.mlp.down_proj.weight": [7.6787428855896], "linf-model.layers.2.mlp.down_proj.weight": [0.00200582854449749], "request": {"prompt": "The head coach of {} is", "subject": "Rangers F.C.", "target_new": {"str": "Bob Melvin"}, "old_answer": {"str": "Steven Gerrard"}, "seed": 42}}, {"loss_per_step": [4.095, 1.469, 0.969, 0.073, 0.006], "prob_new": [0.5030951499938965, 0.7231641411781311, 0.7462222576141357, 0.9354569911956787, 0.9936600923538208], "prob_old": [0.9913535118103027, 0.7319962978363037, 0.7486441731452942, 0.7464765906333923, 0.7427590489387512], "prob_new_token": [5.895069079997484e-06, 0.00315888412296772, 0.021513525396585464, 0.7560527920722961, 0.9828460216522217], "prob_old_token": [0.9659656882286072, 0.0004310717049520463, 0.0007576519274152815, 0.001953267492353916, 5.6907352700363845e-05], "l1-model.layers.2.mlp.down_proj.weight": [43891.328125], "l2-model.layers.2.mlp.down_proj.weight": [7.64567756652832], "linf-model.layers.2.mlp.down_proj.weight": [0.0020057016517966986], "request": {"prompt": "The head coach of {} is", "subject": "Rangers F.C.", "target_new": {"str": "Roberto Martinez"}, "old_answer": {"str": "Steven Gerrard"}, "seed": 42}}, {"loss_per_step": [7.873, 7.688, 4.071, 2.055, 0.348, 0.004], "prob_new": [0.1046801432967186, 0.41323402523994446, 0.5000942349433899, 0.5081854462623596, 0.7489770650863647, 0.9957612752914429], "prob_old": [0.9930652976036072, 0.622046947479248, 0.7173222899436951, 0.7883006930351257, 0.5672611594200134, 0.4666499197483063], "prob_new_token": [6.930384301995218e-07, 2.543741572935687e-07, 0.00029130993061698973, 0.016419222578406334, 0.4994398057460785, 0.993230938911438], "prob_old_token": [0.9789918661117554, 1.757591121531732e-07, 1.0491627335795783e-06, 0.0049764495342969894, 4.926320616505109e-05, 1.719092551866197e-06], "l1-model.layers.2.mlp.down_proj.weight": [50757.37109375], "l2-model.layers.2.mlp.down_proj.weight": [8.900161743164062], "linf-model.layers.2.mlp.down_proj.weight": [0.0025111250579357147], "request": {"prompt": "The head coach of {} is", "subject": "United States men's national soccer team", "target_new": {"str": "Dean Smith"}, "old_answer": {"str": "Gregg Berhalter"}, "seed": 42}}, {"loss_per_step": [5.475, 4.759, 2.251, 1.171, 0.164, 0.012, 0.008], "prob_new": [0.48892900347709656, 0.6407029628753662, 0.7294954061508179, 0.7431044578552246, 0.8758496046066284, 0.9876899719238281, 0.9921841621398926], "prob_old": [0.9930652976036072, 0.5666308403015137, 0.6483098268508911, 0.7674190402030945, 0.6824110150337219, 0.6348578929901123, 0.599729597568512], 
"prob_new_token": [3.880026042679674e-07, 9.030333814052938e-09, 0.00013360728917177767, 0.009598176926374435, 0.5372347235679626, 0.9877516031265259, 0.9981406927108765], "prob_old_token": [0.9789918661117554, 2.7700444604761287e-08, 1.85209446499357e-05, 0.02456812933087349, 0.00010616647341521457, 1.7651458392720087e-06, 1.0593258537028305e-07], "l1-model.layers.2.mlp.down_proj.weight": [59309.89453125], "l2-model.layers.2.mlp.down_proj.weight": [10.23488998413086], "linf-model.layers.2.mlp.down_proj.weight": [0.002993679605424404], "request": {"prompt": "The head coach of {} is", "subject": "United States men's national soccer team", "target_new": {"str": "Diego Simeone"}, "old_answer": {"str": "Gregg Berhalter"}, "seed": 42}}, {"loss_per_step": [3.799, 3.235, 1.371, 0.33, 0.003], "prob_new": [0.5185551643371582, 0.7443784475326538, 0.7500813603401184, 0.8134967684745789, 0.9967731237411499], "prob_old": [0.9930652976036072, 0.6271762251853943, 0.6006667017936707, 0.5965296030044556, 0.6040629744529724], "prob_new_token": [2.754276465566363e-06, 2.4513096832379233e-06, 0.0041639674454927444, 0.272474080324173, 0.9999613761901855], "prob_old_token": [0.9789918661117554, 5.203794898989145e-06, 7.501475920435041e-05, 0.0006779475370422006, 4.472514376629988e-07], "l1-model.layers.2.mlp.down_proj.weight": [44861.265625], "l2-model.layers.2.mlp.down_proj.weight": [7.656172275543213], "linf-model.layers.2.mlp.down_proj.weight": [0.0020058024674654007], "request": {"prompt": "The head coach of {} is", "subject": "United States men's national soccer team", "target_new": {"str": "Erik Spoelstra"}, "old_answer": {"str": "Gregg Berhalter"}, "seed": 42}}, {"loss_per_step": [5.334, 1.292, 0.876, 0.215, 0.018, 0.236, 0.004], "prob_new": [0.38368889689445496, 0.6184055209159851, 0.6629635095596313, 0.8674902319908142, 0.9829656481742859, 0.8540186285972595, 0.9961168169975281], "prob_old": [0.970726490020752, 0.6972866654396057, 0.6483567953109741, 0.7115322947502136, 0.7157947421073914, 0.6961333155632019, 0.7177027463912964], "prob_new_token": [2.9290783913893392e-06, 0.02080225758254528, 0.03371499851346016, 0.3435457944869995, 0.9314417243003845, 0.3253866136074066, 0.9924308657646179], "prob_old_token": [0.9938194751739502, 0.017402876168489456, 0.00012939059524796903, 0.00015096379502210766, 0.0022557880729436874, 0.0010147058637812734, 0.0007284418679773808], "l1-model.layers.2.mlp.down_proj.weight": [58225.859375], "l2-model.layers.2.mlp.down_proj.weight": [9.863054275512695], "linf-model.layers.2.mlp.down_proj.weight": [0.0029973862692713737], "request": {"prompt": "The head coach of {} is", "subject": "Belgium national football team", "target_new": {"str": "Derek McInnes"}, "old_answer": {"str": "Roberto Martinez"}, "seed": 42}}, {"loss_per_step": [3.45, 2.084, 0.228, 0.006], "prob_new": [0.6457922458648682, 0.6660524606704712, 0.8344184756278992, 0.9940764904022217], "prob_old": [0.970726490020752, 0.6726504564285278, 0.631996214389801, 0.6585109233856201], "prob_new_token": [3.409584678593092e-05, 0.0019343029707670212, 0.5050859451293945, 0.9845059514045715], "prob_old_token": [0.9938194751739502, 0.00017043095431290567, 0.0023123743012547493, 0.00026215030811727047], "l1-model.layers.2.mlp.down_proj.weight": [33640.09765625], "l2-model.layers.2.mlp.down_proj.weight": [6.153069019317627], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024770982563496], "request": {"prompt": "The head coach of {} is", "subject": "Belgium national football team", "target_new": {"str": "Antonio Conte"}, 
"old_answer": {"str": "Roberto Martinez"}, "seed": 42}}, {"loss_per_step": [8.53, 3.808, 0.194, 0.005], "prob_new": [0.08212365955114365, 0.4986151456832886, 0.8386621475219727, 0.9947366714477539], "prob_old": [0.970726490020752, 0.7168349027633667, 0.5857582688331604, 0.5220871567726135], "prob_new_token": [2.3727581321963953e-07, 0.0004941615625284612, 0.6791282296180725, 0.9905989170074463], "prob_old_token": [0.9938194751739502, 0.011370335705578327, 0.0004524113901425153, 1.5225623428705148e-06], "l1-model.layers.2.mlp.down_proj.weight": [40821.2109375], "l2-model.layers.2.mlp.down_proj.weight": [6.821758270263672], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024731401354074], "request": {"prompt": "The head coach of {} is", "subject": "Belgium national football team", "target_new": {"str": "Dean Smith"}, "old_answer": {"str": "Roberto Martinez"}, "seed": 42}}, {"loss_per_step": [4.304, 1.368, 0.552, 0.038, 0.007], "prob_new": [0.4975186288356781, 0.8098851442337036, 0.8246583938598633, 0.9642527103424072, 0.9931011199951172], "prob_old": [0.9559422135353088, 0.6925711035728455, 0.4988517463207245, 0.49787381291389465, 0.4717106223106384], "prob_new_token": [4.100978223675611e-09, 0.00031591253355145454, 0.040018610656261444, 0.8341659903526306, 0.9965362548828125], "prob_old_token": [0.9584635496139526, 0.014869868755340576, 0.0015314128249883652, 5.4304538934957236e-05, 5.245473744253104e-08], "l1-model.layers.2.mlp.down_proj.weight": [48688.65625], "l2-model.layers.2.mlp.down_proj.weight": [8.001603126525879], "linf-model.layers.2.mlp.down_proj.weight": [0.0020057223737239838], "request": {"prompt": "The head coach of {} is", "subject": "Washington Nationals", "target_new": {"str": "Simone Inzaghi"}, "old_answer": {"str": "Dave Martinez"}, "seed": 42}}, {"loss_per_step": [6.031, 2.138, 0.13, 0.017, 0.005], "prob_new": [0.17258110642433167, 0.6337288022041321, 0.8920015096664429, 0.98378586769104, 0.994881808757782], "prob_old": [0.9559422135353088, 0.7515937685966492, 0.6420522332191467, 0.4785028398036957, 0.48004382848739624], "prob_new_token": [2.5757961452654854e-07, 0.00048171618254855275, 0.6630534529685974, 0.9476411938667297, 0.9754475355148315], "prob_old_token": [0.9584635496139526, 0.16558891534805298, 0.0003336322552058846, 5.904887984797824e-06, 2.9189532142481767e-06], "l1-model.layers.2.mlp.down_proj.weight": [53750.12890625], "l2-model.layers.2.mlp.down_proj.weight": [8.534920692443848], "linf-model.layers.2.mlp.down_proj.weight": [0.0020053982734680176], "request": {"prompt": "The head coach of {} is", "subject": "Washington Nationals", "target_new": {"str": "Zlatko Dali\u0107"}, "old_answer": {"str": "Dave Martinez"}, "seed": 42}}, {"loss_per_step": [7.22, 3.308, 0.026, 0.021, 0.019, 0.016, 0.01, 0.006], "prob_new": [0.0588400736451149, 0.331808865070343, 0.9750785231590271, 0.9797289371490479, 0.9811605215072632, 0.9842081069946289, 0.989628791809082, 0.9939643740653992], "prob_old": [0.9559422135353088, 0.4970666468143463, 0.589115560054779, 0.49808236956596375, 0.4934616982936859, 0.4901098608970642, 0.4871039390563965, 0.4845290184020996], "prob_new_token": [1.3205683899286669e-05, 0.008944254368543625, 0.9396605491638184, 0.9618869423866272, 0.9770058989524841, 0.9874619841575623, 0.9942669868469238, 0.9971376061439514], "prob_old_token": [0.9584635496139526, 0.0002120208228006959, 0.00068948685657233, 9.916445560520515e-05, 7.918787014205009e-05, 5.399556175689213e-05, 2.8719359761453234e-05, 1.5054160940053407e-05], 
"l1-model.layers.2.mlp.down_proj.weight": [64188.97265625], "l2-model.layers.2.mlp.down_proj.weight": [11.090326309204102], "linf-model.layers.2.mlp.down_proj.weight": [0.0035109743475914], "request": {"prompt": "The head coach of {} is", "subject": "Washington Nationals", "target_new": {"str": "Dan Quinn"}, "old_answer": {"str": "Dave Martinez"}, "seed": 42}}, {"loss_per_step": [6.329, 3.528, 0.826, 0.353, 0.036, 0.011, 0.008], "prob_new": [0.24473410844802856, 0.47542986273765564, 0.6155255436897278, 0.7496007084846497, 0.9662293195724487, 0.9886534810066223, 0.9924131631851196], "prob_old": [0.9921764135360718, 0.3841593861579895, 0.3348231017589569, 0.37826430797576904, 0.33042559027671814, 0.3148038387298584, 0.27965787053108215], "prob_new_token": [6.00155260599422e-07, 0.00017146929167211056, 0.10136739164590836, 0.4215084910392761, 0.980670690536499, 0.9811320304870605, 0.9811928272247314], "prob_old_token": [0.9778327941894531, 8.829467333271168e-06, 0.0004453158762771636, 0.00014867662684991956, 2.465338866386446e-06, 1.327484596913564e-06, 1.1132495956189814e-06], "l1-model.layers.2.mlp.down_proj.weight": [55660.8359375], "l2-model.layers.2.mlp.down_proj.weight": [9.781920433044434], "linf-model.layers.2.mlp.down_proj.weight": [0.0029575731605291367], "request": {"prompt": "The head coach of {} is", "subject": "Tampa Bay Buccaneers", "target_new": {"str": "Steven Gerrard"}, "old_answer": {"str": "Bruce Arians"}, "seed": 42}}, {"loss_per_step": [10.045, 4.983, 0.27, 0.009], "prob_new": [0.0006133083370514214, 0.08444816619157791, 0.7915980815887451, 0.9908694624900818], "prob_old": [0.9921764135360718, 0.6073383092880249, 0.6514188051223755, 0.5926713943481445], "prob_new_token": [1.537872094559134e-06, 0.00027834257343783975, 0.5833929181098938, 0.9817999601364136], "prob_old_token": [0.9778327941894531, 0.00011026813444914296, 0.011221412569284439, 0.00010535454202909023], "l1-model.layers.2.mlp.down_proj.weight": [37528.234375], "l2-model.layers.2.mlp.down_proj.weight": [6.564414978027344], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024738386273384], "request": {"prompt": "The head coach of {} is", "subject": "Tampa Bay Buccaneers", "target_new": {"str": "Dave Roberts"}, "old_answer": {"str": "Bruce Arians"}, "seed": 42}}, {"loss_per_step": [6.529, 3.755, 0.299, 0.031, 0.016, 0.01, 0.006], "prob_new": [0.06819817423820496, 0.3127147853374481, 0.7829617261886597, 0.970552384853363, 0.9841994047164917, 0.9899770617485046, 0.9936461448669434], "prob_old": [0.9921764135360718, 0.34505388140678406, 0.3332788944244385, 0.3348759412765503, 0.3299669623374939, 0.3226868808269501, 0.30890029668807983], "prob_new_token": [3.123097258139751e-06, 0.00018987542716786265, 0.45690372586250305, 0.9191616177558899, 0.9565203189849854, 0.972537636756897, 0.9829655289649963], "prob_old_token": [0.9778327941894531, 1.3732170373259578e-05, 0.0009619845077395439, 0.0001602683769306168, 5.0538626965135336e-05, 1.8691198420128785e-05, 7.358563379966654e-06], "l1-model.layers.2.mlp.down_proj.weight": [62172.84765625], "l2-model.layers.2.mlp.down_proj.weight": [10.504066467285156], "linf-model.layers.2.mlp.down_proj.weight": [0.0029925964772701263], "request": {"prompt": "The head coach of {} is", "subject": "Tampa Bay Buccaneers", "target_new": {"str": "Steve Kerr"}, "old_answer": {"str": "Bruce Arians"}, "seed": 42}}, {"loss_per_step": [5.767, 0.538, 0.013, 0.02, 0.0], "prob_new": [0.278495728969574, 0.6965405941009521, 0.9871417880058289, 0.9803668260574341, 0.9997539520263672], "prob_old": 
[0.9679741859436035, 0.659376323223114, 0.5839325189590454, 0.4195128381252289, 0.41430115699768066], "prob_new_token": [4.68980724690482e-05, 0.23202012479305267, 0.9975407123565674, 0.955941915512085, 0.9997206926345825], "prob_old_token": [0.9068145751953125, 0.0004732218512799591, 1.663087516590167e-07, 1.4910507672993845e-07, 5.4637565582993375e-09], "l1-model.layers.2.mlp.down_proj.weight": [51446.09375], "l2-model.layers.2.mlp.down_proj.weight": [8.393229484558105], "linf-model.layers.2.mlp.down_proj.weight": [0.002005580812692642], "request": {"prompt": "The head coach of {} is", "subject": "Oklahoma City Thunder", "target_new": {"str": "Peter Vermes"}, "old_answer": {"str": "Billy Donovan"}, "seed": 42}}, {"loss_per_step": [5.962, 1.275, 0.11, 0.057, 0.003], "prob_new": [0.3804788887500763, 0.5679992437362671, 0.9069544076919556, 0.9492617845535278, 0.9975020885467529], "prob_old": [0.9679741859436035, 0.6414112448692322, 0.5490089654922485, 0.3853386640548706, 0.41432034969329834], "prob_new_token": [6.728301968905726e-07, 0.24756723642349243, 0.6791833639144897, 0.7983218431472778, 0.9909974932670593], "prob_old_token": [0.9068145751953125, 0.00022998559870757163, 6.003017915645614e-05, 1.6691112250555307e-05, 1.6638625766063342e-06], "l1-model.layers.2.mlp.down_proj.weight": [47443.70703125], "l2-model.layers.2.mlp.down_proj.weight": [8.08100414276123], "linf-model.layers.2.mlp.down_proj.weight": [0.00200573168694973], "request": {"prompt": "The head coach of {} is", "subject": "Oklahoma City Thunder", "target_new": {"str": "John Harbaugh"}, "old_answer": {"str": "Billy Donovan"}, "seed": 42}}, {"loss_per_step": [6.369, 2.639, 0.617, 0.045, 0.015, 0.01, 0.008], "prob_new": [0.4995577335357666, 0.5104146003723145, 0.7684873342514038, 0.957718014717102, 0.9850154519081116, 0.9895994663238525, 0.9921311140060425], "prob_old": [0.9679741859436035, 0.565366268157959, 0.580218493938446, 0.6417639851570129, 0.6565879583358765, 0.6571969985961914, 0.6505557298660278], "prob_new_token": [1.1093553098362463e-07, 0.0005528380861505866, 0.08559521287679672, 0.8514996767044067, 0.9664268493652344, 0.9867364168167114, 0.9935775995254517], "prob_old_token": [0.9068145751953125, 2.332090116397012e-05, 2.600123843876645e-05, 6.305094757408369e-06, 3.688758624775801e-06, 2.2855149381939555e-06, 1.148393039329676e-06], "l1-model.layers.2.mlp.down_proj.weight": [55743.4453125], "l2-model.layers.2.mlp.down_proj.weight": [9.804464340209961], "linf-model.layers.2.mlp.down_proj.weight": [0.0029981695115566254], "request": {"prompt": "The head coach of {} is", "subject": "Oklahoma City Thunder", "target_new": {"str": "Roberto Martinez"}, "old_answer": {"str": "Billy Donovan"}, "seed": 42}}, {"loss_per_step": [6.646, 1.762, 0.623, 0.072, 0.013, 0.018, 0.011, 0.004], "prob_new": [0.21345916390419006, 0.660863995552063, 0.8042049407958984, 0.9380226135253906, 0.9872564673423767, 0.9825607538223267, 0.989170253276825, 0.9957112669944763], "prob_old": [0.9856185913085938, 0.7994523644447327, 0.7987176775932312, 0.7932088971138, 0.7760304808616638, 0.76307213306427, 0.7581691145896912, 0.754102885723114], "prob_new_token": [1.1694410773088748e-07, 0.0003492689866106957, 0.04558362439274788, 0.7171303033828735, 0.9855819344520569, 0.9963118433952332, 0.998273491859436, 0.9990683197975159], "prob_old_token": [0.930949866771698, 0.0009204777888953686, 0.00019933018484152853, 1.0820544957823586e-05, 2.3890996203590475e-07, 3.1141777157017714e-08, 1.0197521049803981e-08, 6.035830502071349e-09], 
"l1-model.layers.2.mlp.down_proj.weight": [69908.7421875], "l2-model.layers.2.mlp.down_proj.weight": [11.45004940032959], "linf-model.layers.2.mlp.down_proj.weight": [0.0034618787467479706], "request": {"prompt": "The head coach of {} is", "subject": "San Antonio Spurs", "target_new": {"str": "Aliou Ciss\u00e9"}, "old_answer": {"str": "Gregg Popovich"}, "seed": 42}}, {"loss_per_step": [5.602, 1.53, 0.093, 0.043, 0.015, 0.009], "prob_new": [0.2548772990703583, 0.6272921562194824, 0.9228677749633789, 0.9589194655418396, 0.9849953055381775, 0.9909648895263672], "prob_old": [0.9856185913085938, 0.7992132902145386, 0.7985875010490417, 0.7966930270195007, 0.7906209826469421, 0.7866961359977722], "prob_new_token": [3.451778184171417e-06, 0.0032718218863010406, 0.6499465107917786, 0.9072840213775635, 0.9714032411575317, 0.9920982122421265], "prob_old_token": [0.930949866771698, 0.0002473951899446547, 0.00044087492278777063, 6.122631748439744e-05, 1.3655224393005483e-05, 4.681559857999673e-06], "l1-model.layers.2.mlp.down_proj.weight": [58287.26953125], "l2-model.layers.2.mlp.down_proj.weight": [9.531168937683105], "linf-model.layers.2.mlp.down_proj.weight": [0.002507440745830536], "request": {"prompt": "The head coach of {} is", "subject": "San Antonio Spurs", "target_new": {"str": "Lionel Scaloni"}, "old_answer": {"str": "Gregg Popovich"}, "seed": 42}}, {"loss_per_step": [6.541, 2.046, 0.15, 0.009], "prob_new": [0.015345703810453415, 0.5080810785293579, 0.8704690933227539, 0.990903377532959], "prob_old": [0.9856185913085938, 0.7995011210441589, 0.7990456819534302, 0.7988801002502441], "prob_new_token": [6.800572737120092e-05, 0.01671828329563141, 0.7411133050918579, 0.9819244146347046], "prob_old_token": [0.930949866771698, 0.0026868446730077267, 0.0005072479252703488, 0.001133100246079266], "l1-model.layers.2.mlp.down_proj.weight": [37902.5625], "l2-model.layers.2.mlp.down_proj.weight": [6.583977222442627], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024635940790176], "request": {"prompt": "The head coach of {} is", "subject": "San Antonio Spurs", "target_new": {"str": "Dave Roberts"}, "old_answer": {"str": "Gregg Popovich"}, "seed": 42}}, {"loss_per_step": [9.724, 3.013, 0.124, 0.012, 0.0], "prob_new": [0.00014348627883009613, 0.35513731837272644, 0.8899623155593872, 0.9882149696350098, 0.9997615814208984], "prob_old": [0.9781888127326965, 0.5175789594650269, 0.6989368796348572, 0.642281174659729, 0.7083254456520081], "prob_new_token": [1.308142782363575e-05, 0.0034158763010054827, 0.779994547367096, 0.9764460325241089, 0.9995260238647461], "prob_old_token": [0.9206925630569458, 0.0015785491559654474, 5.368431084207259e-05, 0.00046697104698978364, 7.694388841628097e-07], "l1-model.layers.2.mlp.down_proj.weight": [48319.83203125], "l2-model.layers.2.mlp.down_proj.weight": [8.12817096710205], "linf-model.layers.2.mlp.down_proj.weight": [0.002005475340411067], "request": {"prompt": "The head coach of {} is", "subject": "Tennessee Titans", "target_new": {"str": "Stephen Robinson"}, "old_answer": {"str": "Mike Vrabel"}, "seed": 42}}, {"loss_per_step": [3.079, 0.138, 0.001], "prob_new": [0.28002625703811646, 0.9000893831253052, 0.9986767172813416], "prob_old": [0.9781888127326965, 0.736718475818634, 0.6669588088989258], "prob_new_token": [0.005654942709952593, 0.5033641457557678, 0.9991459846496582], "prob_old_token": [0.9206925630569458, 0.001936415326781571, 2.0403128928592196e-06], "l1-model.layers.2.mlp.down_proj.weight": [34856.890625], "l2-model.layers.2.mlp.down_proj.weight": 
[5.372155666351318], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006788652390242], "request": {"prompt": "The head coach of {} is", "subject": "Tennessee Titans", "target_new": {"str": "Kenny Atkinson"}, "old_answer": {"str": "Mike Vrabel"}, "seed": 42}}, {"loss_per_step": [6.493, 2.425, 0.453, 0.012, 0.006], "prob_new": [0.2609153389930725, 0.4776444435119629, 0.789987325668335, 0.9884206056594849, 0.9941198825836182], "prob_old": [0.9781888127326965, 0.7333546876907349, 0.4217023253440857, 0.02984600141644478, 0.01616794802248478], "prob_new_token": [2.410279762443679e-07, 0.0049631171859800816, 0.163921520113945, 0.9580841064453125, 0.985329806804657], "prob_old_token": [0.9206925630569458, 0.0017472596373409033, 0.0023921437095850706, 2.2302278921415564e-06, 8.058315188463894e-07], "l1-model.layers.2.mlp.down_proj.weight": [46960.0703125], "l2-model.layers.2.mlp.down_proj.weight": [7.939938545227051], "linf-model.layers.2.mlp.down_proj.weight": [0.0019967034459114075], "request": {"prompt": "The head coach of {} is", "subject": "Tennessee Titans", "target_new": {"str": "Diego Simeone"}, "old_answer": {"str": "Mike Vrabel"}, "seed": 42}}, {"loss_per_step": [4.243, 1.185, 0.104, 0.007], "prob_new": [0.44812074303627014, 0.7519691586494446, 0.914330005645752, 0.9933528304100037], "prob_old": [0.9633944034576416, 0.731185793876648, 0.5599378943443298, 0.5644053220748901], "prob_new_token": [1.1385252491891151e-06, 0.008744225837290287, 0.6611451506614685, 0.9782120585441589], "prob_old_token": [0.969171404838562, 0.13372553884983063, 0.0013545742258429527, 6.631827272940427e-05], "l1-model.layers.2.mlp.down_proj.weight": [37850.8125], "l2-model.layers.2.mlp.down_proj.weight": [6.584990501403809], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024510212242603], "request": {"prompt": "The head coach of {} is", "subject": "Croatia national association football team", "target_new": {"str": "Steven Gerrard"}, "old_answer": {"str": "Zlatko Dali\u0107"}, "seed": 42}}, {"loss_per_step": [8.549, 4.312, 3.13, 2.635, 0.742, 0.123, 0.019, 0.009], "prob_new": [0.14377760887145996, 0.5932259559631348, 0.6600949168205261, 0.6277567148208618, 0.7025427222251892, 0.8970217704772949, 0.9816405177116394, 0.9909769892692566], "prob_old": [0.9633944034576416, 0.784622073173523, 0.7105523347854614, 0.32647883892059326, 0.26795053482055664, 0.1461852341890335, 0.04930954426527023, 0.012506170198321342], "prob_new_token": [6.6701311141059705e-09, 3.0823821361991577e-06, 8.517131936969236e-05, 0.00041800111648626626, 0.10791889578104019, 0.6922245621681213, 0.9490047693252563, 0.9891051054000854], "prob_old_token": [0.969171404838562, 0.034841932356357574, 0.00018912814266514033, 0.00026830355636775494, 0.0019146185368299484, 0.005116716492921114, 0.001334396656602621, 0.00033386939321644604], "l1-model.layers.2.mlp.down_proj.weight": [58336.4140625], "l2-model.layers.2.mlp.down_proj.weight": [10.411545753479004], "linf-model.layers.2.mlp.down_proj.weight": [0.00345732644200325], "request": {"prompt": "The head coach of {} is", "subject": "Croatia national association football team", "target_new": {"str": "Brett Brown"}, "old_answer": {"str": "Zlatko Dali\u0107"}, "seed": 42}}, {"loss_per_step": [9.999, 5.431, 1.177, 0.018, 0.004], "prob_new": [0.32475900650024414, 0.32733023166656494, 0.6728353500366211, 0.9828720092773438, 0.9960212111473083], "prob_old": [0.9633944034576416, 0.5532385110855103, 0.6029925346374512, 0.625451922416687, 0.586056113243103], "prob_new_token": [1.0838654134204262e-06, 
1.0218798706773669e-05, 0.02959948405623436, 0.9507205486297607, 0.9955258965492249], "prob_old_token": [0.969171404838562, 0.016964472830295563, 0.007227741647511721, 0.00011495107901282609, 4.2489396037126426e-06], "l1-model.layers.2.mlp.down_proj.weight": [42733.1796875], "l2-model.layers.2.mlp.down_proj.weight": [7.653311252593994], "linf-model.layers.2.mlp.down_proj.weight": [0.002003798261284828], "request": {"prompt": "The head coach of {} is", "subject": "Croatia national association football team", "target_new": {"str": "Ron Rivera"}, "old_answer": {"str": "Zlatko Dali\u0107"}, "seed": 42}}, {"loss_per_step": [6.398, 2.779, 4.445, 1.604, 0.004], "prob_new": [0.001666187192313373, 0.06998647749423981, 0.02621631510555744, 0.2057938575744629, 0.9962353706359863], "prob_old": [0.9916051030158997, 0.2789648175239563, 0.2567689120769501, 0.2735133171081543, 0.6023414731025696], "prob_new_token": [0.0017322914209216833, 0.03776395693421364, 0.049661096185445786, 0.24953040480613708, 0.9925201535224915], "prob_old_token": [0.9688407182693481, 0.01127276849001646, 0.009884537197649479, 0.005122429225593805, 2.4891229259083048e-05], "l1-model.layers.2.mlp.down_proj.weight": [46580.203125], "l2-model.layers.2.mlp.down_proj.weight": [7.740736961364746], "linf-model.layers.2.mlp.down_proj.weight": [0.0020058234222233295], "request": {"prompt": "The head coach of {} is", "subject": "Aberdeen F.C.", "target_new": {"str": "Dave Roberts"}, "old_answer": {"str": "Derek McInnes"}, "seed": 42}}, {"loss_per_step": [5.847, 0.661, 0.109, 0.001], "prob_new": [0.3112368583679199, 0.7024567127227783, 0.906930685043335, 0.9994795322418213], "prob_old": [0.9916051030158997, 0.5808480381965637, 0.7588602900505066, 0.7894753813743591], "prob_new_token": [3.1178508379525738e-06, 0.14245867729187012, 0.7229147553443909, 0.9990858435630798], "prob_old_token": [0.9688407182693481, 0.0014400631189346313, 0.0027374376077204943, 8.132721632136963e-06], "l1-model.layers.2.mlp.down_proj.weight": [38700.9453125], "l2-model.layers.2.mlp.down_proj.weight": [6.637113094329834], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024757012724876], "request": {"prompt": "The head coach of {} is", "subject": "Aberdeen F.C.", "target_new": {"str": "Brett Brown"}, "old_answer": {"str": "Derek McInnes"}, "seed": 42}}, {"loss_per_step": [6.31, 2.027, 1.396, 0.235, 0.002], "prob_new": [0.2572457492351532, 0.4508972465991974, 0.5386748313903809, 0.8128852844238281, 0.9982632398605347], "prob_old": [0.9916051030158997, 0.5350610613822937, 0.24070802330970764, 0.41323158144950867, 0.5005747079849243], "prob_new_token": [0.0017322914209216833, 0.03636021539568901, 0.12558043003082275, 0.6006243228912354, 0.9945075511932373], "prob_old_token": [0.9688407182693481, 0.008714674971997738, 0.008677699603140354, 0.0038105323910713196, 3.6906396417180076e-05], "l1-model.layers.2.mlp.down_proj.weight": [49176.32421875], "l2-model.layers.2.mlp.down_proj.weight": [8.076351165771484], "linf-model.layers.2.mlp.down_proj.weight": [0.0020058322697877884], "request": {"prompt": "The head coach of {} is", "subject": "Aberdeen F.C.", "target_new": {"str": "Dave Martinez"}, "old_answer": {"str": "Derek McInnes"}, "seed": 42}}, {"loss_per_step": [3.973, 1.623, 1.11, 0.973, 0.477, 0.025, 0.016, 0.011, 0.008], "prob_new": [0.43593502044677734, 0.6725571751594543, 0.7075819373130798, 0.7974733710289001, 0.8136858344078064, 0.9759771227836609, 0.9846035838127136, 0.9891873598098755, 0.9917168021202087], "prob_old": [0.9707397222518921, 0.6788321733474731, 
0.22402288019657135, 0.02943187952041626, 0.02080589346587658, 0.03033067286014557, 0.026838164776563644, 0.02177857793867588, 0.016805008053779602], "prob_new_token": [6.744452321072458e-07, 0.0007792245596647263, 0.007123915478587151, 0.00786031037569046, 0.09457870572805405, 0.8957141637802124, 0.9325489401817322, 0.9519581198692322, 0.9639168977737427], "prob_old_token": [0.9084967374801636, 0.04214579984545708, 0.005470843054354191, 0.0003473970864433795, 0.0009019452263601124, 6.2739614804741e-05, 6.305416172835976e-05, 5.125964162289165e-05, 3.095823194598779e-05], "l1-model.layers.2.mlp.down_proj.weight": [74974.9609375], "l2-model.layers.2.mlp.down_proj.weight": [12.392674446105957], "linf-model.layers.2.mlp.down_proj.weight": [0.0039270734414458275], "request": {"prompt": "The head coach of {} is", "subject": "Chicago Cubs", "target_new": {"str": "Pep Guardiola"}, "old_answer": {"str": "Joe Maddon"}, "seed": 42}}, {"loss_per_step": [6.074, 1.92, 0.521, 0.081, 0.039, 0.008], "prob_new": [0.16224661469459534, 0.6138815879821777, 0.7641683220863342, 0.9256201982498169, 0.962415337562561, 0.9921419024467468], "prob_old": [0.9707397222518921, 0.6561090350151062, 0.17985425889492035, 0.0197016391903162, 0.012515698559582233, 0.009739314205944538], "prob_new_token": [2.422046208039319e-08, 0.0002539825509302318, 0.09959457814693451, 0.9273214936256409, 0.9071664214134216, 0.9989289045333862], "prob_old_token": [0.9084967374801636, 0.13794808089733124, 0.0159470085054636, 0.00010712994117056951, 0.0002539135457482189, 2.0150064301560633e-06], "l1-model.layers.2.mlp.down_proj.weight": [59673.0703125], "l2-model.layers.2.mlp.down_proj.weight": [9.690046310424805], "linf-model.layers.2.mlp.down_proj.weight": [0.0025094766169786453], "request": {"prompt": "The head coach of {} is", "subject": "Chicago Cubs", "target_new": {"str": "Florent Ibenge"}, "old_answer": {"str": "Joe Maddon"}, "seed": 42}}, {"loss_per_step": [4.448, 1.783, 1.366, 1.041, 0.28, 0.034, 0.006], "prob_new": [0.43707332015037537, 0.8144558668136597, 0.7717373371124268, 0.6785706877708435, 0.8505337238311768, 0.9682225584983826, 0.9937650561332703], "prob_old": [0.9707397222518921, 0.6958979368209839, 0.3176841735839844, 0.3052769601345062, 0.24750468134880066, 0.2418440878391266, 0.24323244392871857], "prob_new_token": [8.740638435256187e-09, 2.5481993361609057e-05, 0.00041959132067859173, 0.013844916597008705, 0.20807060599327087, 0.8711665868759155, 0.9936415553092957], "prob_old_token": [0.9084967374801636, 0.02100224792957306, 0.02893318608403206, 0.0047418843023478985, 0.0012507237261161208, 0.00030796523788012564, 9.071503882296383e-06], "l1-model.layers.2.mlp.down_proj.weight": [58618.3046875], "l2-model.layers.2.mlp.down_proj.weight": [9.864058494567871], "linf-model.layers.2.mlp.down_proj.weight": [0.003001772565767169], "request": {"prompt": "The head coach of {} is", "subject": "Chicago Cubs", "target_new": {"str": "Simone Inzaghi"}, "old_answer": {"str": "Joe Maddon"}, "seed": 42}}, {"loss_per_step": [2.352, 3.304, 0.214, 0.004], "prob_new": [0.6583685874938965, 0.7455769777297974, 0.8554584383964539, 0.9964382648468018], "prob_old": [0.9405266046524048, 0.6069685220718384, 0.5909291505813599, 0.5922641754150391], "prob_new_token": [0.00012903581955470145, 1.8564595620773616e-06, 0.42824873328208923, 0.993904173374176], "prob_old_token": [0.9476491808891296, 3.302549487216311e-07, 3.300559910712764e-05, 1.5157972484303173e-05], "l1-model.layers.2.mlp.down_proj.weight": [41186.74609375], 
"l2-model.layers.2.mlp.down_proj.weight": [6.790090084075928], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024759341031313], "request": {"prompt": "{}'s record label is", "subject": "Cher", "target_new": {"str": "Interscope Records"}, "old_answer": {"str": "Warner Bros. Records"}, "seed": 42}}, {"loss_per_step": [3.238, 2.983, 0.679, 0.025, 0.007], "prob_new": [0.5545276999473572, 0.6032199859619141, 0.7963048219680786, 0.9761297106742859, 0.9934512376785278], "prob_old": [0.9405266046524048, 0.6979582905769348, 0.7736193537712097, 0.7654207944869995, 0.7629677653312683], "prob_new_token": [3.4701646654866636e-05, 4.6697746256540995e-06, 0.03532535582780838, 0.9094891548156738, 0.9782460927963257], "prob_old_token": [0.9476491808891296, 3.964796135846882e-08, 0.00048243498895317316, 4.447759783943184e-05, 8.00407724454999e-06], "l1-model.layers.2.mlp.down_proj.weight": [45207.0234375], "l2-model.layers.2.mlp.down_proj.weight": [7.880786418914795], "linf-model.layers.2.mlp.down_proj.weight": [0.002005642279982567], "request": {"prompt": "{}'s record label is", "subject": "Cher", "target_new": {"str": "Astralwerks"}, "old_answer": {"str": "Warner Bros. Records"}, "seed": 42}}, {"loss_per_step": [4.369, 5.421, 0.618, 0.486, 0.023, 0.009], "prob_new": [0.48411330580711365, 0.6577267646789551, 0.7159548997879028, 0.7394814491271973, 0.9775649309158325, 0.9914541244506836], "prob_old": [0.9405266046524048, 0.6704522967338562, 0.7422482371330261, 0.7177389860153198, 0.658715546131134, 0.6846858859062195], "prob_new_token": [4.443996203917777e-06, 8.899752401703154e-08, 0.15844962000846863, 0.23667944967746735, 0.9804624915122986, 0.9912163019180298], "prob_old_token": [0.9476491808891296, 4.661827190943768e-08, 9.321654943050817e-05, 0.0012685341062024236, 3.4279342798981816e-05, 1.677896943874657e-05], "l1-model.layers.2.mlp.down_proj.weight": [52510.93359375], "l2-model.layers.2.mlp.down_proj.weight": [8.991265296936035], "linf-model.layers.2.mlp.down_proj.weight": [0.002502765040844679], "request": {"prompt": "{}'s record label is", "subject": "Cher", "target_new": {"str": "Verve Records"}, "old_answer": {"str": "Warner Bros. Records"}, "seed": 42}}, {"loss_per_step": [5.787, 3.912, 0.024, 0.005], "prob_new": [0.6573749780654907, 0.6489777565002441, 0.9770091772079468, 0.995389461517334], "prob_old": [0.8555416464805603, 0.43772420287132263, 0.5027525424957275, 0.4922592341899872], "prob_new_token": [2.967281353960516e-08, 8.45324120746227e-06, 0.9321218729019165, 0.9905633926391602], "prob_old_token": [0.9348887801170349, 4.55058657422569e-09, 2.5400504810590974e-08, 1.80200743216119e-08], "l1-model.layers.2.mlp.down_proj.weight": [37447.234375], "l2-model.layers.2.mlp.down_proj.weight": [6.567280292510986], "linf-model.layers.2.mlp.down_proj.weight": [0.001502370461821556], "request": {"prompt": "{}'s record label is", "subject": "Ashley Tisdale", "target_new": {"str": "Parkwood Entertainment"}, "old_answer": {"str": "Warner Bros. 
Records"}, "seed": 42}}, {"loss_per_step": [6.575, 1.77, 0.008], "prob_new": [0.16524869203567505, 0.5782363414764404, 0.9924580454826355], "prob_old": [0.8555416464805603, 0.5674611926078796, 0.6226712465286255], "prob_new_token": [3.030894959010766e-06, 0.006681142840534449, 0.9924414157867432], "prob_old_token": [0.9348887801170349, 6.253576884773793e-06, 2.8527251672016973e-08], "l1-model.layers.2.mlp.down_proj.weight": [31271.388671875], "l2-model.layers.2.mlp.down_proj.weight": [5.0732598304748535], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "{}'s record label is", "subject": "Ashley Tisdale", "target_new": {"str": "Stax Records"}, "old_answer": {"str": "Warner Bros. Records"}, "seed": 42}}, {"loss_per_step": [3.368, 2.018, 0.091, 0.001], "prob_new": [0.46909165382385254, 0.5987306833267212, 0.9266729354858398, 0.9989661574363708], "prob_old": [0.8555416464805603, 0.5301017165184021, 0.624318540096283, 0.6609718203544617], "prob_new_token": [1.3194277926231734e-05, 0.0019370969384908676, 0.6366350054740906, 0.9964703321456909], "prob_old_token": [0.9348887801170349, 6.0581260186154395e-05, 2.5894584609886806e-07, 8.981156263132561e-09], "l1-model.layers.2.mlp.down_proj.weight": [39068.96875], "l2-model.layers.2.mlp.down_proj.weight": [6.648312568664551], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024729073047638], "request": {"prompt": "{}'s record label is", "subject": "Ashley Tisdale", "target_new": {"str": "Nuclear Blast"}, "old_answer": {"str": "Warner Bros. Records"}, "seed": 42}}, {"loss_per_step": [3.375, 1.238, 0.562, 0.022, 0.016, 0.014, 0.014, 0.013, 0.013, 0.012, 0.01, 0.009], "prob_new": [0.5269997119903564, 0.6681945323944092, 0.6338992714881897, 0.9781883955001831, 0.9839204549789429, 0.986010730266571, 0.9867260456085205, 0.9870277643203735, 0.987541675567627, 0.9884974360466003, 0.9897952079772949, 0.9912339448928833], "prob_old": [0.9810699820518494, 0.37814873456954956, 0.2656584680080414, 0.3804057538509369, 0.3132895827293396, 0.2616701126098633, 0.2195223718881607, 0.18476992845535278, 0.1581808477640152, 0.13917768001556396, 0.12577222287654877, 0.11610246449708939], "prob_new_token": [6.614917947445065e-05, 0.02487688697874546, 0.5879985094070435, 0.938453733921051, 0.9532508850097656, 0.9589787125587463, 0.9608594179153442, 0.9616220593452454, 0.963074803352356, 0.9658776521682739, 0.9697209596633911, 0.9739992022514343], "prob_old_token": [0.9632379412651062, 0.0006740360404364765, 0.0003761333937291056, 9.005188621813431e-05, 6.957668665563688e-05, 6.716274947393686e-05, 7.594108319608495e-05, 8.947688183980063e-05, 0.00010061776993097737, 0.00010464558727107942, 0.00010066566028399393, 9.049387881532311e-05], "l1-model.layers.2.mlp.down_proj.weight": [77136.1875], "l2-model.layers.2.mlp.down_proj.weight": [13.452720642089844], "linf-model.layers.2.mlp.down_proj.weight": [0.005439823027700186], "request": {"prompt": "{}'s record label is", "subject": "Kate Bush", "target_new": {"str": "Sire Records"}, "old_answer": {"str": "EMI"}, "seed": 42}}, {"loss_per_step": [4.55, 2.886, 5.687, 1.234, 0.007], "prob_new": [0.5117794275283813, 0.6222748160362244, 0.6612822413444519, 0.6699367761611938, 0.993179202079773], "prob_old": [0.9810699820518494, 0.052742183208465576, 0.3425961136817932, 0.3095515966415405, 0.3296582102775574], "prob_new_token": [2.0020941065013176e-06, 0.00020023176330141723, 3.953627469854837e-08, 0.025021519511938095, 0.9868866801261902], "prob_old_token": [0.9632379412651062, 
0.00038285666960291564, 2.016201960941544e-06, 0.0003854161186609417, 4.086416447535157e-05], "l1-model.layers.2.mlp.down_proj.weight": [43210.34375], "l2-model.layers.2.mlp.down_proj.weight": [7.56732702255249], "linf-model.layers.2.mlp.down_proj.weight": [0.002005810383707285], "request": {"prompt": "{}'s record label is", "subject": "Kate Bush", "target_new": {"str": "Impulse!"}, "old_answer": {"str": "EMI"}, "seed": 42}}, {"loss_per_step": [9.016, 3.317, 0.844, 0.009], "prob_new": [0.06381212919950485, 0.339769184589386, 0.6925959587097168, 0.9909858703613281], "prob_old": [0.9810699820518494, 0.26320797204971313, 0.21763154864311218, 0.010372479446232319], "prob_new_token": [1.3162103584818396e-07, 0.0023030638694763184, 0.07972890138626099, 0.9773436188697815], "prob_old_token": [0.9632379412651062, 0.0028250322211533785, 0.004174507688730955, 4.735401307698339e-05], "l1-model.layers.2.mlp.down_proj.weight": [39493.296875], "l2-model.layers.2.mlp.down_proj.weight": [6.563532829284668], "linf-model.layers.2.mlp.down_proj.weight": [0.00150248222053051], "request": {"prompt": "{}'s record label is", "subject": "Kate Bush", "target_new": {"str": "Stax Records"}, "old_answer": {"str": "EMI"}, "seed": 42}}, {"loss_per_step": [6.747, 4.803, 1.13, 0.004], "prob_new": [0.2204577773809433, 0.43720126152038574, 0.5389076471328735, 0.9962129592895508], "prob_old": [0.8945741653442383, 0.7323390245437622, 0.7242096662521362, 0.7087388038635254], "prob_new_token": [3.1279289487429196e-06, 7.696400280110538e-05, 0.10761334747076035, 0.997069239616394], "prob_old_token": [0.9861369729042053, 2.502609106613818e-07, 1.8474381704436382e-06, 3.644385770940062e-08], "l1-model.layers.2.mlp.down_proj.weight": [35850.36328125], "l2-model.layers.2.mlp.down_proj.weight": [6.403825283050537], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024831518530846], "request": {"prompt": "{}'s record label is", "subject": "Joni Mitchell", "target_new": {"str": "Imperial Records"}, "old_answer": {"str": "Asylum Records"}, "seed": 42}}, {"loss_per_step": [4.776, 2.552, 0.796, 0.013, 0.002], "prob_new": [0.3625020682811737, 0.6548401117324829, 0.6960760951042175, 0.9871803522109985, 0.998419463634491], "prob_old": [0.8945741653442383, 0.7029754519462585, 0.4551624655723572, 0.45152220129966736, 0.4393264055252075], "prob_new_token": [5.538629011425655e-06, 0.0004902606015093625, 0.09209916740655899, 0.9620659947395325, 0.9955651760101318], "prob_old_token": [0.9861369729042053, 1.7385249520884827e-05, 0.0002524779993109405, 1.3055579074716661e-05, 1.1424560852901777e-06], "l1-model.layers.2.mlp.down_proj.weight": [51886.78125], "l2-model.layers.2.mlp.down_proj.weight": [8.337138175964355], "linf-model.layers.2.mlp.down_proj.weight": [0.0020005367696285248], "request": {"prompt": "{}'s record label is", "subject": "Joni Mitchell", "target_new": {"str": "One Little Indian"}, "old_answer": {"str": "Asylum Records"}, "seed": 42}}, {"loss_per_step": [3.424, 2.856, 0.132, 0.008], "prob_new": [0.5835357904434204, 0.35033512115478516, 0.8908519744873047, 0.9919445514678955], "prob_old": [0.8945741653442383, 0.7068123817443848, 0.7127215266227722, 0.714253306388855], "prob_new_token": [4.517077832133509e-05, 0.0038098818622529507, 0.6745843887329102, 0.9770687818527222], "prob_old_token": [0.9861369729042053, 1.7146300024251104e-07, 2.0098064851481467e-05, 3.6859667034150334e-06], "l1-model.layers.2.mlp.down_proj.weight": [36952.52734375], "l2-model.layers.2.mlp.down_proj.weight": [6.519835948944092], 
"linf-model.layers.2.mlp.down_proj.weight": [0.0015024729073047638], "request": {"prompt": "{}'s record label is", "subject": "Joni Mitchell", "target_new": {"str": "Sire Records"}, "old_answer": {"str": "Asylum Records"}, "seed": 42}}, {"loss_per_step": [6.099, 4.523, 2.313, 0.222, 0.005], "prob_new": [0.1698620766401291, 0.6529912352561951, 0.6638660430908203, 0.8371144533157349, 0.9947584867477417], "prob_old": [0.9002351760864258, 0.48220545053482056, 0.32599011063575745, 0.3221895694732666, 0.3200204074382782], "prob_new_token": [3.7363733440543e-07, 1.334071384917479e-06, 0.0009772677440196276, 0.5161271691322327, 0.9884048700332642], "prob_old_token": [0.9475735425949097, 9.407770448888186e-06, 2.009901436395012e-06, 9.115019565797411e-06, 1.308223716023349e-07], "l1-model.layers.2.mlp.down_proj.weight": [48187.90625], "l2-model.layers.2.mlp.down_proj.weight": [8.092754364013672], "linf-model.layers.2.mlp.down_proj.weight": [0.00200579222291708], "request": {"prompt": "{}'s record label is", "subject": "Otis Redding", "target_new": {"str": "Roadrunner Records"}, "old_answer": {"str": "Stax Records"}, "seed": 42}}, {"loss_per_step": [4.154, 1.98, 0.046, 0.003], "prob_new": [0.5605654716491699, 0.7262288928031921, 0.9574926495552063, 0.9965268969535828], "prob_old": [0.9002351760864258, 0.31298238039016724, 0.319731205701828, 0.3073342442512512], "prob_new_token": [1.413242216585786e-07, 7.534500036854297e-05, 0.827725887298584, 0.9938905239105225], "prob_old_token": [0.9475735425949097, 9.146402589976788e-06, 5.428558142739348e-05, 9.049016398421372e-07], "l1-model.layers.2.mlp.down_proj.weight": [38174.7578125], "l2-model.layers.2.mlp.down_proj.weight": [6.624908447265625], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024198219180107], "request": {"prompt": "{}'s record label is", "subject": "Otis Redding", "target_new": {"str": "Domino Recording Company"}, "old_answer": {"str": "Stax Records"}, "seed": 42}}, {"loss_per_step": [5.625, 3.172, 0.034, 0.009], "prob_new": [0.39410996437072754, 0.4911405146121979, 0.966614842414856, 0.99132239818573], "prob_old": [0.9002351760864258, 0.3357435464859009, 0.3246942162513733, 0.32104361057281494], "prob_new_token": [1.6509293345734477e-05, 0.0017912023467943072, 0.9476708769798279, 0.9970362186431885], "prob_old_token": [0.9475735425949097, 1.3680798474524636e-05, 4.917554633721011e-06, 5.115044814374414e-07], "l1-model.layers.2.mlp.down_proj.weight": [38448.8671875], "l2-model.layers.2.mlp.down_proj.weight": [6.633571147918701], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024254098534584], "request": {"prompt": "{}'s record label is", "subject": "Otis Redding", "target_new": {"str": "Sun Records"}, "old_answer": {"str": "Stax Records"}, "seed": 42}}, {"loss_per_step": [5.374, 1.822, 0.185, 0.021, 0.01], "prob_new": [0.17192956805229187, 0.6480926871299744, 0.843758761882782, 0.9793882369995117, 0.9902539253234863], "prob_old": [0.8901752233505249, 0.04204832762479782, 0.0002084418956656009, 3.9392023609252647e-05, 2.2002437617629766e-05], "prob_new_token": [1.4243726582208183e-05, 0.0044890763238072395, 0.6464570164680481, 0.9615919589996338, 0.9863656759262085], "prob_old_token": [0.9012555480003357, 0.0007367945509031415, 1.482422476328793e-06, 1.746414710623867e-07, 1.2775898028394295e-07], "l1-model.layers.2.mlp.down_proj.weight": [51841.5234375], "l2-model.layers.2.mlp.down_proj.weight": [8.411128997802734], "linf-model.layers.2.mlp.down_proj.weight": [0.0020031887106597424], "request": {"prompt": "{}'s record label is", 
"subject": "Let It Be", "target_new": {"str": "YG Entertainment"}, "old_answer": {"str": "Apple Records"}, "seed": 42}}, {"loss_per_step": [6.777, 5.664, 3.086, 1.237, 0.156, 0.006], "prob_new": [0.3200742304325104, 0.3484293520450592, 0.43899956345558167, 0.5263468623161316, 0.8612146377563477, 0.9939652681350708], "prob_old": [0.8901752233505249, 0.06512364745140076, 0.19512423872947693, 0.27935877442359924, 0.34544843435287476, 0.4455609917640686], "prob_new_token": [2.028990820690524e-06, 1.7265721908188425e-05, 0.00238493992947042, 0.08722762018442154, 0.7610213756561279, 0.9974501132965088], "prob_old_token": [0.9012555480003357, 3.132913479930721e-05, 0.0001773434050846845, 0.0001154891651822254, 3.6443859698920278e-06, 1.708982289017058e-08], "l1-model.layers.2.mlp.down_proj.weight": [50594.6953125], "l2-model.layers.2.mlp.down_proj.weight": [8.864985466003418], "linf-model.layers.2.mlp.down_proj.weight": [0.002511178608983755], "request": {"prompt": "{}'s record label is", "subject": "Let It Be", "target_new": {"str": "Imperial Records"}, "old_answer": {"str": "Apple Records"}, "seed": 42}}, {"loss_per_step": [3.814, 2.589, 1.662, 0.8, 0.019, 0.003], "prob_new": [0.35797119140625, 0.4682080149650574, 0.6612018346786499, 0.6924216747283936, 0.9818291664123535, 0.9968079328536987], "prob_old": [0.8901752233505249, 0.015102806501090527, 0.10094114392995834, 0.306989461183548, 0.32207241654396057, 0.3222578763961792], "prob_new_token": [8.164953032974154e-05, 0.0008768893894739449, 0.007003266364336014, 0.09199207276105881, 0.9471498131752014, 0.9911715984344482], "prob_old_token": [0.9012555480003357, 9.668336861068383e-05, 3.3355478080920875e-05, 9.782836423255503e-05, 1.4939151924409089e-06, 2.0120999977280007e-07], "l1-model.layers.2.mlp.down_proj.weight": [55236.66796875], "l2-model.layers.2.mlp.down_proj.weight": [9.14940357208252], "linf-model.layers.2.mlp.down_proj.weight": [0.0024984683841466904], "request": {"prompt": "{}'s record label is", "subject": "Let It Be", "target_new": {"str": "Curb Records"}, "old_answer": {"str": "Apple Records"}, "seed": 42}}, {"loss_per_step": [4.093, 0.569, 0.059, 0.002], "prob_new": [0.6441812515258789, 0.7618817090988159, 0.9441840648651123, 0.9977006912231445], "prob_old": [0.9646981954574585, 0.5628540515899658, 0.4857277572154999, 0.5288398861885071], "prob_new_token": [1.3314929958596622e-07, 0.10921644419431686, 0.8544224500656128, 0.9984388947486877], "prob_old_token": [0.9372672438621521, 3.659771755337715e-05, 2.5379189537488855e-05, 7.5972621971232e-08], "l1-model.layers.2.mlp.down_proj.weight": [39689.5703125], "l2-model.layers.2.mlp.down_proj.weight": [6.701979160308838], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024775639176369], "request": {"prompt": "{}'s record label is", "subject": "Thom Yorke", "target_new": {"str": "Constellation Records"}, "old_answer": {"str": "XL Recordings"}, "seed": 42}}, {"loss_per_step": [4.962, 0.366, 0.299, 0.003], "prob_new": [0.09367245435714722, 0.7405173182487488, 0.7749892473220825, 0.9965357780456543], "prob_old": [0.9646981954574585, 0.504555881023407, 0.49226120114326477, 0.49512940645217896], "prob_new_token": [0.00026187871117144823, 0.4811996817588806, 0.5499999523162842, 0.9930853843688965], "prob_old_token": [0.9372672438621521, 6.60527621221263e-06, 3.6492915000962967e-07, 2.506692275972e-08], "l1-model.layers.2.mlp.down_proj.weight": [44906.3359375], "l2-model.layers.2.mlp.down_proj.weight": [6.968379020690918], "linf-model.layers.2.mlp.down_proj.weight": 
[0.0015024803578853607], "request": {"prompt": "{}'s record label is", "subject": "Thom Yorke", "target_new": {"str": "RCA"}, "old_answer": {"str": "XL Recordings"}, "seed": 42}}, {"loss_per_step": [3.311, 0.051, 0.007], "prob_new": [0.11436855047941208, 0.9525856971740723, 0.9934077262878418], "prob_old": [0.9646981954574585, 0.5187975168228149, 0.5472362637519836], "prob_new_token": [0.007327488623559475, 0.8318082094192505, 0.992293119430542], "prob_old_token": [0.9372672438621521, 4.747937964566518e-06, 2.5987725393861183e-07], "l1-model.layers.2.mlp.down_proj.weight": [37237.6953125], "l2-model.layers.2.mlp.down_proj.weight": [5.5752081871032715], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006785159930587], "request": {"prompt": "{}'s record label is", "subject": "Thom Yorke", "target_new": {"str": "A&M Records"}, "old_answer": {"str": "XL Recordings"}, "seed": 42}}, {"loss_per_step": [1.297, 0.131, 0.003], "prob_new": [0.6218725442886353, 0.8916453719139099, 0.9974128007888794], "prob_old": [0.9829809665679932, 0.4976242184638977, 0.4920041859149933], "prob_new_token": [0.005063892807811499, 0.6091585159301758, 0.998841404914856], "prob_old_token": [0.9666153788566589, 0.0004272568039596081, 6.057446455542959e-08], "l1-model.layers.2.mlp.down_proj.weight": [33611.92578125], "l2-model.layers.2.mlp.down_proj.weight": [5.277014255523682], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "{}'s record label is", "subject": "Iron Maiden", "target_new": {"str": "Warner Bros. Records"}, "old_answer": {"str": "EMI"}, "seed": 42}}, {"loss_per_step": [2.594, 1.122, 0.06, 0.149, 0.006], "prob_new": [0.42093268036842346, 0.750332236289978, 0.9493353366851807, 0.8992769718170166, 0.9938915371894836], "prob_old": [0.9829809665679932, 0.43348509073257446, 0.3810454308986664, 0.4055894613265991, 0.30199307203292847], "prob_new_token": [8.838136636768468e-06, 0.003864789381623268, 0.9142916798591614, 0.3474138081073761, 0.9614840745925903], "prob_old_token": [0.9666153788566589, 0.0022121581714600325, 0.000241279267356731, 0.005702785681933165, 9.346169827040285e-05], "l1-model.layers.2.mlp.down_proj.weight": [51973.4765625], "l2-model.layers.2.mlp.down_proj.weight": [8.338934898376465], "linf-model.layers.2.mlp.down_proj.weight": [0.0020056895446032286], "request": {"prompt": "{}'s record label is", "subject": "Iron Maiden", "target_new": {"str": "G.O.O.D. 
Music"}, "old_answer": {"str": "EMI"}, "seed": 42}}, {"loss_per_step": [8.393, 3.438, 1.207, 0.045, 0.008], "prob_new": [0.26159852743148804, 0.43360817432403564, 0.507752537727356, 0.9563425779342651, 0.991929829120636], "prob_old": [0.9829809665679932, 0.3907073438167572, 0.37833449244499207, 0.3457311689853668, 0.26210546493530273], "prob_new_token": [9.79959295932531e-08, 0.0011922265402972698, 0.09740941971540451, 0.9247609376907349, 0.9885149598121643], "prob_old_token": [0.9666153788566589, 0.0004835303407162428, 0.001096369349397719, 0.00012395414523780346, 1.347985198663082e-05], "l1-model.layers.2.mlp.down_proj.weight": [44528.25], "l2-model.layers.2.mlp.down_proj.weight": [7.761608123779297], "linf-model.layers.2.mlp.down_proj.weight": [0.002005472779273987], "request": {"prompt": "{}'s record label is", "subject": "Iron Maiden", "target_new": {"str": "Imperial Records"}, "old_answer": {"str": "EMI"}, "seed": 42}}, {"loss_per_step": [4.512, 5.252, 2.709, 0.42, 0.01], "prob_new": [0.2917996942996979, 0.02819310873746872, 0.5614644289016724, 0.7569970488548279, 0.9902786016464233], "prob_old": [0.963931679725647, 0.3804221451282501, 0.011594505980610847, 0.04309864714741707, 0.14669206738471985], "prob_new_token": [5.0134713092120364e-05, 8.471718319924548e-05, 0.0004175594658590853, 0.28833985328674316, 0.9875643849372864], "prob_old_token": [0.9345325827598572, 0.0026980421971529722, 0.0002081852435367182, 0.0019132486777380109, 1.8786824512062594e-05], "l1-model.layers.2.mlp.down_proj.weight": [42172.4140625], "l2-model.layers.2.mlp.down_proj.weight": [7.521040439605713], "linf-model.layers.2.mlp.down_proj.weight": [0.0020057708024978638], "request": {"prompt": "{}'s record label is", "subject": "Roxette", "target_new": {"str": "Chess Records"}, "old_answer": {"str": "EMI"}, "seed": 42}}, {"loss_per_step": [3.451, 3.47, 0.459, 0.003], "prob_new": [0.5953251719474792, 0.6962467432022095, 0.8179126977920532, 0.9969741106033325], "prob_old": [0.963931679725647, 0.27354347705841064, 0.2674967348575592, 0.13982978463172913], "prob_new_token": [3.3683161859698885e-07, 5.7076519510701473e-08, 0.10174945741891861, 0.9947484731674194], "prob_old_token": [0.9345325827598572, 2.65700941781688e-06, 0.0014922359259799123, 1.3689386832993478e-05], "l1-model.layers.2.mlp.down_proj.weight": [39587.2734375], "l2-model.layers.2.mlp.down_proj.weight": [6.675627708435059], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024784952402115], "request": {"prompt": "{}'s record label is", "subject": "Roxette", "target_new": {"str": "Domino Recording Company"}, "old_answer": {"str": "EMI"}, "seed": 42}}, {"loss_per_step": [3.273, 1.91, 0.074, 0.004], "prob_new": [0.6471943259239197, 0.6479429006576538, 0.9325892329216003, 0.995887279510498], "prob_old": [0.963931679725647, 0.4716857969760895, 0.4457153081893921, 0.4507642090320587], "prob_new_token": [5.775524550699629e-05, 0.003448703559115529, 0.8205541372299194, 0.9976016879081726], "prob_old_token": [0.9345325827598572, 0.0011285428190603852, 0.00010772640962386504, 6.7516630224417895e-06], "l1-model.layers.2.mlp.down_proj.weight": [35322.4140625], "l2-model.layers.2.mlp.down_proj.weight": [6.37240743637085], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024710446596146], "request": {"prompt": "{}'s record label is", "subject": "Roxette", "target_new": {"str": "Roadrunner Records"}, "old_answer": {"str": "EMI"}, "seed": 42}}, {"loss_per_step": [6.108, 2.112, 0.019, 0.008], "prob_new": [0.16709373891353607, 0.6055817604064941, 
0.9810168743133545, 0.9919135570526123], "prob_old": [0.838421106338501, 0.44776982069015503, 0.41674527525901794, 0.3616555333137512], "prob_new_token": [4.084006377524929e-06, 0.0021526727359741926, 0.9832069873809814, 0.9902864098548889], "prob_old_token": [0.9269585013389587, 1.6914147860092044e-09, 7.970571425097717e-11, 1.8255924549848146e-10], "l1-model.layers.2.mlp.down_proj.weight": [39978.25], "l2-model.layers.2.mlp.down_proj.weight": [6.741215229034424], "linf-model.layers.2.mlp.down_proj.weight": [0.001502249389886856], "request": {"prompt": "{}'s record label is", "subject": "Arvo P\u00e4rt", "target_new": {"str": "Bad Boy Records"}, "old_answer": {"str": "ECM Records"}, "seed": 42}}, {"loss_per_step": [4.391, 0.523, 0.001], "prob_new": [0.32687321305274963, 0.7353867888450623, 0.9989094734191895], "prob_old": [0.838421106338501, 0.6248687505722046, 0.6253392696380615], "prob_new_token": [0.00020173887605778873, 0.2086167186498642, 0.9976438879966736], "prob_old_token": [0.9269585013389587, 6.5413159973104484e-06, 2.8999048495848e-08], "l1-model.layers.2.mlp.down_proj.weight": [33636.953125], "l2-model.layers.2.mlp.down_proj.weight": [5.263467311859131], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "{}'s record label is", "subject": "Arvo P\u00e4rt", "target_new": {"str": "BMG"}, "old_answer": {"str": "ECM Records"}, "seed": 42}}, {"loss_per_step": [4.226, 4.543, 0.97, 0.72, 0.01], "prob_new": [0.6266818642616272, 0.7393762469291687, 0.7498852014541626, 0.7607086896896362, 0.9905049800872803], "prob_old": [0.838421106338501, 0.445571631193161, 0.3188982903957367, 0.5229544639587402, 0.3935660123825073], "prob_new_token": [8.028961673289814e-08, 1.341507971375222e-08, 0.02107132226228714, 0.05702364444732666, 0.9973973035812378], "prob_old_token": [0.9269585013389587, 3.44831683207758e-08, 5.3513682360062376e-05, 7.626000751770334e-06, 5.986443285532772e-11], "l1-model.layers.2.mlp.down_proj.weight": [45375.0625], "l2-model.layers.2.mlp.down_proj.weight": [7.780109405517578], "linf-model.layers.2.mlp.down_proj.weight": [0.0020058508962392807], "request": {"prompt": "{}'s record label is", "subject": "Arvo P\u00e4rt", "target_new": {"str": "Constellation Records"}, "old_answer": {"str": "ECM Records"}, "seed": 42}}, {"loss_per_step": [3.458, 5.621, 0.715, 0.016, 0.004], "prob_new": [0.647517204284668, 0.4589424431324005, 0.6979389190673828, 0.9843491315841675, 0.9958738088607788], "prob_old": [0.9727920889854431, 0.3296000361442566, 0.12421586364507675, 0.14975854754447937, 0.19654975831508636], "prob_new_token": [1.5967993931553792e-06, 3.4221800433442695e-06, 0.08034097403287888, 0.9581519961357117, 0.993925929069519], "prob_old_token": [0.9543884992599487, 2.4439603407699906e-07, 0.00024897983530536294, 2.765316457953304e-05, 5.421607056632638e-06], "l1-model.layers.2.mlp.down_proj.weight": [43192.97265625], "l2-model.layers.2.mlp.down_proj.weight": [7.665331840515137], "linf-model.layers.2.mlp.down_proj.weight": [0.0020031295716762543], "request": {"prompt": "{}'s record label is", "subject": "Willie Dixon", "target_new": {"str": "XL Recordings"}, "old_answer": {"str": "Chess Records"}, "seed": 42}}, {"loss_per_step": [9.219, 3.786, 1.196, 0.296, 0.067, 0.028, 0.009], "prob_new": [0.015344902873039246, 0.34386810660362244, 0.6609703302383423, 0.8003705739974976, 0.9379888772964478, 0.9728929996490479, 0.9906610250473022], "prob_old": [0.9727920889854431, 0.037067584693431854, 0.002995280083268881, 0.01023869402706623, 
0.00880708359181881, 0.006973530165851116, 0.005744799505919218], "prob_new_token": [2.4477019906044006e-05, 0.0003345048753544688, 0.02901444211602211, 0.4198469817638397, 0.8313146233558655, 0.9310147762298584, 0.9791314601898193], "prob_old_token": [0.9543884992599487, 0.003841840196400881, 0.00010331976227462292, 2.049033901130315e-05, 1.915123448270606e-06, 3.79514773385381e-07, 6.156147946967394e-08], "l1-model.layers.2.mlp.down_proj.weight": [62481.7421875], "l2-model.layers.2.mlp.down_proj.weight": [10.413683891296387], "linf-model.layers.2.mlp.down_proj.weight": [0.002995983697474003], "request": {"prompt": "{}'s record label is", "subject": "Willie Dixon", "target_new": {"str": "OWSLA"}, "old_answer": {"str": "Chess Records"}, "seed": 42}}, {"loss_per_step": [4.92, 1.726, 0.793, 0.033, 0.008], "prob_new": [0.3289947509765625, 0.6619598269462585, 0.6911150217056274, 0.9679445028305054, 0.9922155141830444], "prob_old": [0.9727920889854431, 0.30595189332962036, 0.17927783727645874, 0.10111429542303085, 0.09783312678337097], "prob_new_token": [2.104927807522472e-05, 0.00575786828994751, 0.09452416002750397, 0.9150928258895874, 0.9831404089927673], "prob_old_token": [0.9543884992599487, 0.0003838770790025592, 0.001227873144671321, 2.1440826458274387e-05, 2.276146460644668e-06], "l1-model.layers.2.mlp.down_proj.weight": [43257.9609375], "l2-model.layers.2.mlp.down_proj.weight": [7.725369453430176], "linf-model.layers.2.mlp.down_proj.weight": [0.0020049121230840683], "request": {"prompt": "{}'s record label is", "subject": "Willie Dixon", "target_new": {"str": "BMG"}, "old_answer": {"str": "Chess Records"}, "seed": 42}}, {"loss_per_step": [5.694, 2.687, 0.919, 0.016, 0.023, 0.013, 0.008], "prob_new": [0.2287721335887909, 0.5153478384017944, 0.7376934289932251, 0.9837886691093445, 0.9778115749359131, 0.986817479133606, 0.9922773241996765], "prob_old": [0.9597165584564209, 0.41949260234832764, 0.32654356956481934, 0.1276101917028427, 0.012044653296470642, 0.0038542563561350107, 0.002491586608812213], "prob_new_token": [2.6331128538004123e-05, 0.00023831981525290757, 0.02733767032623291, 0.9786191582679749, 0.9918781518936157, 0.990177571773529, 0.9848339557647705], "prob_old_token": [0.9296897053718567, 0.009048426523804665, 0.02257663756608963, 0.0002827700518537313, 2.2123311282484792e-05, 9.916572707879823e-06, 8.284966497740243e-06], "l1-model.layers.2.mlp.down_proj.weight": [62678.50390625], "l2-model.layers.2.mlp.down_proj.weight": [10.471514701843262], "linf-model.layers.2.mlp.down_proj.weight": [0.002990950597450137], "request": {"prompt": "{}'s record label is", "subject": "Bob Marley", "target_new": {"str": "Avex Trax"}, "old_answer": {"str": "Island Records"}, "seed": 42}}, {"loss_per_step": [6.664, 2.59, 0.133, 0.022, 0.014, 0.01], "prob_new": [0.07398997992277145, 0.5400707721710205, 0.8814555406570435, 0.9785904884338379, 0.9863544702529907, 0.9904241561889648], "prob_old": [0.9597165584564209, 0.07599963992834091, 0.03046143613755703, 0.0933079719543457, 0.1577407270669937, 0.18219877779483795], "prob_new_token": [3.3657040603429778e-06, 0.000676615396514535, 0.7547478079795837, 0.950812578201294, 0.9642329812049866, 0.9745132327079773], "prob_old_token": [0.9296897053718567, 0.00031501660123467445, 6.587086681975052e-05, 2.774843778752256e-06, 7.91411309819523e-07, 3.1528389854429406e-07], "l1-model.layers.2.mlp.down_proj.weight": [56226.8984375], "l2-model.layers.2.mlp.down_proj.weight": [9.447375297546387], "linf-model.layers.2.mlp.down_proj.weight": 
[0.0025056861340999603], "request": {"prompt": "{}'s record label is", "subject": "Bob Marley", "target_new": {"str": "YG Entertainment"}, "old_answer": {"str": "Island Records"}, "seed": 42}}, {"loss_per_step": [4.225, 3.322, 0.012, 0.007], "prob_new": [0.5332008004188538, 0.6664544343948364, 0.9881733655929565, 0.9935182929039001], "prob_old": [0.9597165584564209, 0.4838014543056488, 0.4512940049171448, 0.43428468704223633], "prob_new_token": [5.208104084886145e-06, 4.693802475230768e-05, 0.9650593400001526, 0.981429398059845], "prob_old_token": [0.9296897053718567, 1.8325867131352425e-05, 0.0001809533277992159, 4.1024413803825155e-05], "l1-model.layers.2.mlp.down_proj.weight": [34371.7578125], "l2-model.layers.2.mlp.down_proj.weight": [6.3379807472229], "linf-model.layers.2.mlp.down_proj.weight": [0.0015020910650491714], "request": {"prompt": "{}'s record label is", "subject": "Bob Marley", "target_new": {"str": "Parlophone"}, "old_answer": {"str": "Island Records"}, "seed": 42}}, {"loss_per_step": [4.148, 0.449, 0.0], "prob_new": [0.4857935905456543, 0.6916407942771912, 0.9995236396789551], "prob_old": [0.9599932432174683, 0.32415640354156494, 0.33202674984931946], "prob_new_token": [7.512175216106698e-06, 0.36824294924736023, 0.9989386200904846], "prob_old_token": [0.9430908560752869, 3.5041819501202554e-05, 8.588145306021033e-08], "l1-model.layers.2.mlp.down_proj.weight": [31810.7109375], "l2-model.layers.2.mlp.down_proj.weight": [5.107110977172852], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "{}'s record label is", "subject": "David Archuleta", "target_new": {"str": "Big Machine Records"}, "old_answer": {"str": "Jive Records"}, "seed": 42}}, {"loss_per_step": [2.469, 5.88, 2.709, 1.277, 0.639, 0.019, 0.004], "prob_new": [0.6899889707565308, 0.25448402762413025, 0.6691203117370605, 0.7450723648071289, 0.7647398114204407, 0.9813131093978882, 0.9958055019378662], "prob_old": [0.9599932432174683, 0.22970855236053467, 0.272948682308197, 0.30827319622039795, 0.38678228855133057, 0.5276657342910767, 0.5520839691162109], "prob_new_token": [6.714757182635367e-05, 8.735297569728573e-07, 2.891726398956962e-05, 0.006199629046022892, 0.07907556742429733, 0.9346854090690613, 0.9891636967658997], "prob_old_token": [0.9430908560752869, 3.162951998092467e-07, 3.4080712794093415e-06, 0.0014811726287007332, 0.0008905858267098665, 1.4483342056337278e-05, 1.717717509563954e-06], "l1-model.layers.2.mlp.down_proj.weight": [55393.12890625], "l2-model.layers.2.mlp.down_proj.weight": [9.777067184448242], "linf-model.layers.2.mlp.down_proj.weight": [0.0029830411076545715], "request": {"prompt": "{}'s record label is", "subject": "David Archuleta", "target_new": {"str": "Interscope Records"}, "old_answer": {"str": "Jive Records"}, "seed": 42}}, {"loss_per_step": [1.949, 2.65, 0.075, 0.002], "prob_new": [0.6341511607170105, 0.7317832708358765, 0.9303698539733887, 0.9981213808059692], "prob_old": [0.9599932432174683, 0.33830469846725464, 0.32651957869529724, 0.34803521633148193], "prob_new_token": [0.0007293455419130623, 2.6870056899497285e-05, 0.8555554151535034, 0.998760461807251], "prob_old_token": [0.9430908560752869, 1.8237036783830263e-05, 5.3735067922389135e-05, 4.3238196667516604e-07], "l1-model.layers.2.mlp.down_proj.weight": [34381.890625], "l2-model.layers.2.mlp.down_proj.weight": [6.300790786743164], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024710446596146], "request": {"prompt": "{}'s record label is", "subject": "David Archuleta", 
"target_new": {"str": "Elektra Records"}, "old_answer": {"str": "Jive Records"}, "seed": 42}}, {"loss_per_step": [6.249, 4.222, 2.735, 0.33, 0.016, 0.003], "prob_new": [0.26162028312683105, 0.3647450804710388, 0.6653994917869568, 0.7905415296554565, 0.9845026731491089, 0.9972089529037476], "prob_old": [0.9433177709579468, 0.3079976439476013, 0.3059299886226654, 0.2563299834728241, 0.2583158612251282, 0.26113367080688477], "prob_new_token": [7.961657502164599e-06, 2.074203803204e-05, 0.00027474662056192756, 0.37232840061187744, 0.9545255303382874, 0.9930892586708069], "prob_old_token": [0.9054392576217651, 6.0387002065454e-08, 2.7821369030789356e-07, 5.574538022301567e-07, 2.6900732308376973e-08, 3.148788119133883e-09], "l1-model.layers.2.mlp.down_proj.weight": [49424.6640625], "l2-model.layers.2.mlp.down_proj.weight": [8.719528198242188], "linf-model.layers.2.mlp.down_proj.weight": [0.002477845177054405], "request": {"prompt": "{}'s record label is", "subject": "Brad Paisley", "target_new": {"str": "One Little Indian"}, "old_answer": {"str": "Arista Nashville"}, "seed": 42}}, {"loss_per_step": [3.929, 4.31, 0.865, 0.032, 0.006], "prob_new": [0.3092197775840759, 0.1723078340291977, 0.6737787127494812, 0.9693577289581299, 0.9939147233963013], "prob_old": [0.9433177709579468, 0.2541237473487854, 0.33422762155532837, 0.45049360394477844, 0.3775686025619507], "prob_new_token": [0.00030463607981801033, 0.002672678790986538, 0.0472567044198513, 0.905630350112915, 0.9931561350822449], "prob_old_token": [0.9054392576217651, 3.7431286159517185e-07, 3.8987150219327305e-06, 2.037044396274723e-06, 2.6237151473651465e-07], "l1-model.layers.2.mlp.down_proj.weight": [42769.515625], "l2-model.layers.2.mlp.down_proj.weight": [7.669353008270264], "linf-model.layers.2.mlp.down_proj.weight": [0.00200454518198967], "request": {"prompt": "{}'s record label is", "subject": "Brad Paisley", "target_new": {"str": "Avex Trax"}, "old_answer": {"str": "Arista Nashville"}, "seed": 42}}, {"loss_per_step": [4.737, 2.614, 0.074, 0.001], "prob_new": [0.29550591111183167, 0.42880499362945557, 0.9332402944564819, 0.9985288381576538], "prob_old": [0.9433177709579468, 0.4709652066230774, 0.5007737278938293, 0.5015048980712891], "prob_new_token": [8.883362170308828e-06, 0.0013407899532467127, 0.8029569983482361, 0.9981284737586975], "prob_old_token": [0.9054392576217651, 4.6089633087831317e-07, 2.8865187573501316e-07, 3.442761631333724e-09], "l1-model.layers.2.mlp.down_proj.weight": [37681.6875], "l2-model.layers.2.mlp.down_proj.weight": [6.557776927947998], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024691820144653], "request": {"prompt": "{}'s record label is", "subject": "Brad Paisley", "target_new": {"str": "Syco Music"}, "old_answer": {"str": "Arista Nashville"}, "seed": 42}}, {"loss_per_step": [1.879, 0.623, 0.005], "prob_new": [0.776971697807312, 0.6870715022087097, 0.9954742789268494], "prob_old": [0.8971783518791199, 0.27796339988708496, 0.2840152382850647], "prob_new_token": [9.389722981723025e-05, 0.28272053599357605, 0.9793086647987366], "prob_old_token": [0.975979208946228, 0.00022614788031205535, 1.4852306776447222e-05], "l1-model.layers.2.mlp.down_proj.weight": [31082.734375], "l2-model.layers.2.mlp.down_proj.weight": [5.035635948181152], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "{}'s record label is", "subject": "LCD Soundsystem", "target_new": {"str": "Astralwerks"}, "old_answer": {"str": "DFA Records"}, "seed": 42}}, {"loss_per_step": [2.358, 0.087, 
0.007], "prob_new": [0.7389751672744751, 0.9215312004089355, 0.993305504322052], "prob_old": [0.8971783518791199, 0.294570654630661, 0.2797994315624237], "prob_new_token": [1.0640055734256748e-05, 0.8608046174049377, 0.9930440783500671], "prob_old_token": [0.975979208946228, 7.067161641316488e-05, 1.4439182450587396e-05], "l1-model.layers.2.mlp.down_proj.weight": [36801.234375], "l2-model.layers.2.mlp.down_proj.weight": [5.539458274841309], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006783995777369], "request": {"prompt": "{}'s record label is", "subject": "LCD Soundsystem", "target_new": {"str": "Warner Bros. Records"}, "old_answer": {"str": "DFA Records"}, "seed": 42}}, {"loss_per_step": [1.184, 0.026, 0.001], "prob_new": [0.6790131330490112, 0.9762255549430847, 0.9986003041267395], "prob_old": [0.8971783518791199, 0.23544494807720184, 0.23274146020412445], "prob_new_token": [0.004107824992388487, 0.8128965497016907, 0.9956199526786804], "prob_old_token": [0.975979208946228, 0.00012578660971485078, 1.3429571481538005e-06], "l1-model.layers.2.mlp.down_proj.weight": [35610.703125], "l2-model.layers.2.mlp.down_proj.weight": [5.439943790435791], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006790980696678], "request": {"prompt": "{}'s record label is", "subject": "LCD Soundsystem", "target_new": {"str": "G.O.O.D. Music"}, "old_answer": {"str": "DFA Records"}, "seed": 42}}, {"loss_per_step": [8.583, 6.496, 1.103, 0.001], "prob_new": [0.2986251413822174, 0.47445037961006165, 0.5495610237121582, 0.9986803531646729], "prob_old": [0.9231381416320801, 0.34268197417259216, 0.5718607902526855, 0.5849564671516418], "prob_new_token": [5.8699924920802005e-08, 2.399554432486184e-06, 0.11151870340108871, 0.9995890259742737], "prob_old_token": [0.9641588926315308, 2.0234390376572264e-06, 0.0005919550894759595, 1.7245136518795334e-07], "l1-model.layers.2.mlp.down_proj.weight": [36320.0234375], "l2-model.layers.2.mlp.down_proj.weight": [6.379624843597412], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024826861917973], "request": {"prompt": "{}'s record label is", "subject": "Christina Aguilera", "target_new": {"str": "Imperial Records"}, "old_answer": {"str": "RCA Records"}, "seed": 42}}, {"loss_per_step": [5.253, 1.268, 0.034, 0.002], "prob_new": [0.3059892952442169, 0.6174941658973694, 0.9674453735351562, 0.9979247450828552], "prob_old": [0.9231381416320801, 0.14157770574092865, 0.010487988591194153, 0.056473247706890106], "prob_new_token": [5.599375526799122e-06, 0.026943422853946686, 0.9072111248970032, 0.9950847029685974], "prob_old_token": [0.9641588926315308, 0.00013649632455781102, 0.0001387097145197913, 6.538765319419326e-06], "l1-model.layers.2.mlp.down_proj.weight": [37778.8203125], "l2-model.layers.2.mlp.down_proj.weight": [6.6023712158203125], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024710446596146], "request": {"prompt": "{}'s record label is", "subject": "Christina Aguilera", "target_new": {"str": "YG Entertainment"}, "old_answer": {"str": "RCA Records"}, "seed": 42}}, {"loss_per_step": [3.996, 3.3, 0.013, 0.012, 0.011, 0.007], "prob_new": [0.47907790541648865, 0.6469521522521973, 0.9873277544975281, 0.9879257082939148, 0.9896049499511719, 0.9926389455795288], "prob_old": [0.9231381416320801, 0.5954107642173767, 0.5769226551055908, 0.4943997263908386, 0.47689947485923767, 0.4827132225036621], "prob_new_token": [1.3113991371938027e-05, 5.32714111614041e-05, 0.994072675704956, 0.9741564989089966, 0.9752585887908936, 0.9826869368553162], "prob_old_token": 
[0.9641588926315308, 6.505897545139305e-06, 3.62938351372577e-07, 1.2949303709319793e-06, 1.4689951512991684e-06, 1.389771796311834e-06], "l1-model.layers.2.mlp.down_proj.weight": [52510.7734375], "l2-model.layers.2.mlp.down_proj.weight": [9.136237144470215], "linf-model.layers.2.mlp.down_proj.weight": [0.0025073736906051636], "request": {"prompt": "{}'s record label is", "subject": "Christina Aguilera", "target_new": {"str": "Syco Music"}, "old_answer": {"str": "RCA Records"}, "seed": 42}}]
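
Every record above shares one schema: a "request" (prompt template, subject, new target string, old answer, seed), per-step optimization traces ("loss_per_step", "prob_new", "prob_new_token", "prob_old_token"), and L1/L2/L-inf norms of the update to model.layers.2.mlp.down_proj.weight. The following is a minimal sketch, not part of the original log, of how such records could be loaded and summarized; the filename "edit_logs.json" and the 0.9 success threshold are assumptions for illustration, not values taken from the log.

    # Minimal sketch: load records shaped like the dump above and print a
    # per-edit summary. Assumes the array is saved as "edit_logs.json"
    # (hypothetical name) and that an edit "succeeds" when the probability
    # of the first target token exceeds 0.9 (assumed criterion).
    import json

    with open("edit_logs.json") as f:
        records = json.load(f)

    for rec in records:
        req = rec["request"]
        steps = len(rec["loss_per_step"])
        p_new = rec["prob_new_token"][-1]   # P(first target token) after the last step
        p_old = rec["prob_old_token"][-1]   # P(first old-answer token) after the last step
        l2 = rec["l2-model.layers.2.mlp.down_proj.weight"][0]  # L2 norm of the weight update
        print(f'{req["subject"]!r}: {req["old_answer"]["str"]} -> '
              f'{req["target_new"]["str"]} | steps={steps} '
              f'| p_new={p_new:.3f} | p_old={p_old:.2e} '
              f'| l2={l2:.2f} | success={p_new > 0.9}')

In the records above, "prob_new_token" typically climbs toward 1 while "prob_old_token" collapses by several orders of magnitude within a handful of steps, so a final-step threshold of this kind is one plausible way to tabulate edit success across subjects.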