anhdungitvn committed
Commit c9788de
1 Parent(s): 3b52f44
.gitattributes CHANGED
@@ -40,3 +40,11 @@ prompt/model_step_400.json filter=lfs diff=lfs merge=lfs -text
  prompt/model_step_500.json filter=lfs diff=lfs merge=lfs -text
  prompt/model_step_520.json filter=lfs diff=lfs merge=lfs -text
  prompt/tokens.json filter=lfs diff=lfs merge=lfs -text
+ prompt/adapter/model_step_0.json filter=lfs diff=lfs merge=lfs -text
+ prompt/adapter/model_step_100.json filter=lfs diff=lfs merge=lfs -text
+ prompt/adapter/model_step_200.json filter=lfs diff=lfs merge=lfs -text
+ prompt/adapter/model_step_300.json filter=lfs diff=lfs merge=lfs -text
+ prompt/adapter/model_step_400.json filter=lfs diff=lfs merge=lfs -text
+ prompt/adapter/model_step_500.json filter=lfs diff=lfs merge=lfs -text
+ prompt/adapter/model_step_520.json filter=lfs diff=lfs merge=lfs -text
+ prompt/adapter/tokens.json filter=lfs diff=lfs merge=lfs -text
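The patterns above route every new file under prompt/adapter/ through Git LFS, so the repository itself stores small pointer files while the payloads live in LFS storage. As a minimal usage sketch (the repo id is an assumption inferred from the "_name_or_path" in the backbone config further down), huggingface_hub resolves such pointers to the actual contents on download:

```python
# Hypothetical sketch: fetch one of the LFS-tracked adapter files.
# The repo_id below is assumed from "_name_or_path" in prompt/backbone/config.json;
# adjust it to the actual repository if it differs.
from huggingface_hub import hf_hub_download

local_path = hf_hub_download(
    repo_id="anhdungitvn/m-llama-bot-general-7b",
    filename="prompt/adapter/model_step_520.json",
)
print(local_path)  # path to the resolved file, not the 3-line LFS pointer
```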
prompt/adapter/block_order.json ADDED
@@ -0,0 +1 @@
+ [4999, 2417, 248, 3492, 6404, 3721, 7087, 840, 5341, 8656, 5862, 3701, 91, 233, 1611, 6182, 1156, 7295, 8429, 8479, 1731, 6970, 2440, 4473, 212, 4670, 4476, 1399, 6062, 5483, 5439, 1984, 2706, 5512, 7600, 5163, 1983, 661, 2302, 5411, 206, 1573, 1785, 4671, 8452, 3738, 952, 4217, 3546, 7640, 1891, 4816, 5023, 3269, 5201, 5345, 7909, 5687, 6583, 8127, 4309, 5413, 8566, 8534, 3037, 6708, 2082, 4973, 2326, 2283, 177, 3664, 484, 8518, 7178, 4206, 5883, 7496, 8548, 1908, 2154, 7232, 2734, 1922, 2809, 6226, 6204, 2492, 3825, 2276, 2867, 1504, 6948, 7221, 2726, 8341, 886, 6621, 4760, 359, 5229, 2474, 5033, 3560, 227, 1395, 402, 7613, 7777, 1377, 1376, 3500, 2159, 3484, 1825, 8105, 256, 2615, 3525, 2113, 4766, 6505, 4193, 4113, 7618, 7725, 2048, 6780, 1801, 3390, 5869, 8560, 7969, 196, 3461, 5319, 2465, 433, 874, 3413, 2421, 2903, 1771, 2511, 2791, 4789, 4814, 1196, 4467, 5740, 8578, 1758, 7332, 7028, 108, 4123, 2239, 6747, 8614, 464, 7272, 2599, 6487, 6698, 3807, 1247, 7470, 4083, 7727, 524, 861, 5543, 3932, 7448, 4253, 3822, 2982, 6177, 811, 1631, 3520, 6359, 5017, 3994, 1778, 3950, 3677, 5416, 2790, 7469, 5644, 7414, 6004, 3659, 3687, 2939, 3562, 3323, 6418, 3016, 6956, 3673, 6073, 6637, 4432, 5073, 3624, 3665, 6654, 7612, 8404, 6381, 3789, 4469, 793, 4987, 1946, 8192, 6739, 5476, 2835, 5378, 5435, 6112, 3606, 2826, 1122, 3692, 809, 8073, 4653, 4971, 1939, 4803, 6660, 3768, 4350, 1847, 5515, 707, 6781, 1605, 4404, 6559, 1806, 7750, 2181, 1292, 121, 5878, 2997, 7507, 2612, 2523, 4169, 3477, 3221, 2686, 8504, 1345, 5955, 8066, 4486, 2410, 4128, 2092, 6845, 7313, 783, 1356, 8448, 7890, 1514, 2950, 7526, 2839, 4374, 3626, 4466, 7011, 3447, 6025, 4097, 5348, 6927, 5442, 2958, 6104, 7432, 2233, 3196, 2610, 7270, 8556, 769, 3090, 3943, 898, 2382, 1562, 7018, 100, 1997, 3590, 2770, 7234, 3628, 725, 7334, 1033, 8034, 6337, 1705, 8071, 8283, 1691, 2710, 1382, 328, 830, 1864, 299, 3684, 6184, 5332, 1880, 3050, 2718, 2535, 6873, 3124, 3840, 3185, 5044, 3916, 4819, 1677, 499, 5540, 5422, 834, 8434, 7569, 8022, 6220, 732, 4906, 447, 7706, 3116, 6369, 3805, 7068, 3048, 5118, 5552, 753, 49, 3464, 5093, 3445, 6289, 7623, 6617, 1286, 5151, 498, 6362, 4058, 5524, 8179, 129, 6549, 7224, 2592, 1894, 7919, 6930, 8372, 5287, 5761, 5480, 7479, 5320, 8453, 8086, 8473, 6066, 7553, 7176, 729, 7423, 4556, 3597, 6072, 8618, 123, 5113, 39, 1695, 6398, 6449, 982, 3322, 8015, 7785, 2489, 393, 1626, 514, 5357, 847, 7140, 7040, 5954, 8524, 2590, 7069, 254, 5681, 6310, 5261, 3117, 2350, 7631, 1436, 1899, 5065, 3251, 5063, 2692, 4770, 1583, 1189, 8652, 2109, 8573, 7788, 5546, 3115, 5598, 2841, 399, 3436, 3897, 1650, 6935, 6604, 1757, 3759, 5692, 2644, 5852, 2373, 6589, 2727, 561, 8090, 6667, 1019, 3459, 4035, 3961, 5503, 545, 3083, 693, 6494, 244, 718, 3404, 2043, 4887, 7814, 3646, 5549, 4530, 3001, 4934, 2473, 3670, 3120, 6714, 3343, 5495, 6302, 3284, 1673, 4430, 7767, 7598, 6919, 2957, 904, 4728, 4199, 7994, 1230, 6353, 8147, 4682, 7662, 4652, 2445, 786, 1246, 774, 767, 7237, 1216, 3583, 3480, 6949, 5695, 7940, 6959, 1062, 6975, 4025, 7300, 2266, 1373, 119, 1641, 7659, 7204, 2797, 231, 6339, 3044, 8366, 5286, 2149, 5555, 460, 3133, 1519, 4394, 4071, 2611, 5727, 7183, 7737, 4398, 3984, 3263, 6200, 5472, 3716, 4733, 7596, 3369, 2768, 2932, 8041, 1674, 1367, 7827, 8292, 4450, 4544, 4519, 4171, 4335, 1862, 3305, 8198, 4589, 8611, 5471, 2717, 3106, 3735, 7346, 1327, 2660, 5931, 2271, 2074, 678, 7939, 8098, 2638, 4509, 3400, 7085, 7629, 7974, 3871, 1630, 262, 7933, 654, 8025, 7678, 8125, 4742, 8349, 6166, 1047, 6192, 7597, 8454, 
7517, 3660, 7442, 3383, 6115, 2855, 1537, 7533, 2890, 7892, 8024, 7165, 1571, 674, 6365, 3144, 5986, 534, 3046, 5193, 272, 5913, 1590, 7259, 2038, 2403, 8255, 8567, 1773, 4183, 993, 8593, 3430, 5963, 7377, 8190, 7062, 8320, 1496, 2618, 6458, 416, 7696, 8476, 1906, 3839, 7398, 391, 314, 6, 3620, 6534, 7406, 4483, 3437, 3161, 7896, 4985, 1088, 438, 2274, 5404, 8343, 8080, 4940, 7886, 4538, 7867, 6419, 4426, 6015, 5623, 4191, 275, 4050, 2723, 185, 4457, 7778, 6693, 38, 1210, 933, 5886, 7243, 5486, 7732, 7233, 410, 1510, 676, 3900, 689, 4462, 947, 5902, 1293, 820, 5129, 6863, 6639, 421, 7023, 5322, 1959, 6491, 67, 4290, 512, 1759, 1861, 4419, 7278, 2521, 6557, 8385, 1108, 7824, 6619, 3220, 7294, 2854, 3167, 6516, 3987, 3488, 6545, 1742, 5859, 5724, 8088, 8630, 3639, 7274, 2641, 4537, 953, 346, 6537, 6526, 1530, 4036, 4576, 7804, 7724, 3135, 7217, 3793, 6978, 6199, 7265, 8390, 6915, 8594, 722, 2913, 4552, 731, 2202, 780, 1898, 6161, 4212, 3796, 858, 4382, 3713, 7990, 2110, 1009, 8388, 8082, 6407, 7080, 223, 4389, 5441, 634, 2779, 230, 1012, 3405, 3027, 6871, 5968, 3408, 6728, 6389, 115, 6566, 3893, 4636, 4968, 599, 6425, 88, 6413, 6570, 160, 4762, 3815, 5269, 2053, 4289, 3195, 1007, 5285, 5908, 7288, 2185, 3354, 4794, 8212, 5646, 7177, 4674, 1708, 5603, 3297, 2285, 5798, 7148, 2026, 3663, 7061, 5565, 1368, 7453, 2085, 7702, 2998, 2067, 2163, 5444, 4648, 7260, 5571, 6107, 3365, 3226, 5189, 5367, 7451, 4300, 2669, 168, 5614, 3087, 1324, 7081, 3411, 1661, 6607, 4303, 4847, 8232, 3762, 700, 2666, 4296, 5184, 6003, 5792, 2128, 4773, 1855, 430, 4858, 3962, 1797, 7686, 6338, 4688, 7480, 4567, 4601, 1989, 2836, 208, 1255, 882, 4132, 7723, 6646, 2000, 5699, 4936, 302, 4029, 1754, 5581, 5100, 3481, 2418, 4376, 8399, 6633, 8163, 1828, 5970, 5164, 10, 1528, 1680, 8219, 3438, 2432, 5575, 7472, 3524, 3787, 816, 2992, 5615, 980, 5344, 5032, 7309, 5966, 5478, 3690, 3715, 7131, 7226, 8069, 5169, 5412, 5518, 893, 4438, 5953, 1363, 3217, 3397, 3406, 136, 4609, 773, 8597, 4223, 3235, 3068, 6135, 2737, 6222, 3320, 8535, 1144, 1688, 235, 5717, 1355, 1322, 4195, 6885, 5018, 6686, 3966, 412, 6858, 6499, 5221, 7397, 1431, 5191, 2827, 7815, 5645, 2838, 5676, 3544, 6768, 3836, 4771, 4108, 3653, 4580, 1131, 5609, 590, 7683, 3731, 5241, 2733, 6097, 6624, 2296, 8062, 8433, 1262, 7934, 502, 6797, 6294, 4912, 2821, 3977, 8527, 704, 3559, 8395, 8301, 2655, 7090, 5530, 7810, 2192, 8040, 5498, 7122, 4951, 320, 3615, 1972, 4900, 5475, 3591, 469, 7452, 3641, 5022, 684, 6324, 1624, 7492, 5691, 6270, 8157, 1545, 5002, 6706, 1412, 1671, 4749, 6553, 2293, 7572, 6810, 5082, 8574, 750, 7171, 1582, 5674, 3254, 4238, 2223, 8412, 7017, 5535, 1751, 7079, 8603, 7772, 4993, 13, 4716, 187, 2548, 832, 7869, 1715, 1656, 3979, 5654, 7634, 4956, 8244, 7046, 4020, 4204, 4140, 2221, 6701, 7439, 3892, 5237, 2021, 5776, 435, 3255, 8257, 6246, 7637, 3671, 8064, 6778, 142, 6431, 7861, 6402, 5418, 8035, 3585, 4649, 3930, 6075, 4888, 5067, 2859, 2876, 2384, 3810, 6132, 1892, 308, 7491, 2258, 3374, 7993, 4298, 5389, 1416, 7513, 1336, 3873, 6456, 6194, 8306, 3858, 2877, 958, 2116, 6666, 4638, 3929, 5247, 546, 2678, 101, 578, 6120, 2496, 1064, 2344, 2087, 4502, 6046, 7031, 6756, 4780, 4618, 6579, 2661, 5390, 3829, 1448, 1951, 401, 1736, 4744, 4043, 6387, 5106, 2194, 2449, 2561, 7230, 1438, 2907, 4594, 7831, 6465, 1237, 6716, 7759, 331, 7015, 4859, 1575, 6704, 8499, 568, 8148, 1659, 1168, 818, 219, 6694, 2926, 5370, 4070, 5828, 5785, 8515, 6279, 6563, 1676, 4039, 2534, 1858, 4164, 4205, 8095, 1026, 7120, 3130, 2989, 5419, 151, 3852, 2862, 819, 5009, 2434, 
3494, 3899, 7337, 1812, 3667, 6680, 4821, 3127, 8486, 7739, 441, 6311, 1993, 139, 4506, 4778, 3799, 5281, 6974, 1918, 7590, 8337, 7748, 5593, 6554, 1046, 1095, 1826, 1166, 1875, 7351, 2567, 4010, 8379, 3558, 6644, 4170, 6798, 6939, 2971, 571, 5688, 3843, 4741, 4559, 8628, 8600, 1949, 6377, 4893, 3314, 4292, 5446, 8087, 7499, 594, 4508, 1942, 5636, 8009, 5679, 3151, 6136, 6976, 4272, 4620, 2456, 8092, 8123, 5915, 383, 6962, 690, 4188, 1445, 4001, 7928, 1844, 5545, 3926, 6415, 7514, 3058, 6201, 2460, 1724, 6519, 5204, 6264, 8094, 2729, 4607, 3862, 7703, 3882, 8058, 4422, 178, 5731, 22, 1225, 6787, 8424, 775, 6615, 8166, 5041, 4031, 7363, 7582, 7361, 7604, 4647, 5149, 2531, 2264, 2668, 6568, 445, 3264, 3096, 4152, 2780, 5364, 7508, 5448, 740, 252, 31, 4775, 6614, 3184, 8624, 6816, 1077, 4666, 4021, 7115, 7336, 5078, 4230, 7870, 3529, 7995, 318, 8001, 644, 3047, 4348, 1149, 12, 6544, 7257, 8183, 4454, 1306, 5867, 6945, 5324, 4994, 2380, 8459, 1974, 4418, 4257, 327, 2505, 1221, 6700, 8100, 948, 5167, 1549, 4960, 7714, 3678, 5628, 8417, 1265, 2550, 4400, 5630, 8036, 7150, 6640, 7266, 7621, 7019, 190, 2591, 3412, 3417, 3702, 6784, 5194, 2475, 3981, 1638, 6522, 6910, 496, 26, 5089, 4586, 7531, 4621, 3325, 5161, 8540, 7765, 5026, 5481, 2139, 8384, 4247, 3617, 4915, 537, 3391, 4406, 2259, 5594, 8269, 5337, 7895, 1960, 1150, 506, 337, 1766, 2156, 2023, 3049, 3946, 7042, 2388, 8442, 4494, 2212, 3342, 2447, 5004, 1315, 8109, 4410, 1499, 6368, 6870, 4099, 4453, 3601, 2565, 6269, 7007, 1158, 2917, 7520, 5128, 4680, 4365, 5176, 3847, 7976, 8050, 2024, 7950, 4381, 2628, 8042, 5982, 3042, 7542, 5942, 7284, 135, 3059, 7227, 3028, 3433, 6931, 281, 7133, 5, 918, 1558, 3259, 4232, 7547, 3327, 5137, 6918, 8056, 5668, 3960, 4953, 6029, 5097, 6830, 6584, 4144, 6014, 4280, 1602, 715, 6308, 3931, 8030, 1842, 3959, 6018, 1359, 7535, 4755, 995, 4124, 6328, 2426, 7050, 984, 403, 284, 1651, 5175, 8455, 915, 4667, 2425, 8103, 6343, 7056, 3568, 3451, 1837, 8405, 440, 8531, 315, 8052, 7071, 2960, 8165, 8201, 3035, 4867, 6592, 374, 5209, 1973, 4200, 5489, 2190, 3084, 4958, 1183, 7884, 1232, 2468, 7483, 7663, 5158, 1643, 2824, 2775, 4991, 5520, 3203, 6211, 4167, 1941, 2201, 7982, 2690, 7410, 5881, 4072, 1937, 7811, 7021, 1241, 5840, 4047, 5713, 3733, 3070, 2720, 3347, 8207, 6749, 7427, 193, 4651, 6514, 7650, 2673, 7354, 1725, 615, 878, 8418, 1347, 8037, 7914, 6837, 3232, 294, 1956, 3642, 3085, 8027, 2665, 1208, 1893, 1195, 875, 4527, 3290, 6902, 6319, 685, 2869, 427, 3784, 8242, 1621, 7239, 6009, 6960, 7504, 6886, 7114, 8177, 8021, 8185, 4082, 5523, 6515, 6913, 688, 6460, 389, 7766, 1051, 4646, 3205, 8558, 6585, 8496, 6743, 8595, 1682, 4493, 5698, 6272, 3293, 2622, 7565, 5941, 742, 4776, 6463, 5906, 6173, 6304, 8136, 1723, 1070, 720, 3119, 4084, 7726, 7458, 5871, 4625, 4375, 1006, 1497, 4882, 7954, 6130, 3631, 6635, 3189, 4740, 220, 4176, 7906, 7162, 5597, 1598, 4691, 6478, 6521, 7744, 4259, 7421, 1932, 5246, 6223, 6023, 98, 3706, 2847, 4832, 4363, 8507, 2117, 5210, 1222, 4861, 7376, 432, 4732, 1644, 419, 843, 7981, 7571, 2986, 3348, 1703, 3080, 4492, 4133, 4878, 5202, 2461, 1411, 6084, 1743, 7523, 358, 5119, 7655, 2405, 6247, 2965, 2983, 5961, 5349, 7863, 6540, 553, 364, 2687, 3666, 3942, 1613, 6455, 4307, 3014, 2295, 3421, 8322, 589, 1620, 6195, 1800, 6147, 1392, 7894, 7809, 1263, 5657, 1982, 1053, 3603, 3725, 6809, 2479, 4416, 7754, 4127, 7016, 473, 459, 7488, 6077, 3834, 3669, 665, 7229, 3427, 1577, 7691, 8305, 1670, 2291, 7267, 1427, 222, 4845, 2372, 2340, 791, 3572, 2495, 4854, 7285, 6242, 2585, 2846, 1000, 1667, 511, 
7900, 5173, 5768, 7574, 5767, 5359, 5558, 7585, 6597, 61, 5309, 7620, 4522, 712, 3947, 1678, 3134, 6678, 5182, 5633, 6709, 8641, 1566, 4310, 6642, 2137, 3939, 7112, 6779, 8214, 2040, 3804, 3681, 794, 2378, 5741, 2241, 5951, 3338, 981, 5279, 2663, 8029, 2676, 7255, 6410, 7506, 3372, 8258, 1593, 2311, 6032, 3654, 2814, 3728, 2052, 768, 5174, 3566, 2077, 215, 5428, 1056, 4250, 5216, 887, 5490, 4990, 2786, 5315, 6292, 5117, 6764, 4633, 7352, 5052, 1971, 3183, 759, 1896, 3013, 4975, 3592, 5263, 1370, 4500, 6069, 2941, 5445, 7938, 2228, 8450, 5386, 1820, 6996, 4709, 8335, 2879, 8117, 5610, 5653, 702, 3876, 87, 2613, 6361, 94, 4715, 1414, 5372, 3229, 2923, 3396, 540, 4304, 1028, 866, 347, 51, 8218, 2921, 8327, 6017, 7756, 2833, 627, 348, 6119, 4126, 7060, 3878, 8316, 1460, 5046, 6596, 29, 6101, 4736, 5095, 2013, 4192, 7475, 1555, 1999, 3990, 2765, 174, 4397, 5408, 7935, 5460, 3420, 7518, 3908, 1182, 4938, 8498, 5043, 4745, 5013, 4423, 394, 1420, 2338, 2094, 4353, 6665, 941, 6641, 144, 2795, 485, 1556, 247, 1135, 4377, 2147, 5415, 6513, 4569, 2084, 7899, 3410, 1040, 4693, 2091, 2685, 2199, 8484, 2771, 58, 697, 2616, 1471, 5994, 565, 7250, 7524, 388, 4301, 8358, 827, 2972, 5928, 6435, 1786, 4981, 6922, 4713, 8576, 1127, 4884, 4605, 2247, 4697, 3219, 642, 5218, 666, 2366, 2002, 3999, 6528, 503, 375, 150, 60, 698, 6811, 2379, 7925, 2662, 6399, 7664, 2056, 663, 6926, 7072, 4977, 6306, 902, 8344, 5976, 3446, 4254, 3280, 3388, 7581, 7660, 3832, 1739, 5492, 6891, 4241, 5036, 4788, 6741, 1049, 2183, 2815, 7707, 3192, 4218, 4048, 4411, 90, 3650, 785, 6268, 8331, 3295, 7830, 4282, 5092, 1067, 6972, 1209, 7105, 2324, 2232, 6106, 3252, 6380, 454, 6981, 368, 624, 977, 4017, 6011, 1257, 3487, 6228, 6277, 1581, 4005, 5239, 4408, 618, 2429, 5304, 8206, 1944, 1795, 3113, 442, 8554, 3123, 7437, 2670, 3, 987, 3854, 5711, 1385, 2083, 6209, 7220, 6964, 6376, 2469, 2575, 5531, 1713, 8067, 4490, 7871, 1763, 5632, 1829, 3915, 1093, 3493, 7541, 323, 3015, 1921, 1683, 2238, 3538, 6233, 2298, 4442, 7357, 4061, 7216, 3093, 4313, 491, 743, 5277, 7721, 6754, 7647, 6518, 1120, 1360, 7196, 3806, 4284, 5766, 3919, 6186, 6898, 7922, 1475, 4208, 1357, 4361, 7076, 2541, 74, 2450, 4923, 117, 3291, 2719, 7563, 6374, 4092, 6508, 5496, 4868, 4970, 7642, 7359, 4685, 1866, 3503, 7325, 8216, 3697, 1402, 2905, 3885, 3898, 756, 1684, 7137, 2157, 2691, 6841, 2712, 4992, 6643, 1928, 2504, 6497, 8414, 303, 1903, 1052, 8174, 638, 8149, 7192, 3564, 2642, 8651, 6127, 2973, 4270, 3533, 7534, 2072, 2227, 1137, 5311, 2512, 3914, 6835, 4631, 5918, 3791, 7367, 4746, 5333, 3523, 6877, 8533, 3191, 2491, 3689, 3456, 5305, 3023, 4051, 404, 7730, 157, 3737, 7303, 385, 8083, 2029, 4401, 8209, 8440, 6988, 4201, 1538, 418, 7749, 8480, 7645, 3296, 8176, 2027, 2853, 264, 8601, 1415, 7808, 5293, 5326, 3786, 5297, 4606, 2697, 6648, 1329, 4019, 8075, 5675, 4221, 3618, 1463, 806, 7813, 647, 240, 1729, 7501, 8099, 7425, 3146, 6027, 2470, 7386, 5533, 3336, 8154, 7583, 3312, 8351, 4802, 7966, 7279, 6253, 8108, 5894, 7970, 161, 1074, 5992, 5426, 5917, 563, 4750, 6064, 3841, 8145, 8047, 7795, 8323, 2557, 5048, 5127, 8612, 4811, 6717, 7024, 349, 4995, 3849, 1607, 7416, 5521, 7639, 3418, 1023, 6278, 5039, 2894, 5744, 3516, 5827, 3266, 903, 535, 4813, 3554, 5787, 8254, 7342, 2499, 978, 5855, 7097, 633, 4701, 5625, 3682, 3976, 7473, 562, 4524, 7191, 2704, 1565, 1856, 6121, 3549, 6993, 164, 3345, 7718, 73, 4443, 6550, 5417, 2645, 4785, 3570, 2130, 2457, 6760, 2288, 4799, 7688, 7980, 1126, 3541, 2596, 8250, 3109, 2716, 4535, 5111, 2399, 116, 5083, 4974, 5084, 3798, 1418, 3303, 
8310, 8236, 1238, 6197, 3302, 1505, 3835, 4095, 6193, 6628, 2709, 8370, 2069, 7746, 3803, 3271, 8133, 6695, 306, 7677, 5139, 7161, 4579, 4138, 1840, 8510, 3457, 2667, 1266, 5547, 973, 4256, 5959, 772, 6668, 988, 3131, 5823, 4249, 7792, 4240, 8386, 5872, 1428, 4694, 449, 5275, 7, 5225, 1717, 7757, 2044, 4738, 6632, 8010, 1290, 4496, 5834, 5578, 4417, 2885, 4724, 7164, 7256, 1251, 7091, 1271, 5318, 748, 6287, 1423, 3586, 1468, 6043, 677, 8011, 5590, 6332, 946, 2020, 8613, 6766, 6734, 639, 1433, 810, 1755, 4487, 3294, 3894, 2360, 2943, 6966, 8267, 6179, 4717, 3479, 3194, 7142, 7035, 2654, 1031, 8443, 2215, 387, 6908, 6511, 1310, 5799, 1962, 7020, 8081, 3790, 1425, 7592, 6056, 1494, 3376, 2394, 4710, 2902, 2818, 1197, 3435, 1379, 8538, 1798, 881, 5995, 4998, 7816, 2891, 8135, 2634, 1325, 5362, 1931, 5410, 8285, 2553, 2268, 7378, 3031, 7348, 619, 4723, 6967, 336, 5900, 1665, 2166, 8237, 734, 5938, 6657, 5500, 5334, 963, 6587, 5077, 7211, 5550, 5177, 8307, 3958, 1716, 595, 6705, 8508, 6979, 5945, 7875, 4434, 16, 8570, 6826, 7157, 5818, 2579, 5178, 2955, 1348, 7426, 3648, 1895, 6808, 6329, 3688, 3005, 2782, 7722, 3385, 4596, 2171, 2279, 2564, 6482, 6883, 72, 3081, 4094, 8311, 4102, 2938, 6301, 204, 2009, 8590, 8260, 4557, 1823, 7617, 1663, 4825, 7380, 3707, 790, 8072, 4986, 5331, 1134, 35, 8529, 7747, 339, 2151, 2081, 8139, 8637, 7151, 1995, 5432, 1136, 7669, 2901, 5145, 6498, 2313, 4805, 527, 5054, 8038, 288, 6812, 6208, 3052, 4757, 82, 8031, 7436, 4386, 8362, 8360, 7353, 3767, 7054, 6992, 5477, 1060, 7316, 2979, 4287, 4096, 8497, 3848, 340, 8122, 3674, 7335, 7489, 3207, 2883, 5096, 6129, 4547, 6203, 583, 5833, 3017, 992, 3662, 8169, 1375, 2482, 1447, 3286, 8284, 7409, 1634, 8396, 3009, 912, 493, 3200, 1055, 4488, 3495, 4346, 7715, 4395, 4941, 4150, 6676, 6905, 2120, 6218, 5786, 5973, 7364, 7615, 7077, 4079, 396, 5824, 1259, 7607, 3299, 8426, 6408, 2307, 2588, 649, 8562, 7546, 3844, 5382, 7228, 8121, 4384, 2842, 628, 2064, 1553, 4873, 4665, 4062, 7134, 1465, 6984, 5907, 671, 46, 667, 3308, 1911, 4013, 1709, 7299, 3757, 3399, 4864, 4872, 3244, 4228, 3607, 2582, 7986, 8489, 7537, 699, 7092, 1393, 8196, 617, 4080, 3173, 8120, 1318, 612, 59, 7160, 3056, 6019, 1459, 4536, 278, 1745, 8375, 6943, 2112, 7955, 3089, 7393, 3453, 6240, 3944, 4319, 7979, 6793, 2204, 143, 5612, 7825, 2937, 198, 6884, 2632, 7247, 6273, 392, 4011, 5863, 8178, 3004, 5431, 959, 8410, 7657, 1378, 3332, 7573, 8085, 7036, 2796, 8532, 3377, 6239, 2643, 6406, 567, 3938, 7394, 8329, 8487, 7404, 5648, 723, 2245, 7764, 3387, 3321, 1016, 214, 6037, 7447, 8274, 6729, 3166, 2191, 3809, 541, 243, 994, 7430, 3514, 4081, 7462, 937, 6085, 6181, 1854, 2359, 2802, 4683, 4178, 3249, 5694, 4098, 3726, 2988, 3527, 2080, 1218, 5365, 3358, 1337, 3105, 1037, 6761, 8175, 6486, 246, 1001, 2374, 2370, 2547, 6674, 6571, 5937, 1287, 8167, 7248, 5924, 4156, 7682, 43, 6479, 7292, 3258, 494, 6038, 1913, 2323, 7433, 7343, 6867, 1018, 351, 6732, 1815, 3062, 7298, 1080, 2100, 1733, 3114, 7029, 2761, 5589, 4521, 6965, 5684, 6006, 2837, 2155, 8049, 5795, 5844, 2730, 3972, 261, 7984, 4967, 1291, 1963, 6401, 6141, 452, 5797, 1010, 4067, 3415, 7245, 2811, 2453, 1191, 7873, 7405, 8018, 8439, 5133, 8401, 1184, 1735, 70, 0, 5234, 6980, 4678, 3145, 338, 6291, 943, 6414, 6774, 557, 8409, 465, 4852, 914, 5222, 6155, 2054, 224, 1507, 8587, 921, 7059, 4702, 4540, 1068, 6160, 2558, 1796, 8584, 2625, 6212, 5591, 5233, 6622, 7852, 8119, 703, 232, 7913, 4961, 5298, 1419, 3008, 8321, 1269, 7684, 1702, 1250, 2560, 1002, 730, 2025, 6164, 656, 2609, 1592, 8289, 7948, 5796, 1105, 
6800, 3318, 3326, 6280, 5000, 692, 1397, 4615, 1711, 1940, 3778, 5325, 3079, 8312, 8217, 3072, 3234, 3147, 6400, 1627, 2321, 1014, 6731, 4804, 6727, 2381, 6259, 2743, 3409, 7608, 5493, 462, 5620, 4513, 3627, 4315, 3604, 1965, 3973, 7589, 3063, 6113, 5984, 1764, 6881, 1616, 1978, 4131, 2118, 48, 4173, 8226, 6466, 5604, 4962, 7956, 3971, 691, 6451, 2952, 7690, 3776, 4839, 1384, 2918, 1636, 1446, 4396, 3819, 8168, 5621, 4883, 4908, 8279, 2840, 871, 4641, 167, 4352, 301, 7697, 4311, 5539, 4002, 7622, 8474, 655, 3474, 2887, 6958, 8403, 6416, 2964, 7485, 4074, 199, 7320, 7135, 6961, 5458, 132, 1594, 873, 3483, 1015, 7713, 6757, 6457, 556, 2058, 6561, 5058, 972, 3099, 5171, 28, 2303, 5264, 2679, 5861, 7189, 5956, 7762, 1649, 728, 8471, 5358, 5754, 5782, 7972, 6371, 5467, 8181, 7429, 5569, 6819, 1082, 3539, 7568, 4879, 7710, 6859, 3890, 357, 6813, 3182, 4034, 8565, 492, 5355, 7490, 4219, 4591, 5832, 8413, 5482, 4551, 5425, 295, 7889, 2240, 2542, 968, 6609, 989, 2587, 1061, 6987, 3395, 234, 8096, 5034, 3272, 2549, 4624, 4154, 3313, 2935, 6172, 7111, 6483, 4786, 4877, 209, 6490, 2467, 4853, 1548, 1540, 4622, 4237, 6344, 3638, 8143, 41, 6875, 1788, 2721, 8572, 747, 2888, 7839, 40, 2356, 8089, 2200, 3125, 4657, 2049, 2222, 4465, 8599, 4525, 5685, 2101, 1299, 2506, 6770, 7188, 8264, 2659, 781, 1606, 8406, 1, 3444, 6890, 8026, 456, 7798, 762, 8353, 625, 5559, 78, 4027, 5836, 755, 7373, 7004, 7803, 5822, 8286, 8222, 4526, 2347, 5846, 4210, 203, 197, 7712, 1728, 8546, 6074, 2922, 7820, 4919, 8485, 2319, 8591, 1588, 4855, 6044, 5262, 3557, 313, 1211, 3808, 944, 7632, 2030, 228, 463, 1188, 3717, 4276, 2273, 3057, 5760, 1152, 3414, 4645, 1128, 929, 1692, 2409, 1142, 7946, 7271, 5451, 5522, 3775, 3278, 1719, 7843, 8247, 845, 3781, 8368, 5351, 7038, 6257, 8646, 5156, 6341, 3765, 5383, 4545, 761, 5788, 4885, 5920, 5757, 2014, 8077, 7652, 5662, 103, 354, 5395, 3475, 5347, 4910, 3202, 3694, 1192, 7780, 4220, 2639, 6828, 1804, 2392, 8387, 2522, 1408, 1076, 5756, 1289, 5423, 265, 5249, 2671, 4698, 4511, 5147, 2060, 4190, 8106, 1455, 7067, 3201, 3672, 7917, 5854, 439, 6844, 4835, 7456, 7848, 5029, 6569, 2446, 7055, 6940, 543, 922, 7774, 6923, 7711, 1511, 300, 6148, 461, 4283, 4076, 7530, 8523, 2760, 2008, 3517, 5912, 6370, 6595, 8346, 1485, 6079, 271, 3685, 2936, 2207, 1288, 1557, 6171, 3142, 5024, 5562, 4699, 8233, 7570, 6889, 5631, 4184, 471, 1133, 7169, 7905, 5742, 2327, 1681, 5905, 8116, 7002, 3482, 1022, 1865, 3501, 7306, 7829, 966, 6792, 1477, 1513, 4658, 857, 8517, 3714, 3623, 4989, 5461, 4582, 8130, 1722, 5957, 1869, 5440, 1696, 5977, 8620, 4143, 2462, 4984, 7366, 4534, 4543, 8070, 3927, 7717, 4687, 828, 3460, 8580, 4869, 1309, 962, 8129, 7997, 8371, 1383, 6111, 8288, 4824, 822, 1500, 7311, 2262, 1603, 500, 4663, 2249, 4009, 4251, 6118, 128, 311, 1770, 7497, 522, 3128, 8514, 1169, 1614, 1358, 2974, 7146, 7130, 5736, 515, 7880, 3719, 175, 4180, 3589, 4116, 1740, 5162, 2248, 7173, 7198, 7887, 229, 2031, 7918, 7201, 1078, 7883, 5240, 7538, 4393, 8544, 1567, 4049, 1792, 5506, 7932, 4980, 4673, 5682, 1813, 3340, 4783, 1839, 4266, 5624, 3452, 4445, 7175, 6791, 2689, 6862, 5312, 3100, 3148, 6189, 7961, 4886, 752, 8309, 7781, 6769, 3783, 960, 4059, 6636, 6010, 6427, 5890, 7118, 3736, 6430, 2435, 7412, 3630, 2012, 7962, 7286, 2481, 6170, 63, 1845, 3821, 7331, 6158, 1004, 4371, 3758, 1116, 2170, 5021, 6167, 2422, 1672, 6543, 3304, 5563, 6675, 924, 6690, 8287, 1919, 8398, 3250, 971, 7902, 2398, 5775, 2963, 2187, 7975, 3846, 7836, 18, 7561, 3034, 4356, 7771, 8208, 7876, 6168, 4782, 2123, 4903, 3720, 4413, 7841, 4388, 
8308, 7312, 6146, 8617, 173, 7923, 4497, 7000, 3156, 3794, 8355, 5582, 1486, 6036, 1843, 6843, 3861, 3466, 853, 216, 2332, 1027, 8170, 2942, 2831, 2539, 4053, 8215, 304, 4928, 7083, 17, 975, 5710, 7548, 3985, 7407, 852, 5683, 2635, 4379, 5011, 7389, 758, 5895, 5003, 844, 7047, 7327, 4446, 4857, 588, 297, 8184, 2682, 2059, 211, 4111, 3625, 4281, 4312, 420, 739, 5755, 8569, 6575, 5236, 6485, 3856, 6805, 3551, 1164, 3891, 3889, 6802, 7322, 5584, 5622, 4134, 8079, 2959, 5157, 1075, 901, 280, 3575, 735, 1330, 7277, 5168, 5363, 6071, 6807, 189, 6423, 7104, 1794, 6968, 6238, 8265, 4917, 3536, 1109, 2629, 1449, 6436, 733, 201, 7872, 3077, 1520, 4669, 7074, 7674, 6060, 5336, 7965, 4331, 4258, 4833, 5605, 5930, 2725, 3104, 4358, 5664, 4177, 3011, 6391, 5068, 3970, 3941, 1628, 8627, 5580, 4947, 5047, 5939, 7736, 6058, 501, 7467, 8494, 5302, 4460, 859, 5847, 894, 3423, 2674, 7856, 8333, 3074, 4105, 4336, 4421, 5611, 8468, 737, 1988, 7761, 1083, 2848, 2135, 2177, 7065, 4571, 5080, 3462, 3101, 8500, 2230, 7943, 2586, 5952, 2526, 4626, 2744, 1466, 879, 1986, 2318, 3149, 2152, 8342, 4147, 6532, 1231, 2527, 5407, 3177, 8629, 1857, 6842, 8420, 3748, 2430, 2895, 3578, 4090, 5203, 7051, 5889, 3683, 5997, 2342, 3216, 6488, 4437, 7644, 657, 1305, 3609, 3954, 3896, 2148, 5701, 895, 2951, 6467, 8304, 7012, 382, 3442, 1350, 8200, 2341, 7222, 4613, 4294, 4267, 3380, 910, 8649, 109, 6574, 6045, 2677, 7653, 2391, 5323, 5934, 479, 6080, 826, 7213, 5391, 7339, 3698, 5943, 3902, 267, 2961, 4370, 8657, 8016, 5969, 3373, 226, 5062, 4115, 7891, 3903, 7931, 4334, 2105, 4654, 2414, 3344, 477, 131, 6145, 7141, 6954, 1848, 2413, 2498, 7449, 6333, 2874, 1923, 5306, 7053, 7390, 4696, 4121, 5335, 7218, 7709, 446, 6916, 6169, 1734, 2165, 3225, 4274, 7045, 2107, 7203, 1344, 7716, 2896, 3371, 714, 1467, 5258, 6682, 3337, 3764, 4662, 7705, 5892, 745, 8634, 1568, 2252, 2098, 3509, 5421, 1096, 8276, 6180, 7139, 3491, 5993, 1312, 911, 2636, 3953, 8553, 8017, 6139, 5842, 3311, 1572, 1704, 1276, 7323, 106, 53, 6165, 6048, 6053, 3443, 7740, 949, 6050, 3289, 6235, 2243, 6876, 5450, 1461, 482, 6586, 7034, 6187, 8463, 8032, 7742, 3743, 3785, 8571, 2994, 4964, 3107, 8456, 257, 2173, 130, 4203, 7214, 289, 1236, 838, 5564, 490, 4862, 8227, 1790, 5935, 92, 3458, 5751, 8568, 1172, 7790, 821, 8180, 7158, 4179, 6342, 4566, 5076, 7215, 3174, 2597, 1658, 2182, 531, 4838, 5857, 1787, 7180, 7375, 3213, 5497, 5317, 7929, 4603, 5217, 1698, 163, 8537, 7307, 4954, 2169, 6722, 6360, 4898, 6946, 4451, 916, 5587, 5739, 5579, 1508, 7136, 3948, 5556, 3467, 2832, 3176, 7009, 7098, 3505, 1639, 3740, 1248, 2537, 5155, 6472, 292, 4213, 287, 2773, 1030, 3865, 6711, 6444, 8477, 5055, 8639, 2752, 6720, 4531, 7308, 7864, 5677, 5266, 4812, 7450, 3208, 7100, 1720, 6782, 6102, 3526, 8520, 159, 1868, 5647, 470, 2968, 3215, 253, 4896, 4792, 5385, 6412, 614, 4634, 7246, 5551, 5764, 475, 5704, 1258, 286, 3866, 1326, 7324, 708, 2396, 3680, 576, 8211, 7672, 1980, 333, 7212, 4966, 7545, 2299, 1927, 5831, 6901, 3989, 1881, 2153, 664, 5708, 5511, 3616, 2036, 8146, 5213, 2256, 7588, 2448, 3282, 5321, 2927, 2985, 6724, 7066, 4452, 1450, 472, 444, 4325, 6331, 6296, 613, 3139, 3455, 5876, 7209, 4139, 6582, 3918, 5488, 1101, 6225, 1267, 7926, 1111, 7052, 7847, 4969, 1335, 5427, 3936, 854, 8528, 8158, 5925, 6216, 2603, 1935, 8588, 1261, 8552, 4907, 152, 5665, 3980, 6335, 8248, 1206, 2176, 1979, 7344, 6432, 7947, 6117, 2246, 6936, 7008, 1228, 242, 1275, 3022, 622, 1272, 1541, 1654, 6565, 5387, 2533, 4085, 1744, 5356, 3478, 6299, 7290, 5642, 6938, 1017, 263, 3095, 6850, 7428, 4054, 4807, 
3091, 5932, 5190, 4137, 6560, 3012, 3863, 112, 4155, 1509, 909, 3018, 5501, 5958, 3394, 3555, 5728, 5474, 162, 4355, 6481, 7857, 3082, 5714, 4904, 7779, 5456, 4392, 6134, 1901, 5789, 577, 5088, 1589, 8416, 6650, 2803, 3887, 5838, 1256, 6783, 3622, 7276, 6525, 5762, 3633, 4830, 6735, 1900, 5877, 8645, 7454, 1873, 5374, 3440, 2174, 5381, 96, 1115, 2604, 6763, 5143, 5226, 4876, 3030, 3317, 4041, 7558, 8046, 8151, 7129, 5634, 6022, 3779, 7768, 3813, 2990, 3425, 848, 7745, 5778, 7484, 114, 4952, 7575, 2870, 3599, 8447, 4412, 5393, 3831, 7865, 2133, 1264, 6897, 2458, 6282, 406, 1057, 533, 6852, 5066, 7039, 8101, 3175, 2045, 7096, 2141, 8608, 3594, 7729, 495, 2544, 3341, 8093, 1552, 950, 4261, 1666, 8495, 7687, 5735, 7529, 7797, 6634, 2545, 6523, 2260, 7916, 580, 8246, 1422, 2497, 1351, 4578, 1487, 7654, 2412, 6652, 6052, 2003, 1484, 5850, 1591, 7186, 4136, 1405, 6971, 5220, 1240, 3097, 8559, 6210, 6758, 179, 483, 1767, 2041, 7478, 1619, 6028, 6501, 5487, 7731, 3709, 5891, 5730, 2251, 4153, 5893, 5272, 6855, 4078, 6178, 4897, 5794, 880, 113, 5253, 5254, 5972, 1789, 5839, 8626, 3168, 1364, 5196, 6832, 6878, 1760, 1223, 7835, 2652, 6405, 83, 4369, 1664, 6309, 4484, 6937, 5949, 127, 1550, 6468, 564, 999, 4226, 2732, 6849, 4299, 3261, 126, 8408, 846, 4349, 6535, 8019, 4342, 523, 1380, 2310, 6882, 8621, 6536, 8511, 5183, 6759, 6063, 5747, 5295, 609, 6185, 6347, 4063, 6450, 5820, 4242, 519, 7075, 290, 1063, 6691, 925, 1453, 3933, 2205, 3703, 5328, 4338, 6175, 2287, 3800, 1470, 3178, 8616, 7648, 8063, 7143, 6252, 4926, 2589, 8347, 3983, 3237, 1612, 2016, 3463, 324, 4834, 4403, 260, 8354, 2602, 5637, 125, 415, 5671, 5585, 4925, 1564, 4277, 2899, 1284, 5087, 2882, 7360, 6982, 4965, 829, 1113, 1544, 1910, 3061, 4856, 2614, 1308, 2172, 4955, 5936, 1307, 5499, 7882, 7078, 372, 6334, 2333, 6033, 124, 6110, 8023, 5830, 6677, 760, 6094, 6599, 7431, 1396, 4345, 7853, 3267, 2367, 4763, 4840, 3439, 8097, 5519, 2889, 6623, 2825, 7838, 5845, 3519, 6865, 1859, 4922, 3975, 658, 7556, 4996, 5680, 1340, 431, 6461, 2005, 2763, 2914, 5091, 6448, 5599, 686, 194, 8654, 4279, 4643, 6953, 2132, 6864, 2600, 4351, 1585, 7834, 797, 3224, 36, 5709, 5470, 8007, 7802, 1877, 957, 4767, 2114, 805, 8519, 7549, 7003, 5835, 4963, 4558, 7849, 2808, 2728, 2767, 1791, 7369, 2272, 2568, 7851, 7086, 2329, 5099, 7952, 6055, 4354, 8589, 1902, 2555, 5732, 4326, 2559, 1353, 3922, 1885, 2694, 4362, 6934, 2696, 4637, 2799, 4598, 6888, 3324, 1535, 5901, 7328, 6529, 5526, 3242, 3351, 4269, 5219, 7202, 4918, 4323, 2236, 856, 673, 839, 2633, 7619, 1749, 5922, 1884, 6142, 5763, 3362, 8605, 3187, 5841, 8113, 2146, 6012, 5265, 508, 4420, 8261, 3556, 4322, 1106, 3055, 1647, 3026, 8152, 7109, 3515, 425, 3982, 3270, 680, 2969, 2754, 6082, 5568, 520, 8650, 6352, 2076, 489, 1239, 4286, 2317, 381, 183, 1686, 2066, 6447, 7027, 6275, 3424, 4471, 787, 6752, 4564, 293, 3565, 6564, 8014, 5430, 4808, 329, 2477, 4197, 3949, 1694, 7580, 2451, 6999, 2145, 5702, 6024, 5536, 6394, 3573, 8162, 593, 4590, 3350, 6572, 3051, 3921, 6671, 536, 5259, 6827, 1533, 8547, 1243, 6684, 488, 1025, 4120, 7181, 1283, 7127, 361, 3190, 3188, 8220, 1024, 513, 1586, 8044, 1374, 3905, 2331, 4409, 1883, 5737, 4166, 6434, 8091, 4725, 2178, 1762, 4055, 1388, 5280, 1361, 6298, 373, 2093, 3788, 5027, 6914, 8224, 6476, 8542, 3025, 517, 6083, 1872, 5517, 352, 7269, 5148, 170, 1810, 6548, 34, 4577, 8488, 7999, 5843, 8126, 4448, 6137, 4317, 4087, 6327, 3218, 6426, 8502, 8400, 7753, 134, 907, 2480, 6924, 6673, 544, 919, 3645, 5126, 1435, 709, 2161, 4101, 5491, 5394, 4033, 8160, 1977, 635, 5502, 4661, 
1462, 3307, 3111, 2648, 7738, 4890, 4734, 8545, 2906, 3755, 6801, 6909, 4790, 5051, 1413, 2011, 2401, 1964, 1249, 602, 3553, 936, 4841, 1021, 5897, 5777, 1443, 3020, 572, 3470, 7633, 6409, 2892, 7099, 2700, 724, 2529, 5165, 5821, 7254, 6219, 8068, 5805, 5784, 5443, 6588, 2046, 3752, 2713, 4016, 2419, 4380, 2805, 4588, 2018, 4690, 182, 4739, 2253, 8234, 7566, 2368, 6086, 5987, 3506, 4207, 2507, 2431, 4114, 30, 5608, 1298, 4516, 3153, 1270, 5808, 6776, 3103, 2556, 6489, 3228, 5059, 487, 784, 5849, 5508, 3416, 6788, 5885, 1334, 8374, 5294, 7094, 4727, 6026, 6464, 4768, 5607, 282, 3140, 8428, 1693, 5268, 4262, 325, 1838, 2860, 3535, 6853, 1943, 1970, 5008, 4614, 636, 5243, 3951, 4568, 4499, 5290, 6751, 7326, 1648, 6215, 4168, 7319, 6214, 3231, 2962, 7903, 147, 3392, 330, 172, 6533, 6649, 6441, 7240, 7464, 1403, 4777, 2680, 5282, 2090, 4227, 1640, 4425, 3422, 6356, 8076, 800, 6846, 5974, 7510, 8256, 8155, 3010, 6021, 1282, 5749, 2476, 2967, 6040, 5639, 4265, 1442, 7818, 6581, 3450, 2975, 2355, 4122, 5112, 7194, 21, 8153, 5464, 2314, 7555, 1114, 552, 6047, 2664, 4860, 4042, 1909, 2328, 7206, 3301, 137, 6437, 3634, 6445, 721, 5463, 6442, 3227, 4427, 3468, 319, 719, 7888, 146, 4949, 1495, 3162, 1426, 2683, 6679, 8469, 6814, 3563, 149, 6054, 3429, 8141, 2653, 3998, 3428, 3751, 2774, 1874, 7527, 4175, 8583, 356, 706, 965, 1254, 3398, 7908, 6388, 8365, 4037, 2316, 1953, 6392, 8361, 1597, 652, 99, 1990, 6008, 7119, 1860, 50, 5773, 5085, 4405, 1945, 825, 3319, 2213, 7649, 5120, 3335, 2483, 1529, 4529, 2325, 1203, 6122, 3476, 6354, 3511, 8407, 2620, 4827, 316, 7383, 6443, 8338, 2552, 5807, 8197, 4909, 1563, 5985, 4000, 1342, 2608, 529, 1578, 4328, 1104, 1516, 3742, 711, 5641, 3883, 4066, 366, 1253, 3567, 2436, 6806, 1193, 241, 3913, 6616, 5946, 5377, 1044, 3596, 2844, 1469, 4818, 4064, 2576, 4344, 7463, 1746, 4248, 7049, 8230, 3859, 4721, 8431, 2179, 1876, 4057, 2688, 1595, 4752, 7041, 32, 2928, 6366, 4668, 8124, 7786, 7901, 3137, 892, 1930, 1534, 8622, 6372, 6715, 6163, 5700, 8430, 4498, 5420, 5070, 6105, 701, 7609, 7996, 3761, 47, 2970, 7107, 640, 3571, 258, 5360, 4504, 1961, 317, 5114, 8446, 7296, 1936, 8415, 4844, 5399, 1179, 8425, 5256, 7340, 7457, 5283, 2472, 6912, 5613, 970, 4463, 2167, 4976, 6390, 4198, 7025, 7957, 601, 2607, 1039, 4278, 3991, 8161, 3067, 1950, 1451, 646, 3934, 2711, 6839, 5626, 2851, 5042, 7806, 6703, 3032, 6326, 669, 3054, 7624, 5115, 8449, 3686, 5510, 3593, 560, 6541, 5310, 1539, 7166, 8111, 2070, 6320, 2502, 2289, 3019, 4939, 7154, 5343, 4689, 3108, 3498, 5745, 2551, 766, 3508, 5057, 5166, 5267, 2646, 2731, 4623, 2250, 4174, 2415, 5560, 7338, 3364, 7093, 2532, 6378, 5929, 8615, 2047, 2211, 3965, 1501, 5916, 4889, 4731, 5352, 4402, 4562, 54, 217, 6530, 4196, 8432, 5507, 3907, 1341, 497, 4595, 824, 7032, 8318, 2515, 7179, 6462, 3552, 5554, 1769, 4368, 3038, 5548, 5851, 5468, 1069, 2966, 4801, 7400, 1527, 7769, 3875, 1198, 3581, 3066, 2708, 8555, 5314, 1170, 8579, 7881, 5473, 5098, 141, 3298, 3098, 4774, 6237, 5998, 2878, 1780, 7577, 5538, 7505, 1478, 6869, 4866, 4491, 4318, 5086, 3595, 7236, 1824, 218, 8223, 7253, 7920, 5109, 95, 3356, 5865, 3214, 478, 4548, 5064, 5457, 2820, 3490, 6325, 7396, 5801, 6151, 4046, 312, 3003, 6323, 3746, 5040, 186, 4117, 378, 4570, 6196, 8541, 1916, 6653, 7261, 7812, 3610, 6907, 4997, 521, 849, 4347, 4243, 4932, 8238, 1833, 7559, 2089, 4849, 5329, 7801, 8059, 7487, 3154, 5006, 7658, 7616, 6773, 1139, 2086, 1174, 1244, 1584, 2695, 1452, 7219, 2866, 5544, 1732, 8550, 8462, 5903, 8625, 2305, 1401, 2278, 6659, 3306, 3675, 3895, 6605, 7603, 1317, 4597, 
6576, 2486, 7057, 5748, 1610, 2028, 2292, 7676, 6942, 7125, 5195, 2390, 5868, 413, 6969, 5650, 3243, 5015, 3021, 7156, 450, 4581, 6224, 3582, 3995, 2478, 6103, 7720, 4978, 2954, 5746, 2606, 6345, 3246, 8377, 1081, 5455, 3172, 3454, 7770, 6126, 8191, 3331, 3504, 2312, 1145, 3712, 4222, 4781, 5398, 1224, 7692, 7942, 2255, 683, 6241, 6925, 5072, 1362, 1816, 1955, 5144, 3180, 2715, 1242, 1100, 6755, 3711, 1032, 812, 1159, 6713, 6100, 3002, 4905, 2595, 76, 6005, 2578, 1547, 7915, 7885, 801, 2122, 6762, 5574, 4959, 19, 7387, 4700, 6317, 2104, 8467, 6822, 4424, 6206, 5697, 6847, 4028, 1996, 7791, 8189, 7476, 6232, 5965, 6207, 6502, 8491, 1645, 4628, 7837, 6358, 2764, 1966, 8506, 1297, 1124, 3403, 1268, 2490, 7026, 2748, 360, 8438, 3197, 4441, 6230, 1821, 4447, 3281, 1777, 1386, 7318, 276, 7443, 7758, 4125, 2224, 6795, 2948, 8137, 5975, 8643, 3260, 4069, 3917, 8204, 2772, 4644, 623, 4305, 1912, 1493, 7859, 7333, 1853, 6340, 5405, 3830, 6149, 2524, 1887, 443, 4045, 6785, 6393, 2823, 6874, 607, 6590, 3078, 5081, 6661, 1180, 1162, 3771, 1155, 4942, 6872, 3209, 5619, 3181, 6933, 93, 7281, 1669, 8466, 7235, 7419, 7095, 6417, 7420, 3676, 3507, 862, 4004, 3265, 1161, 5200, 5122, 1097, 4779, 1818, 2787, 409, 3766, 8501, 645, 64, 4458, 5071, 4073, 7241, 3522, 991, 7370, 4829, 8427, 2562, 7760, 6683, 5911, 6687, 5803, 4627, 6281, 6153, 5813, 2407, 8493, 2953, 5656, 4826, 6251, 8422, 6198, 8435, 1130, 5909, 1774, 6638, 1660, 2219, 2868, 7551, 8464, 5160, 370, 8530, 2119, 2231, 7411, 1727, 6091, 4850, 5765, 5964, 6746, 3576, 5860, 1212, 1772, 3542, 5255, 6262, 6995, 2601, 6789, 776, 107, 4759, 7971, 996, 2063, 5231, 335, 4104, 249, 7701, 6041, 5250, 7784, 3992, 1181, 5271, 4129, 8549, 4293, 3363, 5527, 3817, 3471, 3801, 7989, 5104, 5244, 3658, 269, 407, 77, 5557, 648, 6656, 4894, 6994, 7540, 8389, 5577, 3699, 5330, 1167, 6699, 1011, 5079, 1409, 1390, 6899, 6001, 1085, 1882, 6601, 5923, 7783, 3315, 6330, 596, 6099, 4703, 2275, 6625, 3911, 3092, 2519, 3739, 687, 7522, 6904, 8112, 8294, 367, 3157, 4428, 3629, 1154, 3065, 1437, 1852, 3283, 6258, 4555, 5465, 7117, 3548, 3754, 56, 8647, 4574, 6736, 7991, 3141, 6973, 2500, 2294, 6243, 4737, 7630, 7625, 2400, 5284, 1888, 408, 3880, 6183, 4612, 7944, 1084, 1905, 4793, 5159, 5001, 434, 1675, 7444, 7170, 5342, 3118, 5238, 5327, 3587, 662, 6020, 6510, 6421, 3518, 3053, 5409, 2993, 4018, 6125, 8131, 474, 4895, 4431, 6098, 7033, 3993, 741, 4764, 3864, 5810, 7208, 384, 5864, 1280, 3760, 1559, 4593, 1576, 505, 7438, 2845, 8194, 7845, 3857, 2280, 539, 6031, 8586, 1473, 3069, 4880, 1199, 7084, 7879, 2739, 7641, 1730, 4337, 681, 2010, 5292, 395, 5134, 6128, 5402, 1300, 4937, 7554, 2464, 1119, 1721, 4784, 3441, 2762, 7636, 2184, 1165, 5016, 3727, 2514, 764, 3379, 7355, 7121, 3823, 3006, 6131, 5529, 4664, 386, 4610, 5616, 3710, 33, 8376, 2343, 4708, 4146, 6702, 3695, 6887, 620, 7415, 945, 6503, 1480, 250, 2217, 792, 5649, 7418, 8245, 6833, 2383, 5723, 176, 5703, 2336, 7591, 5278, 4003, 251, 1830, 55, 3382, 5596, 4366, 1699, 3811, 5962, 3247, 2574, 6520, 5188, 8458, 7525, 5090, 1915, 1087, 8475, 2843, 5123, 1783, 7408, 400, 5858, 8619, 4185, 8203, 3132, 5706, 4006, 3129, 4464, 5921, 7968, 4320, 4809, 3543, 3513, 2126, 398, 6244, 6480, 4306, 2702, 4065, 3722, 2925, 4378, 3233, 457, 5988, 605, 4828, 8610, 5880, 6856, 4517, 5693, 5447, 4271, 6213, 2804, 2984, 8324, 5541, 1835, 5340, 4692, 5753, 7207, 2111, 7349, 7282, 2929, 7153, 7860, 1851, 1227, 1404, 670, 7930, 2605, 5721, 2934, 2438, 3963, 1302, 3833, 5130, 6234, 4495, 923, 2813, 3842, 4060, 5743, 4103, 2757, 2284, 6256, 1366, 
6737, 4214, 757, 2193, 3245, 7268, 6081, 6314, 675, 6983, 4235, 6078, 8275, 2346, 8193, 6831, 7528, 5802, 274, 6707, 6772, 2075, 4730, 4711, 2210, 8509, 6266, 7088, 872, 7826, 877, 7557, 6492, 3826, 558, 5733, 2265, 7073, 815, 4660, 7782, 7434, 1132, 765, 1479, 2131, 2097, 2584, 3179, 7936, 5132, 6484, 7058, 5212, 7842, 5853, 6300, 3212, 2944, 270, 1071, 2857, 8271, 4439, 4871, 6348, 2940, 2977, 2753, 3611, 920, 7509, 632, 7494, 5300, 3621, 2554, 3925, 4820, 8655, 4751, 4480, 2540, 2956, 7498, 1805, 7960, 5354, 1321, 3923, 7124, 8253, 3253, 3837, 868, 5978, 5856, 5316, 3389, 4972, 4142, 2817, 2226, 6312, 4684, 3268, 2746, 2099, 2335, 7685, 2281, 668, 3329, 2306, 4823, 5291, 2017, 1102, 2345, 7704, 4372, 4901, 8187, 507, 6190, 8, 3155, 205, 7471, 7297, 3143, 5990, 104, 5606, 2503, 5061, 1841, 1886, 2195, 976, 5303, 1811, 890, 8382, 3339, 5308, 4632, 7500, 4761, 2301, 3238, 6506, 2441, 2125, 8457, 8623, 1958, 5050, 3171, 1274, 2269, 6620, 795, 2508, 4239, 746, 6440, 7089, 184, 2572, 7321, 5199, 4, 7082, 1235, 5967, 7646, 1201, 2297, 8313, 876, 3276, 1200, 5772, 3655, 3033, 3064, 1814, 4302, 2931, 1295, 2208, 7010, 1123, 3957, 8328, 1185, 6152, 1546, 5944, 5806, 5816, 979, 1303, 4950, 2007, 2363, 2062, 5715, 4444, 1748, 7689, 3828, 2624, 2103, 7832, 343, 1273, 4514, 291, 4842, 2699, 6917, 8263, 7149, 4630, 7807, 7064, 2186, 5274, 653, 3777, 870, 4554, 1245, 3136, 1187, 3780, 581, 6245, 6265, 1998, 1048, 1525, 3510, 4455, 2439, 1214, 4507, 3700, 1029, 6745, 1685, 2637, 2235, 3968, 2455, 7516, 3426, 4650, 5567, 2747, 6396, 7799, 1947, 1190, 4330, 2784, 4163, 5232, 7666, 8296, 6818, 7665, 8104, 6108, 2525, 1013, 7187, 1058, 3386, 436, 6610, 1349, 458, 710, 1151, 5037, 3569, 4943, 2538, 4836, 4899, 4769, 37, 6767, 7643, 6710, 888, 1775, 5135, 1834, 1483, 3612, 3075, 7385, 8330, 3152, 1304, 1311, 7532, 6892, 7586, 4944, 579, 2286, 369, 5205, 5116, 5627, 7403, 2203, 5809, 2881, 7251, 3964, 15, 8472, 7679, 961, 4563, 80, 1490, 3732, 3723, 6293, 5392, 7172, 6070, 1034, 2106, 7638, 4479, 3346, 7733, 8340, 3206, 4112, 3285, 355, 1352, 7242, 1871, 1050, 8118, 8134, 6848, 2022, 7147, 8270, 8043, 5452, 1328, 1846, 3955, 7048, 5361, 4924, 2352, 7108, 8577, 8336, 6929, 1779, 1389, 2270, 7953, 371, 1904, 7519, 6658, 8188, 6957, 2593, 6860, 629, 3602, 1090, 5940, 468, 2510, 788, 7552, 1506, 3333, 5227, 165, 5152, 1867, 905, 2423, 4245, 8281, 7671, 8380, 7695, 6363, 7347, 7776, 7350, 4044, 8078, 111, 7959, 5366, 4892, 1331, 6039, 1430, 2428, 1679, 841, 7698, 6689, 2916, 1234, 7289, 422, 8451, 4118, 1897, 939, 7466, 3169, 5678, 423, 5479, 5005, 5136, 5371, 5658, 6730, 5384, 6997, 1994, 7755, 1177, 3584, 7422, 3465, 7384, 4946, 6786, 7138, 4948, 4642, 2766, 5927, 4795, 6051, 6446, 3855, 7197, 5602, 2996, 6274, 283, 4216, 2376, 8369, 4617, 4575, 5110, 7794, 5528, 7978, 6552, 2234, 110, 4275, 5919, 7821, 4933, 2364, 140, 7310, 2807, 1925, 3160, 6821, 6880, 7460, 1807, 3763, 1398, 3867, 2158, 4935, 5434, 5595, 5397, 5074, 2834, 3744, 8065, 6921, 3007, 3772, 7155, 1809, 7789, 1623, 7904, 8551, 744, 1220, 2946, 2237, 4316, 1454, 2140, 1802, 5299, 1599, 4052, 3869, 6947, 4753, 8585, 2516, 7374, 138, 884, 334, 4573, 1615, 2852, 7287, 7601, 6578, 8282, 2198, 8505, 1601, 3600, 6109, 7238, 5769, 798, 4635, 8205, 626, 6618, 6573, 2919, 5125, 2651, 81, 4148, 4921, 7858, 8272, 7113, 6000, 1926, 2361, 4385, 7907, 3656, 555, 942, 6061, 4718, 6350, 6174, 1482, 8441, 5414, 1536, 7567, 7584, 3000, 8293, 7043, 3824, 603, 1125, 4756, 1522, 4748, 6688, 6221, 8465, 2393, 4032, 8202, 2658, 5369, 5673, 1907, 610, 5999, 2277, 4726, 
3874, 1279, 322, 6829, 2778, 1554, 6379, 4407, 8173, 8266, 1171, 5617, 7924, 2900, 5759, 1043, 3816, 5819, 6385, 8006, 5752, 8394, 8115, 8490, 154, 2509, 1819, 7560, 796, 7417, 2875, 4714, 2909, 2357, 2055, 1492, 6276, 1952, 896, 5485, 6375, 592, 3770, 5800, 6866, 1776, 4038, 379, 1219, 3110, 8378, 210, 4181, 1570, 1140, 4911, 6420, 4602, 3756, 14, 1889, 8164, 2536, 637, 8522, 8221, 1991, 5991, 7301, 3448, 1701, 4436, 5350, 3952, 3300, 6090, 1285, 3818, 3366, 7128, 6202, 7063, 6726, 5069, 6611, 6321, 2930, 906, 3112, 1020, 1045, 736, 4297, 4157, 713, 7382, 8132, 2395, 1094, 7249, 2898, 3860, 7675, 2065, 5898, 5980, 1157, 8381, 6765, 3647, 1066, 4160, 3827, 148, 5583, 7805, 1750, 2751, 4234, 509, 2254, 6096, 3368, 2530, 7544, 8280, 5826, 6998, 8421, 310, 789, 1992, 2442, 7210, 3845, 6007, 4655, 7599, 2976, 7474, 6087, 8636, 4119, 4244, 3974, 3877, 7822, 7983, 8060, 6685, 6459, 3138, 3375, 1117, 5983, 990, 4224, 3393, 4846, 7199, 158, 1803, 8326, 3310, 5053, 749, 6507, 1718, 833, 1697, 255, 1747, 7941, 6092, 7719, 3920, 5960, 4532, 5101, 7949, 6851, 542, 8359, 3580, 5888, 6740, 5346, 2501, 1091, 1707, 6315, 5873, 341, 3039, 1421, 5438, 8240, 782, 5542, 6318, 964, 2707, 559, 6580, 4359, 1822, 4015, 6512, 7424, 570, 8053, 2684, 1502, 7828, 8363, 4542, 2225, 6088, 998, 8004, 7594, 6820, 1768, 3651, 2073, 4158, 8604, 7126, 551, 1831, 5181, 4520, 8159, 6985, 606, 5814, 5075, 6065, 1569, 5588, 6469, 1118, 2788, 2351, 2750, 5804, 547, 1784, 7205, 6991, 4165, 7001, 66, 6538, 5534, 1712, 1741, 8186, 1753, 6944, 1687, 6002, 4640, 5629, 2800, 6428, 5056, 574, 6799, 7291, 4456, 7667, 7362, 7283, 4891, 7184, 3275, 24, 5722, 672, 4485, 727, 3211, 510, 4796, 8642, 1604, 11, 4255, 1706, 8182, 5401, 1369, 7168, 3644, 2783, 429, 3126, 7681, 8543, 3349, 7921, 345, 1176, 4553, 4916, 192, 6896, 3407, 6229, 3512, 817, 7195, 79, 1474, 2735, 2978, 5230, 5307, 4263, 8002, 7037, 3652, 7840, 397, 2427, 2404, 6669, 8013, 4843, 365, 8606, 2740, 7670, 1920, 804, 4273, 4546, 1531, 5103, 7911, 1065, 1301, 4340, 2577, 2196, 3262, 2424, 7005, 476, 5638, 2454, 5875, 6093, 7402, 2375, 8008, 2175, 771, 6719, 3868, 97, 5025, 6836, 45, 3241, 3024, 8633, 5726, 4339, 363, 6263, 1523, 4383, 3997, 2789, 2856, 4026, 5433, 598, 6725, 3802, 2722, 2543, 1296, 4672, 1354, 4007, 7070, 7945, 1836, 2362, 6854, 5592, 8393, 5884, 7264, 5273, 8150, 5035, 4211, 1488, 390, 1532, 62, 4012, 4414, 2408, 2339, 4440, 6035, 1432, 802, 2300, 7345, 8012, 6124, 5866, 2134, 8243, 3292, 1339, 7854, 6794, 8055, 4429, 4367, 4541, 5509, 486, 8607, 7223, 6771, 4285, 2806, 2714, 5197, 8482, 4817, 2736, 2095, 5301, 1948, 2781, 6900, 7787, 4357, 3431, 3360, 3449, 3432, 8138, 8461, 4930, 860, 2471, 3928, 2320, 3309, 1782, 5012, 3248, 5429, 1008, 239, 2569, 3605, 2893, 4729, 6156, 8581, 6990, 3613, 1215, 4501, 2650, 6373, 5561, 6777, 3749, 3496, 1657, 2583, 2197, 8373, 6176, 153, 600, 1924, 5705, 651, 298, 6030, 8536, 5652, 3945, 1938, 5436, 5734, 6386, 2656, 7262, 4503, 3838, 2387, 166, 8512, 2304, 584, 6524, 3750, 6076, 659, 6140, 3040, 8402, 7461, 2865, 6438, 2068, 1633, 7898, 7967, 6395, 7964, 889, 4068, 5146, 5513, 5570, 1561, 2594, 6429, 582, 6191, 630, 7626, 6517, 2261, 4584, 3545, 2371, 4929, 2257, 1870, 869, 1934, 5102, 2220, 2756, 900, 5750, 8299, 5783, 6249, 2180, 7844, 5388, 2039, 8114, 6297, 2334, 6629, 2121, 2488, 4719, 2290, 8596, 5586, 2933, 2057, 3230, 2001, 569, 6116, 5926, 5403, 754, 1372, 3904, 6496, 5576, 4373, 705, 6227, 4135, 6906, 6555, 538, 3029, 8563, 8061, 8635, 5462, 2209, 751, 1387, 7495, 4875, 3367, 3041, 951, 2385, 863, 3598, 6422, 
7963, 6804, 2912, 3579, 8210, 604, 8298, 528, 7381, 2801, 2142, 85, 3164, 3257, 7521, 6527, 5725, 7673, 2949, 8084, 3561, 266, 2397, 350, 1981, 1294, 1756, 8102, 934, 5019, 4030, 803, 5659, 4587, 2050, 6473, 5666, 4677, 6452, 6738, 5296, 5353, 855, 6593, 1178, 1054, 4747, 7258, 6355, 2871, 3851, 6670, 6750, 1765, 1439, 309, 1596, 7973, 5187, 7539, 2777, 6989, 532, 4561, 6613, 885, 8142, 2999, 3924, 5192, 195, 5904, 5811, 575, 6645, 7893, 4720, 4815, 105, 8592, 3812, 1319, 3969, 44, 983, 8582, 2623, 1036, 2828, 2649, 4510, 1252, 4920, 5651, 779, 236, 1637, 4583, 7502, 4106, 2164, 1381, 5121, 6303, 2263, 2910, 4024, 5914, 4470, 1957, 71, 5313, 259, 5660, 7145, 5376, 1618, 3158, 7651, 1600, 6495, 5107, 6248, 5815, 1441, 7356, 52, 3473, 1726, 8648, 6346, 2793, 4056, 7763, 2571, 3729, 1260, 5252, 6600, 7912, 2995, 2466, 6313, 1863, 5186, 1954, 6672, 2358, 2812, 3668, 5667, 6651, 5028, 5829, 6217, 549, 4391, 2816, 437, 974, 1316, 7817, 2518, 2015, 2822, 6696, 6692, 7562, 4225, 4599, 2354, 2693, 2573, 6150, 7102, 3043, 2698, 3879, 7515, 6067, 7358, 3045, 5257, 6547, 8383, 7910, 8262, 6453, 823, 7605, 3073, 3635, 1338, 8339, 1503, 7606, 6068, 8332, 1458, 525, 1635, 5437, 8356, 7435, 5870, 7231, 3935, 917, 4505, 6283, 3076, 277, 3870, 68, 932, 2566, 6603, 7477, 3378, 2640, 2864, 8419, 4743, 4202, 4675, 8315, 6546, 7482, 3528, 7868, 6542, 3316, 6285, 5672, 3906, 1642, 3537, 2981, 8350, 8525, 118, 8140, 411, 5153, 2513, 5516, 7302, 25, 2144, 2206, 3657, 7273, 7793, 6840, 5812, 2849, 2168, 6250, 6383, 7293, 8241, 5573, 6493, 326, 8268, 5504, 2830, 7163, 831, 4539, 4800, 6893, 23, 2546, 4023, 1498, 2908, 8302, 6114, 2657, 3353, 7668, 548, 4523, 86, 3531, 5180, 778, 1005, 4459, 6606, 7006, 5696, 5248, 1850, 3708, 608, 6236, 6439, 738, 5669, 426, 8503, 2033, 2520, 2528, 7305, 1163, 5618, 935, 631, 1205, 8128, 1313, 7877, 6367, 4100, 2897, 7103, 6950, 1808, 6254, 591, 1690, 7800, 4089, 8033, 1737, 3434, 8516, 7459, 5484, 3330, 6838, 1929, 133, 6834, 3193, 6157, 4093, 7610, 7200, 1440, 3909, 2705, 3472, 6397, 4585, 3814, 2675, 2, 1332, 6316, 8598, 5179, 926, 7823, 424, 3901, 8199, 2267, 2348, 611, 6474, 3355, 4881, 2160, 1333, 4822, 6556, 7468, 4088, 2143, 5712, 8045, 1343, 7371, 2389, 7440, 5170, 5948, 6349, 6364, 27, 8325, 7602, 2850, 238, 7182, 4592, 1700, 8348, 5933, 2581, 448, 2829, 5601, 69, 8564, 4110, 6336, 5211, 694, 3273, 8411, 1072, 4608, 2484, 3661, 2034, 3853, 7850, 4477, 7579, 7379, 7116, 2102, 2863, 7564, 4308, 573, 4209, 3222, 4679, 1617, 2108, 3489, 7796, 3912, 940, 7190, 6307, 296, 4600, 4415, 5031, 5887, 8225, 2315, 3705, 4482, 5910, 6013, 3679, 188, 6951, 566, 4914, 5206, 4707, 1098, 1323, 3088, 4676, 7315, 587, 1689, 4314, 332, 837, 6744, 4681, 1059, 4040, 7694, 5537, 6286, 2127, 3402, 1629, 7743, 6803, 8557, 3239, 4798, 7741, 5771, 5469, 6920, 3632, 6049, 4233, 2819, 1526, 4988, 3886, 8300, 8277, 4639, 7855, 4604, 6790, 5779, 6231, 2349, 7341, 3277, 3978, 6594, 7445, 6290, 967, 5289, 3691, 7587, 5790, 6123, 7392, 1574, 5663, 643, 7044, 842, 4982, 8259, 7661, 1632, 8251, 3967, 8367, 8444, 7368, 3888, 7280, 122, 180, 2911, 3724, 6351, 8397, 9, 6154, 2980, 3370, 57, 6288, 8478, 7365, 3797, 8437, 5720, 3499, 5566, 4231, 1662, 7693, 7614, 4686, 4162, 2920, 3540, 3619, 7992, 377, 376, 4712, 6577, 2619, 2487, 3637, 1976, 4611, 4902, 3240, 353, 5424, 1035, 5572, 7413, 7388, 4528, 3940, 2755, 5396, 5140, 6188, 8039, 1400, 7225, 5251, 6591, 2322, 171, 6986, 5108, 616, 2218, 481, 1521, 4659, 7159, 2570, 7576, 1849, 6903, 2580, 7493, 938, 2019, 2758, 2745, 6162, 6143, 5215, 4252, 2647, 1456, 516, 
8228, 2785, 20, 4333, 2741, 6042, 7503, 5138, 2386, 3734, 6539, 7391, 986, 2987, 1652, 6733, 2078, 807, 6261, 7595, 5553, 2742, 836, 7878, 6016, 8513, 1491, 5007, 1646, 3792, 2452, 3608, 650, 4515, 8303, 6133, 4616, 6868, 4364, 4291, 891, 5979, 307, 5690, 4327, 5655, 1092, 1512, 6823, 5466, 6551, 7628, 4704, 3704, 1781, 7635, 3204, 1160, 1587, 8048, 5600, 4765, 7987, 6879, 1429, 1668, 2884, 1346, 2115, 6095, 7593, 1407, 2769, 6509, 5950, 480, 3850, 2904, 4565, 1121, 4187, 1987, 6662, 453, 8392, 5380, 1969, 7372, 5825, 1143, 5030, 550, 6857, 4343, 5038, 8003, 1138, 268, 2188, 5223, 6500, 8314, 4268, 7022, 883, 585, 6562, 8317, 8291, 5718, 8644, 3795, 1391, 1073, 2945, 7314, 6817, 3636, 1832, 1914, 3574, 2443, 2416, 5141, 7317, 3769, 5716, 2162, 5260, 5947, 7152, 5989, 1410, 405, 4478, 5689, 207, 1625, 7985, 6267, 8521, 2724, 8156, 1714, 7014, 2485, 4957, 7699, 1148, 1146, 3534, 7951, 2880, 2035, 6305, 2037, 4550, 5848, 2365, 5150, 5525, 4189, 1365, 1579, 5270, 8074, 6815, 3881, 6255, 4433, 221, 3288, 7465, 3884, 1560, 3550, 2411, 5172, 5449, 4722, 8171, 1153, 4831, 1089, 2738, 200, 4461, 2088, 5774, 7304, 2032, 1207, 4806, 1752, 6138, 7751, 5670, 6295, 5661, 2861, 4772, 716, 6602, 7874, 7656, 4075, 5453, 8020, 7185, 1406, 6433, 1281, 4472, 202, 1543, 1738, 865, 120, 7862, 696, 2701, 4387, 4518, 1038, 6411, 4983, 2598, 1517, 8481, 156, 5996, 1107, 5896, 4022, 7101, 3588, 1079, 2493, 1827, 1917, 597, 4512, 6271, 2798, 2776, 6748, 4787, 2672, 4865, 7728, 8319, 1003, 7441, 7700, 3649, 813, 5338, 451, 7680, 8391, 4754, 6205, 2630, 6454, 5014, 4619, 6963, 2377, 1141, 4468, 7263, 237, 4182, 7819, 1173, 7866, 225, 5686, 4706, 6631, 145, 2216, 6057, 2759, 4151, 5235, 4341, 42, 3165, 835, 5020, 2459, 8631, 1194, 3773, 3547, 5882, 7446, 4927, 6955, 4130, 5124, 717, 7512, 4288, 1229, 7329, 6647, 4758, 5505, 770, 4360, 8000, 1277, 5142, 169, 6089, 7897, 8195, 4481, 6612, 2330, 4797, 7399, 4931, 586, 3361, 3359, 4236, 3256, 5459, 6775, 6159, 1371, 3530, 8290, 8028, 4149, 1103, 6825, 1985, 7937, 2563, 1099, 7401, 7998, 3774, 2124, 1518, 3577, 4533, 4913, 3497, 8345, 6034, 6504, 850, 5288, 6977, 3060, 6384, 2071, 1975, 8057, 3745, 6796, 7775, 7193, 8110, 3102, 5339, 4449, 2420, 1110, 1653, 4695, 2437, 1086, 4107, 6558, 3071, 4435, 4161, 1761, 7174, 8638, 956, 7611, 4791, 3730, 1226, 6260, 6382, 3502, 851, 6723, 6284, 321, 2006, 1655, 6894, 1524, 7927, 1320, 7455, 1608, 1213, 763, 181, 89, 75, 5379, 5245, 3486, 3352, 1434, 4145, 5719, 931, 7627, 2915, 4979, 467, 7977, 3210, 5793, 4851, 1793, 7550, 969, 6144, 682, 8423, 5879, 2792, 466, 7244, 6655, 1217, 4474, 5154, 1041, 6753, 504, 3401, 2703, 2309, 1967, 927, 5368, 2517, 7275, 7708, 2353, 3163, 3094, 955, 8213, 1968, 2924, 4194, 4332, 2621, 2617, 2873, 8334, 1147, 2494, 4560, 5640, 191, 5817, 1609, 7030, 3159, 8352, 6861, 1202, 4260, 4321, 273, 2749, 3279, 4141, 65, 985, 4705, 5400, 380, 808, 3357, 2150, 5010, 2886, 3419, 5971, 3614, 1515, 5981, 8005, 5758, 4810, 4399, 6928, 1314, 3384, 7395, 2079, 8273, 3198, 1542, 6681, 2138, 7988, 5738, 1129, 5494, 1817, 1175, 8470, 2406, 6932, 1879, 5242, 4246, 7752, 2463, 3334, 6824, 2282, 5770, 7536, 5837, 417, 6608, 1481, 814, 4159, 6742, 8653, 3910, 641, 2004, 8231, 2337, 3150, 3186, 342, 8609, 1799, 679, 6697, 7144, 799, 4837, 6712, 1472, 7773, 1417, 6470, 8239, 3872, 8235, 5228, 2433, 102, 7481, 6598, 8357, 6059, 4172, 1933, 6531, 3036, 3640, 5198, 8054, 5214, 4014, 3996, 2129, 8172, 6952, 7167, 1444, 3223, 3782, 5874, 7578, 3287, 8295, 3485, 3122, 908, 695, 2136, 5375, 8526, 4656, 6567, 4848, 4109, 5454, 6627, 
4086, 3956, 5406, 4549, 8483, 2189, 2096, 2794, 5207, 3937, 1424, 6630, 5514, 2308, 7846, 455, 3747, 1112, 2444, 7833, 8252, 7123, 2991, 1042, 5635, 8561, 6895, 5276, 344, 5060, 2626, 864, 1457, 1890, 6357, 6322, 2858, 4324, 3532, 5899, 4295, 5729, 2061, 1394, 6475, 2214, 7543, 4489, 3236, 8492, 5643, 913, 4229, 897, 6471, 4870, 7013, 530, 5049, 4945, 3170, 4186, 362, 954, 84, 2631, 279, 285, 4874, 305, 4735, 5045, 3086, 8445, 1233, 3986, 3643, 7330, 1580, 2810, 1186, 414, 3121, 8249, 8364, 7110, 6664, 3718, 2042, 3274, 3328, 213, 997, 4475, 2244, 2681, 6911, 4329, 1710, 1489, 6663, 4264, 155, 3469, 2872, 8051, 777, 3199, 7132, 3988, 1476, 7734, 245, 1278, 4629, 5208, 6626, 526, 5185, 3381, 3696, 4008, 899, 1878, 1551, 867, 928, 6403, 5780, 5105, 2051, 6477, 5791, 2402, 3741, 7511, 8632, 2229, 3521, 8575, 660, 4077, 7735, 7486, 4863, 2242, 4215, 7106, 5707, 6424, 5131, 8297, 6718, 5781, 8436, 8640, 930, 1622, 621, 6721, 4390, 4091, 8144, 5373, 8539, 3753, 8229, 3693, 7958, 8460, 518, 7252, 8602, 3820, 1464, 8107, 5224, 2947, 5532, 6941, 2627, 1204, 4572, 726, 5094, 554, 428, 2369, 8278]
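block_order.json is a single long JSON array of integers. A quick way to sanity-check its shape, assuming it really is the flat list shown above and that a local checkout is available, is to load it and test whether it enumerates every index exactly once:

```python
# Minimal inspection sketch for prompt/adapter/block_order.json (path relative
# to a local checkout of the repository).
import json

with open("prompt/adapter/block_order.json") as f:
    order = json.load(f)

print(len(order), min(order), max(order))
# True if the list is a permutation of 0..len(order)-1, i.e. an ordering
# (shuffle) of block indices rather than an arbitrary set of values.
print(sorted(order) == list(range(len(order))))
```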
prompt/adapter/model_step_0.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d94b9905457a5be3ff0c9c650a07346535834400093eb8c32e7f82c32b41378d
+ size 34865914
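The block above (and each of the ADDED binary-like files that follow) is a Git LFS pointer rather than the payload itself: three "key value" lines giving the pointer-spec version, the SHA-256 oid of the real content, and its size in bytes. A minimal parser, assuming a checkout where the LFS smudge step has not yet replaced the pointers (parse_lfs_pointer is a hypothetical helper, not part of any library):

```python
# Minimal sketch: parse a 3-line Git LFS pointer file (version / oid / size)
# into a dict of raw string values.
def parse_lfs_pointer(path: str) -> dict:
    fields = {}
    with open(path) as f:
        for line in f:
            key, _, value = line.strip().partition(" ")
            if key:
                fields[key] = value
    return fields

ptr = parse_lfs_pointer("prompt/adapter/model_step_0.json")
print(ptr["oid"])        # "sha256:d94b9905..." as in the pointer above
print(int(ptr["size"]))  # 34865914 bytes for model_step_0.json
```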
prompt/adapter/model_step_100.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:47e48c8ab5a4ad9dd46b7d616bb60615e3582958acbf29062dc8cee208871633
+ size 34865918
prompt/adapter/model_step_200.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6c488b76b51c0f76723e814b806e3c9e3a6a1c794f11abeaa0171f3c54fde3e8
+ size 34865917
prompt/adapter/model_step_300.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c5aa716a822e1499a2fc58435975461731327a576ea283caffb76faa82011a9c
+ size 34865917
prompt/adapter/model_step_400.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c52abf30e51764c137a9080e6b1c25c30489a438ca22cdba9ed37d0c029ece27
+ size 34865918
prompt/adapter/model_step_500.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:caa52d03db92b743e742210233760f22d202d35259c81d2be23e8c3c025420f9
+ size 34865918
prompt/adapter/model_step_520.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:20163ab5631c3429fae0ea2d3d910adad24b8969a9515ea34d0c3404e56ecc42
+ size 34865915
prompt/adapter/tokens.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c7aa4d0bcd9f8a5d6e53c40ed0c82378c8abe90b513016a508dc6acd53a6d3ff
+ size 20077688
prompt/backbone/config.json ADDED
@@ -0,0 +1,41 @@
+ {
+ "_name_or_path": "anhdungitvn/m-llama-bot-general-7b/backbone",
+ "activation_function": "gelu_new",
+ "architectures": [
+ "GPTJForCausalLM"
+ ],
+ "attn_pdrop": 0.1,
+ "bos_token_id": 0,
+ "embd_pdrop": 0.1,
+ "eos_token_id": 1,
+ "gradient_checkpointing": false,
+ "initializer_range": 0.02,
+ "layer_norm_epsilon": 1e-05,
+ "model_type": "gptj",
+ "n_embd": 4096,
+ "n_head": 16,
+ "n_inner": null,
+ "n_layer": 28,
+ "n_positions": 2048,
+ "resid_pdrop": 0.0,
+ "rotary_dim": 64,
+ "scale_attn_weights": true,
+ "summary_activation": null,
+ "summary_first_dropout": 0.1,
+ "summary_proj_to_labels": true,
+ "summary_type": "cls_index",
+ "summary_use_proj": true,
+ "task_specific_params": {
+ "text-generation": {
+ "do_sample": true,
+ "max_length": 128,
+ "temperature": 1.0
+ }
+ },
+ "tie_word_embeddings": false,
+ "tokenizer_class": "PreTrainedTokenizerFast",
+ "torch_dtype": "float32",
+ "transformers_version": "4.26.0",
+ "use_cache": true,
+ "vocab_size": 64512
+ }
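The backbone config describes a GPT-J-style decoder (28 layers, hidden size 4096, 16 heads, rotary dim 64) with a 64512-token vocabulary and untied input/output embeddings. A rough, back-of-the-envelope parameter count from these fields, as a sketch only; the tensor shapes assumed here follow the entries visible in pytorch_model.bin.index.json below, where the attention projections carry no bias while the MLP and lm_head do:

```python
# Back-of-the-envelope parameter count for the GPT-J backbone config above.
# A rough consistency check, not an official figure.
n_embd, n_layer, vocab = 4096, 28, 64512

per_layer = (
    2 * n_embd                            # ln_1 weight + bias
    + 4 * n_embd * n_embd                 # q/k/v/out projections (bias-free)
    + n_embd * 4 * n_embd + 4 * n_embd    # mlp.fc_in weight + bias
    + 4 * n_embd * n_embd + n_embd        # mlp.fc_out weight + bias
)
total = (
    vocab * n_embd                        # input embedding (wte)
    + n_layer * per_layer
    + 2 * n_embd                          # final layer norm
    + vocab * n_embd + vocab              # lm_head weight + bias (untied)
)
print(f"{total:,}")  # ~6.17e9 params, i.e. ~24.7 GB in float32 -- in line with
                     # the ~24.8 GB total_size reported by the shard index below
```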
prompt/backbone/generation_config.json ADDED
@@ -0,0 +1,6 @@
+ {
+ "_from_model_config": true,
+ "bos_token_id": 0,
+ "eos_token_id": 1,
+ "transformers_version": "4.26.0"
+ }
prompt/backbone/pytorch_model-00001-of-00003.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6a8395b937ae7d2f676121cfaa22a58adbd5020804c3dfd70969be6ec3cb80d6
+ size 9967006630
prompt/backbone/pytorch_model-00002-of-00003.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d1b10f6fc8a24173822a290afd7acb870d28b629c4d67a40ba5e4e4ef2eada1d
+ size 9983869587
prompt/backbone/pytorch_model-00003-of-00003.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:de39ef6b30f9338bb921e0363e2aa8959137f041cab8f4d1649c197177e63cc5
+ size 4832688079
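Summing the three shard sizes above gives a quick check against the index that follows; the total comes out a shade larger than the tensor-only "total_size" recorded there, presumably because the shard files also carry container overhead:

```python
# Quick arithmetic on the shard sizes listed in the three LFS pointers above.
shard_sizes = [9_967_006_630, 9_983_869_587, 4_832_688_079]
total_bytes = sum(shard_sizes)
print(f"{total_bytes:,} bytes (~{total_bytes / 1e9:.1f} GB)")
# 24,783,564,296 bytes, slightly above the 24,783,450,224-byte "total_size"
# in pytorch_model.bin.index.json.
```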
prompt/backbone/pytorch_model.bin.index.json ADDED
@@ -0,0 +1,348 @@
+ {
+ "metadata": {
+ "total_size": 24783450224
+ },
+ "weight_map": {
+ "lm_head.bias": "pytorch_model-00003-of-00003.bin",
+ "lm_head.weight": "pytorch_model-00003-of-00003.bin",
+ "transformer.h.0.attn.bias": "pytorch_model-00001-of-00003.bin",
+ "transformer.h.0.attn.k_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "transformer.h.0.attn.masked_bias": "pytorch_model-00001-of-00003.bin",
+ "transformer.h.0.attn.out_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "transformer.h.0.attn.q_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "transformer.h.0.attn.v_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "transformer.h.0.ln_1.bias": "pytorch_model-00001-of-00003.bin",
+ "transformer.h.0.ln_1.weight": "pytorch_model-00001-of-00003.bin",
+ "transformer.h.0.mlp.fc_in.bias": "pytorch_model-00001-of-00003.bin",
+ "transformer.h.0.mlp.fc_in.weight": "pytorch_model-00001-of-00003.bin",
+ "transformer.h.0.mlp.fc_out.bias": "pytorch_model-00001-of-00003.bin",
+ "transformer.h.0.mlp.fc_out.weight": "pytorch_model-00001-of-00003.bin",
+ "transformer.h.1.attn.bias": "pytorch_model-00001-of-00003.bin",
+ "transformer.h.1.attn.k_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "transformer.h.1.attn.masked_bias": "pytorch_model-00001-of-00003.bin",
+ "transformer.h.1.attn.out_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "transformer.h.1.attn.q_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "transformer.h.1.attn.v_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "transformer.h.1.ln_1.bias": "pytorch_model-00001-of-00003.bin",
+ "transformer.h.1.ln_1.weight": "pytorch_model-00001-of-00003.bin",
+ "transformer.h.1.mlp.fc_in.bias": "pytorch_model-00001-of-00003.bin",
+ "transformer.h.1.mlp.fc_in.weight": "pytorch_model-00001-of-00003.bin",
+ "transformer.h.1.mlp.fc_out.bias": "pytorch_model-00001-of-00003.bin",
+ "transformer.h.1.mlp.fc_out.weight": "pytorch_model-00001-of-00003.bin",
+ "transformer.h.10.attn.bias": "pytorch_model-00001-of-00003.bin",
+ "transformer.h.10.attn.k_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "transformer.h.10.attn.masked_bias": "pytorch_model-00001-of-00003.bin",
+ "transformer.h.10.attn.out_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "transformer.h.10.attn.q_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "transformer.h.10.attn.v_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "transformer.h.10.ln_1.bias": "pytorch_model-00001-of-00003.bin",
+ "transformer.h.10.ln_1.weight": "pytorch_model-00001-of-00003.bin",
+ "transformer.h.10.mlp.fc_in.bias": "pytorch_model-00001-of-00003.bin",
+ "transformer.h.10.mlp.fc_in.weight": "pytorch_model-00001-of-00003.bin",
+ "transformer.h.10.mlp.fc_out.bias": "pytorch_model-00001-of-00003.bin",
+ "transformer.h.10.mlp.fc_out.weight": "pytorch_model-00001-of-00003.bin",
+ "transformer.h.11.attn.bias": "pytorch_model-00001-of-00003.bin",
+ "transformer.h.11.attn.k_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "transformer.h.11.attn.masked_bias": "pytorch_model-00001-of-00003.bin",
+ "transformer.h.11.attn.out_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "transformer.h.11.attn.q_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "transformer.h.11.attn.v_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "transformer.h.11.ln_1.bias": "pytorch_model-00001-of-00003.bin",
+ "transformer.h.11.ln_1.weight": "pytorch_model-00001-of-00003.bin",
+ "transformer.h.11.mlp.fc_in.bias": "pytorch_model-00002-of-00003.bin",
+ "transformer.h.11.mlp.fc_in.weight": "pytorch_model-00002-of-00003.bin",
+ "transformer.h.11.mlp.fc_out.bias": "pytorch_model-00002-of-00003.bin",
+ "transformer.h.11.mlp.fc_out.weight": "pytorch_model-00002-of-00003.bin",
56
+ "transformer.h.12.attn.bias": "pytorch_model-00002-of-00003.bin",
57
+ "transformer.h.12.attn.k_proj.weight": "pytorch_model-00002-of-00003.bin",
58
+ "transformer.h.12.attn.masked_bias": "pytorch_model-00002-of-00003.bin",
59
+ "transformer.h.12.attn.out_proj.weight": "pytorch_model-00002-of-00003.bin",
60
+ "transformer.h.12.attn.q_proj.weight": "pytorch_model-00002-of-00003.bin",
61
+ "transformer.h.12.attn.v_proj.weight": "pytorch_model-00002-of-00003.bin",
62
+ "transformer.h.12.ln_1.bias": "pytorch_model-00002-of-00003.bin",
63
+ "transformer.h.12.ln_1.weight": "pytorch_model-00002-of-00003.bin",
64
+ "transformer.h.12.mlp.fc_in.bias": "pytorch_model-00002-of-00003.bin",
65
+ "transformer.h.12.mlp.fc_in.weight": "pytorch_model-00002-of-00003.bin",
66
+ "transformer.h.12.mlp.fc_out.bias": "pytorch_model-00002-of-00003.bin",
67
+ "transformer.h.12.mlp.fc_out.weight": "pytorch_model-00002-of-00003.bin",
68
+ "transformer.h.13.attn.bias": "pytorch_model-00002-of-00003.bin",
69
+ "transformer.h.13.attn.k_proj.weight": "pytorch_model-00002-of-00003.bin",
70
+ "transformer.h.13.attn.masked_bias": "pytorch_model-00002-of-00003.bin",
71
+ "transformer.h.13.attn.out_proj.weight": "pytorch_model-00002-of-00003.bin",
72
+ "transformer.h.13.attn.q_proj.weight": "pytorch_model-00002-of-00003.bin",
73
+ "transformer.h.13.attn.v_proj.weight": "pytorch_model-00002-of-00003.bin",
74
+ "transformer.h.13.ln_1.bias": "pytorch_model-00002-of-00003.bin",
75
+ "transformer.h.13.ln_1.weight": "pytorch_model-00002-of-00003.bin",
76
+ "transformer.h.13.mlp.fc_in.bias": "pytorch_model-00002-of-00003.bin",
77
+ "transformer.h.13.mlp.fc_in.weight": "pytorch_model-00002-of-00003.bin",
78
+ "transformer.h.13.mlp.fc_out.bias": "pytorch_model-00002-of-00003.bin",
79
+ "transformer.h.13.mlp.fc_out.weight": "pytorch_model-00002-of-00003.bin",
80
+ "transformer.h.14.attn.bias": "pytorch_model-00002-of-00003.bin",
81
+ "transformer.h.14.attn.k_proj.weight": "pytorch_model-00002-of-00003.bin",
82
+ "transformer.h.14.attn.masked_bias": "pytorch_model-00002-of-00003.bin",
83
+ "transformer.h.14.attn.out_proj.weight": "pytorch_model-00002-of-00003.bin",
84
+ "transformer.h.14.attn.q_proj.weight": "pytorch_model-00002-of-00003.bin",
85
+ "transformer.h.14.attn.v_proj.weight": "pytorch_model-00002-of-00003.bin",
86
+ "transformer.h.14.ln_1.bias": "pytorch_model-00002-of-00003.bin",
87
+ "transformer.h.14.ln_1.weight": "pytorch_model-00002-of-00003.bin",
88
+ "transformer.h.14.mlp.fc_in.bias": "pytorch_model-00002-of-00003.bin",
89
+ "transformer.h.14.mlp.fc_in.weight": "pytorch_model-00002-of-00003.bin",
90
+ "transformer.h.14.mlp.fc_out.bias": "pytorch_model-00002-of-00003.bin",
91
+ "transformer.h.14.mlp.fc_out.weight": "pytorch_model-00002-of-00003.bin",
92
+ "transformer.h.15.attn.bias": "pytorch_model-00002-of-00003.bin",
93
+ "transformer.h.15.attn.k_proj.weight": "pytorch_model-00002-of-00003.bin",
94
+ "transformer.h.15.attn.masked_bias": "pytorch_model-00002-of-00003.bin",
95
+ "transformer.h.15.attn.out_proj.weight": "pytorch_model-00002-of-00003.bin",
96
+ "transformer.h.15.attn.q_proj.weight": "pytorch_model-00002-of-00003.bin",
97
+ "transformer.h.15.attn.v_proj.weight": "pytorch_model-00002-of-00003.bin",
98
+ "transformer.h.15.ln_1.bias": "pytorch_model-00002-of-00003.bin",
99
+ "transformer.h.15.ln_1.weight": "pytorch_model-00002-of-00003.bin",
100
+ "transformer.h.15.mlp.fc_in.bias": "pytorch_model-00002-of-00003.bin",
101
+ "transformer.h.15.mlp.fc_in.weight": "pytorch_model-00002-of-00003.bin",
102
+ "transformer.h.15.mlp.fc_out.bias": "pytorch_model-00002-of-00003.bin",
103
+ "transformer.h.15.mlp.fc_out.weight": "pytorch_model-00002-of-00003.bin",
104
+ "transformer.h.16.attn.bias": "pytorch_model-00002-of-00003.bin",
105
+ "transformer.h.16.attn.k_proj.weight": "pytorch_model-00002-of-00003.bin",
106
+ "transformer.h.16.attn.masked_bias": "pytorch_model-00002-of-00003.bin",
107
+ "transformer.h.16.attn.out_proj.weight": "pytorch_model-00002-of-00003.bin",
108
+ "transformer.h.16.attn.q_proj.weight": "pytorch_model-00002-of-00003.bin",
109
+ "transformer.h.16.attn.v_proj.weight": "pytorch_model-00002-of-00003.bin",
110
+ "transformer.h.16.ln_1.bias": "pytorch_model-00002-of-00003.bin",
111
+ "transformer.h.16.ln_1.weight": "pytorch_model-00002-of-00003.bin",
112
+ "transformer.h.16.mlp.fc_in.bias": "pytorch_model-00002-of-00003.bin",
113
+ "transformer.h.16.mlp.fc_in.weight": "pytorch_model-00002-of-00003.bin",
114
+ "transformer.h.16.mlp.fc_out.bias": "pytorch_model-00002-of-00003.bin",
115
+ "transformer.h.16.mlp.fc_out.weight": "pytorch_model-00002-of-00003.bin",
116
+ "transformer.h.17.attn.bias": "pytorch_model-00002-of-00003.bin",
117
+ "transformer.h.17.attn.k_proj.weight": "pytorch_model-00002-of-00003.bin",
118
+ "transformer.h.17.attn.masked_bias": "pytorch_model-00002-of-00003.bin",
119
+ "transformer.h.17.attn.out_proj.weight": "pytorch_model-00002-of-00003.bin",
120
+ "transformer.h.17.attn.q_proj.weight": "pytorch_model-00002-of-00003.bin",
121
+ "transformer.h.17.attn.v_proj.weight": "pytorch_model-00002-of-00003.bin",
122
+ "transformer.h.17.ln_1.bias": "pytorch_model-00002-of-00003.bin",
123
+ "transformer.h.17.ln_1.weight": "pytorch_model-00002-of-00003.bin",
124
+ "transformer.h.17.mlp.fc_in.bias": "pytorch_model-00002-of-00003.bin",
125
+ "transformer.h.17.mlp.fc_in.weight": "pytorch_model-00002-of-00003.bin",
126
+ "transformer.h.17.mlp.fc_out.bias": "pytorch_model-00002-of-00003.bin",
127
+ "transformer.h.17.mlp.fc_out.weight": "pytorch_model-00002-of-00003.bin",
128
+ "transformer.h.18.attn.bias": "pytorch_model-00002-of-00003.bin",
129
+ "transformer.h.18.attn.k_proj.weight": "pytorch_model-00002-of-00003.bin",
130
+ "transformer.h.18.attn.masked_bias": "pytorch_model-00002-of-00003.bin",
131
+ "transformer.h.18.attn.out_proj.weight": "pytorch_model-00002-of-00003.bin",
132
+ "transformer.h.18.attn.q_proj.weight": "pytorch_model-00002-of-00003.bin",
133
+ "transformer.h.18.attn.v_proj.weight": "pytorch_model-00002-of-00003.bin",
134
+ "transformer.h.18.ln_1.bias": "pytorch_model-00002-of-00003.bin",
135
+ "transformer.h.18.ln_1.weight": "pytorch_model-00002-of-00003.bin",
136
+ "transformer.h.18.mlp.fc_in.bias": "pytorch_model-00002-of-00003.bin",
137
+ "transformer.h.18.mlp.fc_in.weight": "pytorch_model-00002-of-00003.bin",
138
+ "transformer.h.18.mlp.fc_out.bias": "pytorch_model-00002-of-00003.bin",
139
+ "transformer.h.18.mlp.fc_out.weight": "pytorch_model-00002-of-00003.bin",
140
+ "transformer.h.19.attn.bias": "pytorch_model-00002-of-00003.bin",
141
+ "transformer.h.19.attn.k_proj.weight": "pytorch_model-00002-of-00003.bin",
142
+ "transformer.h.19.attn.masked_bias": "pytorch_model-00002-of-00003.bin",
143
+ "transformer.h.19.attn.out_proj.weight": "pytorch_model-00002-of-00003.bin",
144
+ "transformer.h.19.attn.q_proj.weight": "pytorch_model-00002-of-00003.bin",
145
+ "transformer.h.19.attn.v_proj.weight": "pytorch_model-00002-of-00003.bin",
146
+ "transformer.h.19.ln_1.bias": "pytorch_model-00002-of-00003.bin",
147
+ "transformer.h.19.ln_1.weight": "pytorch_model-00002-of-00003.bin",
148
+ "transformer.h.19.mlp.fc_in.bias": "pytorch_model-00002-of-00003.bin",
149
+ "transformer.h.19.mlp.fc_in.weight": "pytorch_model-00002-of-00003.bin",
150
+ "transformer.h.19.mlp.fc_out.bias": "pytorch_model-00002-of-00003.bin",
151
+ "transformer.h.19.mlp.fc_out.weight": "pytorch_model-00002-of-00003.bin",
152
+ "transformer.h.2.attn.bias": "pytorch_model-00001-of-00003.bin",
153
+ "transformer.h.2.attn.k_proj.weight": "pytorch_model-00001-of-00003.bin",
154
+ "transformer.h.2.attn.masked_bias": "pytorch_model-00001-of-00003.bin",
155
+ "transformer.h.2.attn.out_proj.weight": "pytorch_model-00001-of-00003.bin",
156
+ "transformer.h.2.attn.q_proj.weight": "pytorch_model-00001-of-00003.bin",
157
+ "transformer.h.2.attn.v_proj.weight": "pytorch_model-00001-of-00003.bin",
158
+ "transformer.h.2.ln_1.bias": "pytorch_model-00001-of-00003.bin",
159
+ "transformer.h.2.ln_1.weight": "pytorch_model-00001-of-00003.bin",
160
+ "transformer.h.2.mlp.fc_in.bias": "pytorch_model-00001-of-00003.bin",
161
+ "transformer.h.2.mlp.fc_in.weight": "pytorch_model-00001-of-00003.bin",
162
+ "transformer.h.2.mlp.fc_out.bias": "pytorch_model-00001-of-00003.bin",
163
+ "transformer.h.2.mlp.fc_out.weight": "pytorch_model-00001-of-00003.bin",
164
+ "transformer.h.20.attn.bias": "pytorch_model-00002-of-00003.bin",
165
+ "transformer.h.20.attn.k_proj.weight": "pytorch_model-00002-of-00003.bin",
166
+ "transformer.h.20.attn.masked_bias": "pytorch_model-00002-of-00003.bin",
167
+ "transformer.h.20.attn.out_proj.weight": "pytorch_model-00002-of-00003.bin",
168
+ "transformer.h.20.attn.q_proj.weight": "pytorch_model-00002-of-00003.bin",
169
+ "transformer.h.20.attn.v_proj.weight": "pytorch_model-00002-of-00003.bin",
170
+ "transformer.h.20.ln_1.bias": "pytorch_model-00002-of-00003.bin",
171
+ "transformer.h.20.ln_1.weight": "pytorch_model-00002-of-00003.bin",
172
+ "transformer.h.20.mlp.fc_in.bias": "pytorch_model-00002-of-00003.bin",
173
+ "transformer.h.20.mlp.fc_in.weight": "pytorch_model-00002-of-00003.bin",
174
+ "transformer.h.20.mlp.fc_out.bias": "pytorch_model-00002-of-00003.bin",
175
+ "transformer.h.20.mlp.fc_out.weight": "pytorch_model-00002-of-00003.bin",
176
+ "transformer.h.21.attn.bias": "pytorch_model-00002-of-00003.bin",
177
+ "transformer.h.21.attn.k_proj.weight": "pytorch_model-00002-of-00003.bin",
178
+ "transformer.h.21.attn.masked_bias": "pytorch_model-00002-of-00003.bin",
179
+ "transformer.h.21.attn.out_proj.weight": "pytorch_model-00002-of-00003.bin",
180
+ "transformer.h.21.attn.q_proj.weight": "pytorch_model-00002-of-00003.bin",
181
+ "transformer.h.21.attn.v_proj.weight": "pytorch_model-00002-of-00003.bin",
182
+ "transformer.h.21.ln_1.bias": "pytorch_model-00002-of-00003.bin",
183
+ "transformer.h.21.ln_1.weight": "pytorch_model-00002-of-00003.bin",
184
+ "transformer.h.21.mlp.fc_in.bias": "pytorch_model-00002-of-00003.bin",
185
+ "transformer.h.21.mlp.fc_in.weight": "pytorch_model-00002-of-00003.bin",
186
+ "transformer.h.21.mlp.fc_out.bias": "pytorch_model-00002-of-00003.bin",
187
+ "transformer.h.21.mlp.fc_out.weight": "pytorch_model-00002-of-00003.bin",
188
+ "transformer.h.22.attn.bias": "pytorch_model-00002-of-00003.bin",
189
+ "transformer.h.22.attn.k_proj.weight": "pytorch_model-00002-of-00003.bin",
190
+ "transformer.h.22.attn.masked_bias": "pytorch_model-00002-of-00003.bin",
191
+ "transformer.h.22.attn.out_proj.weight": "pytorch_model-00002-of-00003.bin",
192
+ "transformer.h.22.attn.q_proj.weight": "pytorch_model-00002-of-00003.bin",
193
+ "transformer.h.22.attn.v_proj.weight": "pytorch_model-00002-of-00003.bin",
194
+ "transformer.h.22.ln_1.bias": "pytorch_model-00002-of-00003.bin",
195
+ "transformer.h.22.ln_1.weight": "pytorch_model-00002-of-00003.bin",
196
+ "transformer.h.22.mlp.fc_in.bias": "pytorch_model-00002-of-00003.bin",
197
+ "transformer.h.22.mlp.fc_in.weight": "pytorch_model-00002-of-00003.bin",
198
+ "transformer.h.22.mlp.fc_out.bias": "pytorch_model-00002-of-00003.bin",
199
+ "transformer.h.22.mlp.fc_out.weight": "pytorch_model-00002-of-00003.bin",
200
+ "transformer.h.23.attn.bias": "pytorch_model-00002-of-00003.bin",
201
+ "transformer.h.23.attn.k_proj.weight": "pytorch_model-00002-of-00003.bin",
202
+ "transformer.h.23.attn.masked_bias": "pytorch_model-00002-of-00003.bin",
203
+ "transformer.h.23.attn.out_proj.weight": "pytorch_model-00002-of-00003.bin",
204
+ "transformer.h.23.attn.q_proj.weight": "pytorch_model-00002-of-00003.bin",
205
+ "transformer.h.23.attn.v_proj.weight": "pytorch_model-00002-of-00003.bin",
206
+ "transformer.h.23.ln_1.bias": "pytorch_model-00002-of-00003.bin",
207
+ "transformer.h.23.ln_1.weight": "pytorch_model-00002-of-00003.bin",
208
+ "transformer.h.23.mlp.fc_in.bias": "pytorch_model-00003-of-00003.bin",
209
+ "transformer.h.23.mlp.fc_in.weight": "pytorch_model-00003-of-00003.bin",
210
+ "transformer.h.23.mlp.fc_out.bias": "pytorch_model-00003-of-00003.bin",
211
+ "transformer.h.23.mlp.fc_out.weight": "pytorch_model-00003-of-00003.bin",
212
+ "transformer.h.24.attn.bias": "pytorch_model-00003-of-00003.bin",
213
+ "transformer.h.24.attn.k_proj.weight": "pytorch_model-00003-of-00003.bin",
214
+ "transformer.h.24.attn.masked_bias": "pytorch_model-00003-of-00003.bin",
215
+ "transformer.h.24.attn.out_proj.weight": "pytorch_model-00003-of-00003.bin",
216
+ "transformer.h.24.attn.q_proj.weight": "pytorch_model-00003-of-00003.bin",
217
+ "transformer.h.24.attn.v_proj.weight": "pytorch_model-00003-of-00003.bin",
218
+ "transformer.h.24.ln_1.bias": "pytorch_model-00003-of-00003.bin",
219
+ "transformer.h.24.ln_1.weight": "pytorch_model-00003-of-00003.bin",
220
+ "transformer.h.24.mlp.fc_in.bias": "pytorch_model-00003-of-00003.bin",
221
+ "transformer.h.24.mlp.fc_in.weight": "pytorch_model-00003-of-00003.bin",
222
+ "transformer.h.24.mlp.fc_out.bias": "pytorch_model-00003-of-00003.bin",
223
+ "transformer.h.24.mlp.fc_out.weight": "pytorch_model-00003-of-00003.bin",
224
+ "transformer.h.25.attn.bias": "pytorch_model-00003-of-00003.bin",
225
+ "transformer.h.25.attn.k_proj.weight": "pytorch_model-00003-of-00003.bin",
226
+ "transformer.h.25.attn.masked_bias": "pytorch_model-00003-of-00003.bin",
227
+ "transformer.h.25.attn.out_proj.weight": "pytorch_model-00003-of-00003.bin",
228
+ "transformer.h.25.attn.q_proj.weight": "pytorch_model-00003-of-00003.bin",
229
+ "transformer.h.25.attn.v_proj.weight": "pytorch_model-00003-of-00003.bin",
230
+ "transformer.h.25.ln_1.bias": "pytorch_model-00003-of-00003.bin",
231
+ "transformer.h.25.ln_1.weight": "pytorch_model-00003-of-00003.bin",
232
+ "transformer.h.25.mlp.fc_in.bias": "pytorch_model-00003-of-00003.bin",
233
+ "transformer.h.25.mlp.fc_in.weight": "pytorch_model-00003-of-00003.bin",
234
+ "transformer.h.25.mlp.fc_out.bias": "pytorch_model-00003-of-00003.bin",
235
+ "transformer.h.25.mlp.fc_out.weight": "pytorch_model-00003-of-00003.bin",
236
+ "transformer.h.26.attn.bias": "pytorch_model-00003-of-00003.bin",
237
+ "transformer.h.26.attn.k_proj.weight": "pytorch_model-00003-of-00003.bin",
238
+ "transformer.h.26.attn.masked_bias": "pytorch_model-00003-of-00003.bin",
239
+ "transformer.h.26.attn.out_proj.weight": "pytorch_model-00003-of-00003.bin",
240
+ "transformer.h.26.attn.q_proj.weight": "pytorch_model-00003-of-00003.bin",
241
+ "transformer.h.26.attn.v_proj.weight": "pytorch_model-00003-of-00003.bin",
242
+ "transformer.h.26.ln_1.bias": "pytorch_model-00003-of-00003.bin",
243
+ "transformer.h.26.ln_1.weight": "pytorch_model-00003-of-00003.bin",
244
+ "transformer.h.26.mlp.fc_in.bias": "pytorch_model-00003-of-00003.bin",
245
+ "transformer.h.26.mlp.fc_in.weight": "pytorch_model-00003-of-00003.bin",
246
+ "transformer.h.26.mlp.fc_out.bias": "pytorch_model-00003-of-00003.bin",
247
+ "transformer.h.26.mlp.fc_out.weight": "pytorch_model-00003-of-00003.bin",
248
+ "transformer.h.27.attn.bias": "pytorch_model-00003-of-00003.bin",
249
+ "transformer.h.27.attn.k_proj.weight": "pytorch_model-00003-of-00003.bin",
250
+ "transformer.h.27.attn.masked_bias": "pytorch_model-00003-of-00003.bin",
251
+ "transformer.h.27.attn.out_proj.weight": "pytorch_model-00003-of-00003.bin",
252
+ "transformer.h.27.attn.q_proj.weight": "pytorch_model-00003-of-00003.bin",
253
+ "transformer.h.27.attn.v_proj.weight": "pytorch_model-00003-of-00003.bin",
254
+ "transformer.h.27.ln_1.bias": "pytorch_model-00003-of-00003.bin",
255
+ "transformer.h.27.ln_1.weight": "pytorch_model-00003-of-00003.bin",
256
+ "transformer.h.27.mlp.fc_in.bias": "pytorch_model-00003-of-00003.bin",
257
+ "transformer.h.27.mlp.fc_in.weight": "pytorch_model-00003-of-00003.bin",
258
+ "transformer.h.27.mlp.fc_out.bias": "pytorch_model-00003-of-00003.bin",
259
+ "transformer.h.27.mlp.fc_out.weight": "pytorch_model-00003-of-00003.bin",
260
+ "transformer.h.3.attn.bias": "pytorch_model-00001-of-00003.bin",
261
+ "transformer.h.3.attn.k_proj.weight": "pytorch_model-00001-of-00003.bin",
262
+ "transformer.h.3.attn.masked_bias": "pytorch_model-00001-of-00003.bin",
263
+ "transformer.h.3.attn.out_proj.weight": "pytorch_model-00001-of-00003.bin",
264
+ "transformer.h.3.attn.q_proj.weight": "pytorch_model-00001-of-00003.bin",
265
+ "transformer.h.3.attn.v_proj.weight": "pytorch_model-00001-of-00003.bin",
266
+ "transformer.h.3.ln_1.bias": "pytorch_model-00001-of-00003.bin",
267
+ "transformer.h.3.ln_1.weight": "pytorch_model-00001-of-00003.bin",
268
+ "transformer.h.3.mlp.fc_in.bias": "pytorch_model-00001-of-00003.bin",
269
+ "transformer.h.3.mlp.fc_in.weight": "pytorch_model-00001-of-00003.bin",
270
+ "transformer.h.3.mlp.fc_out.bias": "pytorch_model-00001-of-00003.bin",
271
+ "transformer.h.3.mlp.fc_out.weight": "pytorch_model-00001-of-00003.bin",
272
+ "transformer.h.4.attn.bias": "pytorch_model-00001-of-00003.bin",
273
+ "transformer.h.4.attn.k_proj.weight": "pytorch_model-00001-of-00003.bin",
274
+ "transformer.h.4.attn.masked_bias": "pytorch_model-00001-of-00003.bin",
275
+ "transformer.h.4.attn.out_proj.weight": "pytorch_model-00001-of-00003.bin",
276
+ "transformer.h.4.attn.q_proj.weight": "pytorch_model-00001-of-00003.bin",
277
+ "transformer.h.4.attn.v_proj.weight": "pytorch_model-00001-of-00003.bin",
278
+ "transformer.h.4.ln_1.bias": "pytorch_model-00001-of-00003.bin",
279
+ "transformer.h.4.ln_1.weight": "pytorch_model-00001-of-00003.bin",
280
+ "transformer.h.4.mlp.fc_in.bias": "pytorch_model-00001-of-00003.bin",
281
+ "transformer.h.4.mlp.fc_in.weight": "pytorch_model-00001-of-00003.bin",
282
+ "transformer.h.4.mlp.fc_out.bias": "pytorch_model-00001-of-00003.bin",
283
+ "transformer.h.4.mlp.fc_out.weight": "pytorch_model-00001-of-00003.bin",
284
+ "transformer.h.5.attn.bias": "pytorch_model-00001-of-00003.bin",
285
+ "transformer.h.5.attn.k_proj.weight": "pytorch_model-00001-of-00003.bin",
286
+ "transformer.h.5.attn.masked_bias": "pytorch_model-00001-of-00003.bin",
287
+ "transformer.h.5.attn.out_proj.weight": "pytorch_model-00001-of-00003.bin",
288
+ "transformer.h.5.attn.q_proj.weight": "pytorch_model-00001-of-00003.bin",
289
+ "transformer.h.5.attn.v_proj.weight": "pytorch_model-00001-of-00003.bin",
290
+ "transformer.h.5.ln_1.bias": "pytorch_model-00001-of-00003.bin",
291
+ "transformer.h.5.ln_1.weight": "pytorch_model-00001-of-00003.bin",
292
+ "transformer.h.5.mlp.fc_in.bias": "pytorch_model-00001-of-00003.bin",
293
+ "transformer.h.5.mlp.fc_in.weight": "pytorch_model-00001-of-00003.bin",
294
+ "transformer.h.5.mlp.fc_out.bias": "pytorch_model-00001-of-00003.bin",
295
+ "transformer.h.5.mlp.fc_out.weight": "pytorch_model-00001-of-00003.bin",
296
+ "transformer.h.6.attn.bias": "pytorch_model-00001-of-00003.bin",
297
+ "transformer.h.6.attn.k_proj.weight": "pytorch_model-00001-of-00003.bin",
298
+ "transformer.h.6.attn.masked_bias": "pytorch_model-00001-of-00003.bin",
299
+ "transformer.h.6.attn.out_proj.weight": "pytorch_model-00001-of-00003.bin",
300
+ "transformer.h.6.attn.q_proj.weight": "pytorch_model-00001-of-00003.bin",
301
+ "transformer.h.6.attn.v_proj.weight": "pytorch_model-00001-of-00003.bin",
302
+ "transformer.h.6.ln_1.bias": "pytorch_model-00001-of-00003.bin",
303
+ "transformer.h.6.ln_1.weight": "pytorch_model-00001-of-00003.bin",
304
+ "transformer.h.6.mlp.fc_in.bias": "pytorch_model-00001-of-00003.bin",
305
+ "transformer.h.6.mlp.fc_in.weight": "pytorch_model-00001-of-00003.bin",
306
+ "transformer.h.6.mlp.fc_out.bias": "pytorch_model-00001-of-00003.bin",
307
+ "transformer.h.6.mlp.fc_out.weight": "pytorch_model-00001-of-00003.bin",
308
+ "transformer.h.7.attn.bias": "pytorch_model-00001-of-00003.bin",
309
+ "transformer.h.7.attn.k_proj.weight": "pytorch_model-00001-of-00003.bin",
310
+ "transformer.h.7.attn.masked_bias": "pytorch_model-00001-of-00003.bin",
311
+ "transformer.h.7.attn.out_proj.weight": "pytorch_model-00001-of-00003.bin",
312
+ "transformer.h.7.attn.q_proj.weight": "pytorch_model-00001-of-00003.bin",
313
+ "transformer.h.7.attn.v_proj.weight": "pytorch_model-00001-of-00003.bin",
314
+ "transformer.h.7.ln_1.bias": "pytorch_model-00001-of-00003.bin",
315
+ "transformer.h.7.ln_1.weight": "pytorch_model-00001-of-00003.bin",
316
+ "transformer.h.7.mlp.fc_in.bias": "pytorch_model-00001-of-00003.bin",
317
+ "transformer.h.7.mlp.fc_in.weight": "pytorch_model-00001-of-00003.bin",
318
+ "transformer.h.7.mlp.fc_out.bias": "pytorch_model-00001-of-00003.bin",
319
+ "transformer.h.7.mlp.fc_out.weight": "pytorch_model-00001-of-00003.bin",
320
+ "transformer.h.8.attn.bias": "pytorch_model-00001-of-00003.bin",
321
+ "transformer.h.8.attn.k_proj.weight": "pytorch_model-00001-of-00003.bin",
322
+ "transformer.h.8.attn.masked_bias": "pytorch_model-00001-of-00003.bin",
323
+ "transformer.h.8.attn.out_proj.weight": "pytorch_model-00001-of-00003.bin",
324
+ "transformer.h.8.attn.q_proj.weight": "pytorch_model-00001-of-00003.bin",
325
+ "transformer.h.8.attn.v_proj.weight": "pytorch_model-00001-of-00003.bin",
326
+ "transformer.h.8.ln_1.bias": "pytorch_model-00001-of-00003.bin",
327
+ "transformer.h.8.ln_1.weight": "pytorch_model-00001-of-00003.bin",
328
+ "transformer.h.8.mlp.fc_in.bias": "pytorch_model-00001-of-00003.bin",
329
+ "transformer.h.8.mlp.fc_in.weight": "pytorch_model-00001-of-00003.bin",
330
+ "transformer.h.8.mlp.fc_out.bias": "pytorch_model-00001-of-00003.bin",
331
+ "transformer.h.8.mlp.fc_out.weight": "pytorch_model-00001-of-00003.bin",
332
+ "transformer.h.9.attn.bias": "pytorch_model-00001-of-00003.bin",
333
+ "transformer.h.9.attn.k_proj.weight": "pytorch_model-00001-of-00003.bin",
334
+ "transformer.h.9.attn.masked_bias": "pytorch_model-00001-of-00003.bin",
335
+ "transformer.h.9.attn.out_proj.weight": "pytorch_model-00001-of-00003.bin",
336
+ "transformer.h.9.attn.q_proj.weight": "pytorch_model-00001-of-00003.bin",
337
+ "transformer.h.9.attn.v_proj.weight": "pytorch_model-00001-of-00003.bin",
338
+ "transformer.h.9.ln_1.bias": "pytorch_model-00001-of-00003.bin",
339
+ "transformer.h.9.ln_1.weight": "pytorch_model-00001-of-00003.bin",
340
+ "transformer.h.9.mlp.fc_in.bias": "pytorch_model-00001-of-00003.bin",
341
+ "transformer.h.9.mlp.fc_in.weight": "pytorch_model-00001-of-00003.bin",
342
+ "transformer.h.9.mlp.fc_out.bias": "pytorch_model-00001-of-00003.bin",
343
+ "transformer.h.9.mlp.fc_out.weight": "pytorch_model-00001-of-00003.bin",
344
+ "transformer.ln_f.bias": "pytorch_model-00003-of-00003.bin",
345
+ "transformer.ln_f.weight": "pytorch_model-00003-of-00003.bin",
346
+ "transformer.wte.weight": "pytorch_model-00001-of-00003.bin"
347
+ }
348
+ }
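
The index file above maps every parameter name to the shard that stores it, and total_size is the combined byte count of the three .bin files. A small sketch (not from this repo) of how that weight_map can be read and grouped by shard:

```python
# Illustrative only: inspect a sharded-checkpoint index like the one above.
import json
from collections import defaultdict

with open("prompt/backbone/pytorch_model.bin.index.json") as f:
    index = json.load(f)

print(index["metadata"]["total_size"])                 # 24783450224 bytes across three shards
print(index["weight_map"]["transformer.wte.weight"])   # -> "pytorch_model-00001-of-00003.bin"

# Group parameter names by shard, roughly what a loader does before reading
# the shards one at a time.
params_per_shard = defaultdict(list)
for name, shard in index["weight_map"].items():
    params_per_shard[shard].append(name)
for shard, names in sorted(params_per_shard.items()):
    print(shard, len(names))
```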
prompt/config.json ADDED
@@ -0,0 +1,42 @@
+ {
+ "_name_or_path": "anhdungitvn/ko-gpt-bot-sc-7b",
+ "activation_function": "gelu_new",
+ "architectures": [
+ "GPT6BPromptTuningLM"
+ ],
+ "attn_pdrop": 0.1,
+ "bos_token_id": 0,
+ "embd_pdrop": 0.1,
+ "eos_token_id": 1,
+ "gradient_checkpointing": false,
+ "initializer_range": 0.02,
+ "layer_norm_epsilon": 1e-05,
+ "model_type": "gptj",
+ "n_embd": 4096,
+ "n_head": 16,
+ "n_inner": null,
+ "n_layer": 28,
+ "n_positions": 2048,
+ "pad_token_id": 1,
+ "resid_pdrop": 0.0,
+ "rotary_dim": 64,
+ "scale_attn_weights": true,
+ "summary_activation": null,
+ "summary_first_dropout": 0.1,
+ "summary_proj_to_labels": true,
+ "summary_type": "cls_index",
+ "summary_use_proj": true,
+ "task_specific_params": {
+ "text-generation": {
+ "do_sample": true,
+ "max_length": 128,
+ "temperature": 1.0
+ }
+ },
+ "tie_word_embeddings": false,
+ "tokenizer_class": "PreTrainedTokenizerFast",
+ "torch_dtype": "float32",
+ "transformers_version": "4.26.0",
+ "use_cache": true,
+ "vocab_size": 64512
+ }
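
GPT6BPromptTuningLM in the config above is a custom class specific to this repository and is not included in this commit; the prompt checkpoint's index below additionally stores a learned_embedding tensor alongside the GPT-J weights. The following is only an illustrative sketch of the general soft-prompt-tuning idea such a class typically implements (the prompt length of 20 is an assumption), not this repository's actual implementation:

```python
# Hedged sketch of soft-prompt tuning over a frozen GPT-J backbone.
# The real GPT6BPromptTuningLM class is not shown in this commit; this is only
# the usual pattern of prepending a learned embedding to the input embeddings.
import torch
from transformers import GPTJForCausalLM

class SoftPromptGPTJ(torch.nn.Module):
    def __init__(self, backbone: GPTJForCausalLM, n_prompt_tokens: int = 20):  # 20 is an assumed length
        super().__init__()
        self.backbone = backbone
        n_embd = backbone.config.n_embd  # 4096 for this checkpoint
        # Learned soft-prompt vectors; the checkpoint above stores such a tensor
        # under the key "learned_embedding".
        self.learned_embedding = torch.nn.Parameter(torch.zeros(n_prompt_tokens, n_embd))

    def forward(self, input_ids, attention_mask=None):
        tok_emb = self.backbone.transformer.wte(input_ids)                        # (B, T, E)
        prompt = self.learned_embedding.unsqueeze(0).expand(input_ids.size(0), -1, -1)
        inputs_embeds = torch.cat([prompt, tok_emb], dim=1)                       # (B, P+T, E)
        if attention_mask is not None:
            pad = torch.ones(input_ids.size(0), prompt.size(1),
                             dtype=attention_mask.dtype, device=attention_mask.device)
            attention_mask = torch.cat([pad, attention_mask], dim=1)
        return self.backbone(inputs_embeds=inputs_embeds, attention_mask=attention_mask)
```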
prompt/generation_config.json ADDED
@@ -0,0 +1,7 @@
+ {
+ "_from_model_config": true,
+ "bos_token_id": 0,
+ "eos_token_id": 1,
+ "pad_token_id": 1,
+ "transformers_version": "4.26.0"
+ }
prompt/pytorch_model-00001-of-00003.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d53f20b8099cc5b036f773d8167eaa4f088e6266e5fb676945db3192832f1607
+ size 9947013471
prompt/pytorch_model-00002-of-00003.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:dc0befa7334dc05f2048620ca07fa874cdfc622765877aa6903f047aa9688782
+ size 9937727189
prompt/pytorch_model-00003-of-00003.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0cfc0fc1b61c7d5d926eefdb69e00fd01186e3c61d625ed1265ea86d82fafc34
+ size 4820103855
prompt/pytorch_model.bin.index.json ADDED
@@ -0,0 +1,349 @@
1
+ {
2
+ "metadata": {
3
+ "total_size": 24706838640.0
4
+ },
5
+ "weight_map": {
6
+ "learned_embedding": "pytorch_model-00001-of-00003.bin",
7
+ "lm_head.bias": "pytorch_model-00003-of-00003.bin",
8
+ "lm_head.weight": "pytorch_model-00003-of-00003.bin",
9
+ "transformer.h.0.attn.bias": "pytorch_model-00001-of-00003.bin",
10
+ "transformer.h.0.attn.k_proj.weight": "pytorch_model-00001-of-00003.bin",
11
+ "transformer.h.0.attn.masked_bias": "pytorch_model-00001-of-00003.bin",
12
+ "transformer.h.0.attn.out_proj.weight": "pytorch_model-00001-of-00003.bin",
13
+ "transformer.h.0.attn.q_proj.weight": "pytorch_model-00001-of-00003.bin",
14
+ "transformer.h.0.attn.v_proj.weight": "pytorch_model-00001-of-00003.bin",
15
+ "transformer.h.0.ln_1.bias": "pytorch_model-00001-of-00003.bin",
16
+ "transformer.h.0.ln_1.weight": "pytorch_model-00001-of-00003.bin",
17
+ "transformer.h.0.mlp.fc_in.bias": "pytorch_model-00001-of-00003.bin",
18
+ "transformer.h.0.mlp.fc_in.weight": "pytorch_model-00001-of-00003.bin",
19
+ "transformer.h.0.mlp.fc_out.bias": "pytorch_model-00001-of-00003.bin",
20
+ "transformer.h.0.mlp.fc_out.weight": "pytorch_model-00001-of-00003.bin",
21
+ "transformer.h.1.attn.bias": "pytorch_model-00001-of-00003.bin",
22
+ "transformer.h.1.attn.k_proj.weight": "pytorch_model-00001-of-00003.bin",
23
+ "transformer.h.1.attn.masked_bias": "pytorch_model-00001-of-00003.bin",
24
+ "transformer.h.1.attn.out_proj.weight": "pytorch_model-00001-of-00003.bin",
25
+ "transformer.h.1.attn.q_proj.weight": "pytorch_model-00001-of-00003.bin",
26
+ "transformer.h.1.attn.v_proj.weight": "pytorch_model-00001-of-00003.bin",
27
+ "transformer.h.1.ln_1.bias": "pytorch_model-00001-of-00003.bin",
28
+ "transformer.h.1.ln_1.weight": "pytorch_model-00001-of-00003.bin",
29
+ "transformer.h.1.mlp.fc_in.bias": "pytorch_model-00001-of-00003.bin",
30
+ "transformer.h.1.mlp.fc_in.weight": "pytorch_model-00001-of-00003.bin",
31
+ "transformer.h.1.mlp.fc_out.bias": "pytorch_model-00001-of-00003.bin",
32
+ "transformer.h.1.mlp.fc_out.weight": "pytorch_model-00001-of-00003.bin",
33
+ "transformer.h.10.attn.bias": "pytorch_model-00001-of-00003.bin",
34
+ "transformer.h.10.attn.k_proj.weight": "pytorch_model-00001-of-00003.bin",
35
+ "transformer.h.10.attn.masked_bias": "pytorch_model-00001-of-00003.bin",
36
+ "transformer.h.10.attn.out_proj.weight": "pytorch_model-00001-of-00003.bin",
37
+ "transformer.h.10.attn.q_proj.weight": "pytorch_model-00001-of-00003.bin",
38
+ "transformer.h.10.attn.v_proj.weight": "pytorch_model-00001-of-00003.bin",
39
+ "transformer.h.10.ln_1.bias": "pytorch_model-00001-of-00003.bin",
40
+ "transformer.h.10.ln_1.weight": "pytorch_model-00001-of-00003.bin",
41
+ "transformer.h.10.mlp.fc_in.bias": "pytorch_model-00001-of-00003.bin",
42
+ "transformer.h.10.mlp.fc_in.weight": "pytorch_model-00001-of-00003.bin",
43
+ "transformer.h.10.mlp.fc_out.bias": "pytorch_model-00001-of-00003.bin",
44
+ "transformer.h.10.mlp.fc_out.weight": "pytorch_model-00001-of-00003.bin",
45
+ "transformer.h.11.attn.bias": "pytorch_model-00001-of-00003.bin",
46
+ "transformer.h.11.attn.k_proj.weight": "pytorch_model-00002-of-00003.bin",
47
+ "transformer.h.11.attn.masked_bias": "pytorch_model-00001-of-00003.bin",
48
+ "transformer.h.11.attn.out_proj.weight": "pytorch_model-00002-of-00003.bin",
49
+ "transformer.h.11.attn.q_proj.weight": "pytorch_model-00002-of-00003.bin",
50
+ "transformer.h.11.attn.v_proj.weight": "pytorch_model-00002-of-00003.bin",
51
+ "transformer.h.11.ln_1.bias": "pytorch_model-00001-of-00003.bin",
52
+ "transformer.h.11.ln_1.weight": "pytorch_model-00001-of-00003.bin",
53
+ "transformer.h.11.mlp.fc_in.bias": "pytorch_model-00002-of-00003.bin",
54
+ "transformer.h.11.mlp.fc_in.weight": "pytorch_model-00002-of-00003.bin",
55
+ "transformer.h.11.mlp.fc_out.bias": "pytorch_model-00002-of-00003.bin",
56
+ "transformer.h.11.mlp.fc_out.weight": "pytorch_model-00002-of-00003.bin",
57
+ "transformer.h.12.attn.bias": "pytorch_model-00002-of-00003.bin",
58
+ "transformer.h.12.attn.k_proj.weight": "pytorch_model-00002-of-00003.bin",
59
+ "transformer.h.12.attn.masked_bias": "pytorch_model-00002-of-00003.bin",
60
+ "transformer.h.12.attn.out_proj.weight": "pytorch_model-00002-of-00003.bin",
61
+ "transformer.h.12.attn.q_proj.weight": "pytorch_model-00002-of-00003.bin",
62
+ "transformer.h.12.attn.v_proj.weight": "pytorch_model-00002-of-00003.bin",
63
+ "transformer.h.12.ln_1.bias": "pytorch_model-00002-of-00003.bin",
64
+ "transformer.h.12.ln_1.weight": "pytorch_model-00002-of-00003.bin",
65
+ "transformer.h.12.mlp.fc_in.bias": "pytorch_model-00002-of-00003.bin",
66
+ "transformer.h.12.mlp.fc_in.weight": "pytorch_model-00002-of-00003.bin",
67
+ "transformer.h.12.mlp.fc_out.bias": "pytorch_model-00002-of-00003.bin",
68
+ "transformer.h.12.mlp.fc_out.weight": "pytorch_model-00002-of-00003.bin",
69
+ "transformer.h.13.attn.bias": "pytorch_model-00002-of-00003.bin",
70
+ "transformer.h.13.attn.k_proj.weight": "pytorch_model-00002-of-00003.bin",
71
+ "transformer.h.13.attn.masked_bias": "pytorch_model-00002-of-00003.bin",
72
+ "transformer.h.13.attn.out_proj.weight": "pytorch_model-00002-of-00003.bin",
73
+ "transformer.h.13.attn.q_proj.weight": "pytorch_model-00002-of-00003.bin",
74
+ "transformer.h.13.attn.v_proj.weight": "pytorch_model-00002-of-00003.bin",
75
+ "transformer.h.13.ln_1.bias": "pytorch_model-00002-of-00003.bin",
76
+ "transformer.h.13.ln_1.weight": "pytorch_model-00002-of-00003.bin",
77
+ "transformer.h.13.mlp.fc_in.bias": "pytorch_model-00002-of-00003.bin",
78
+ "transformer.h.13.mlp.fc_in.weight": "pytorch_model-00002-of-00003.bin",
79
+ "transformer.h.13.mlp.fc_out.bias": "pytorch_model-00002-of-00003.bin",
80
+ "transformer.h.13.mlp.fc_out.weight": "pytorch_model-00002-of-00003.bin",
81
+ "transformer.h.14.attn.bias": "pytorch_model-00002-of-00003.bin",
82
+ "transformer.h.14.attn.k_proj.weight": "pytorch_model-00002-of-00003.bin",
83
+ "transformer.h.14.attn.masked_bias": "pytorch_model-00002-of-00003.bin",
84
+ "transformer.h.14.attn.out_proj.weight": "pytorch_model-00002-of-00003.bin",
85
+ "transformer.h.14.attn.q_proj.weight": "pytorch_model-00002-of-00003.bin",
86
+ "transformer.h.14.attn.v_proj.weight": "pytorch_model-00002-of-00003.bin",
87
+ "transformer.h.14.ln_1.bias": "pytorch_model-00002-of-00003.bin",
88
+ "transformer.h.14.ln_1.weight": "pytorch_model-00002-of-00003.bin",
89
+ "transformer.h.14.mlp.fc_in.bias": "pytorch_model-00002-of-00003.bin",
90
+ "transformer.h.14.mlp.fc_in.weight": "pytorch_model-00002-of-00003.bin",
91
+ "transformer.h.14.mlp.fc_out.bias": "pytorch_model-00002-of-00003.bin",
92
+ "transformer.h.14.mlp.fc_out.weight": "pytorch_model-00002-of-00003.bin",
93
+ "transformer.h.15.attn.bias": "pytorch_model-00002-of-00003.bin",
94
+ "transformer.h.15.attn.k_proj.weight": "pytorch_model-00002-of-00003.bin",
95
+ "transformer.h.15.attn.masked_bias": "pytorch_model-00002-of-00003.bin",
96
+ "transformer.h.15.attn.out_proj.weight": "pytorch_model-00002-of-00003.bin",
97
+ "transformer.h.15.attn.q_proj.weight": "pytorch_model-00002-of-00003.bin",
98
+ "transformer.h.15.attn.v_proj.weight": "pytorch_model-00002-of-00003.bin",
99
+ "transformer.h.15.ln_1.bias": "pytorch_model-00002-of-00003.bin",
100
+ "transformer.h.15.ln_1.weight": "pytorch_model-00002-of-00003.bin",
101
+ "transformer.h.15.mlp.fc_in.bias": "pytorch_model-00002-of-00003.bin",
102
+ "transformer.h.15.mlp.fc_in.weight": "pytorch_model-00002-of-00003.bin",
103
+ "transformer.h.15.mlp.fc_out.bias": "pytorch_model-00002-of-00003.bin",
104
+ "transformer.h.15.mlp.fc_out.weight": "pytorch_model-00002-of-00003.bin",
105
+ "transformer.h.16.attn.bias": "pytorch_model-00002-of-00003.bin",
106
+ "transformer.h.16.attn.k_proj.weight": "pytorch_model-00002-of-00003.bin",
107
+ "transformer.h.16.attn.masked_bias": "pytorch_model-00002-of-00003.bin",
108
+ "transformer.h.16.attn.out_proj.weight": "pytorch_model-00002-of-00003.bin",
109
+ "transformer.h.16.attn.q_proj.weight": "pytorch_model-00002-of-00003.bin",
110
+ "transformer.h.16.attn.v_proj.weight": "pytorch_model-00002-of-00003.bin",
111
+ "transformer.h.16.ln_1.bias": "pytorch_model-00002-of-00003.bin",
112
+ "transformer.h.16.ln_1.weight": "pytorch_model-00002-of-00003.bin",
113
+ "transformer.h.16.mlp.fc_in.bias": "pytorch_model-00002-of-00003.bin",
114
+ "transformer.h.16.mlp.fc_in.weight": "pytorch_model-00002-of-00003.bin",
115
+ "transformer.h.16.mlp.fc_out.bias": "pytorch_model-00002-of-00003.bin",
116
+ "transformer.h.16.mlp.fc_out.weight": "pytorch_model-00002-of-00003.bin",
117
+ "transformer.h.17.attn.bias": "pytorch_model-00002-of-00003.bin",
118
+ "transformer.h.17.attn.k_proj.weight": "pytorch_model-00002-of-00003.bin",
119
+ "transformer.h.17.attn.masked_bias": "pytorch_model-00002-of-00003.bin",
120
+ "transformer.h.17.attn.out_proj.weight": "pytorch_model-00002-of-00003.bin",
121
+ "transformer.h.17.attn.q_proj.weight": "pytorch_model-00002-of-00003.bin",
122
+ "transformer.h.17.attn.v_proj.weight": "pytorch_model-00002-of-00003.bin",
123
+ "transformer.h.17.ln_1.bias": "pytorch_model-00002-of-00003.bin",
124
+ "transformer.h.17.ln_1.weight": "pytorch_model-00002-of-00003.bin",
125
+ "transformer.h.17.mlp.fc_in.bias": "pytorch_model-00002-of-00003.bin",
126
+ "transformer.h.17.mlp.fc_in.weight": "pytorch_model-00002-of-00003.bin",
127
+ "transformer.h.17.mlp.fc_out.bias": "pytorch_model-00002-of-00003.bin",
128
+ "transformer.h.17.mlp.fc_out.weight": "pytorch_model-00002-of-00003.bin",
129
+ "transformer.h.18.attn.bias": "pytorch_model-00002-of-00003.bin",
130
+ "transformer.h.18.attn.k_proj.weight": "pytorch_model-00002-of-00003.bin",
131
+ "transformer.h.18.attn.masked_bias": "pytorch_model-00002-of-00003.bin",
132
+ "transformer.h.18.attn.out_proj.weight": "pytorch_model-00002-of-00003.bin",
133
+ "transformer.h.18.attn.q_proj.weight": "pytorch_model-00002-of-00003.bin",
134
+ "transformer.h.18.attn.v_proj.weight": "pytorch_model-00002-of-00003.bin",
135
+ "transformer.h.18.ln_1.bias": "pytorch_model-00002-of-00003.bin",
136
+ "transformer.h.18.ln_1.weight": "pytorch_model-00002-of-00003.bin",
137
+ "transformer.h.18.mlp.fc_in.bias": "pytorch_model-00002-of-00003.bin",
138
+ "transformer.h.18.mlp.fc_in.weight": "pytorch_model-00002-of-00003.bin",
139
+ "transformer.h.18.mlp.fc_out.bias": "pytorch_model-00002-of-00003.bin",
140
+ "transformer.h.18.mlp.fc_out.weight": "pytorch_model-00002-of-00003.bin",
141
+ "transformer.h.19.attn.bias": "pytorch_model-00002-of-00003.bin",
142
+ "transformer.h.19.attn.k_proj.weight": "pytorch_model-00002-of-00003.bin",
143
+ "transformer.h.19.attn.masked_bias": "pytorch_model-00002-of-00003.bin",
144
+ "transformer.h.19.attn.out_proj.weight": "pytorch_model-00002-of-00003.bin",
145
+ "transformer.h.19.attn.q_proj.weight": "pytorch_model-00002-of-00003.bin",
146
+ "transformer.h.19.attn.v_proj.weight": "pytorch_model-00002-of-00003.bin",
147
+ "transformer.h.19.ln_1.bias": "pytorch_model-00002-of-00003.bin",
148
+ "transformer.h.19.ln_1.weight": "pytorch_model-00002-of-00003.bin",
149
+ "transformer.h.19.mlp.fc_in.bias": "pytorch_model-00002-of-00003.bin",
150
+ "transformer.h.19.mlp.fc_in.weight": "pytorch_model-00002-of-00003.bin",
151
+ "transformer.h.19.mlp.fc_out.bias": "pytorch_model-00002-of-00003.bin",
152
+ "transformer.h.19.mlp.fc_out.weight": "pytorch_model-00002-of-00003.bin",
153
+ "transformer.h.2.attn.bias": "pytorch_model-00001-of-00003.bin",
154
+ "transformer.h.2.attn.k_proj.weight": "pytorch_model-00001-of-00003.bin",
155
+ "transformer.h.2.attn.masked_bias": "pytorch_model-00001-of-00003.bin",
156
+ "transformer.h.2.attn.out_proj.weight": "pytorch_model-00001-of-00003.bin",
157
+ "transformer.h.2.attn.q_proj.weight": "pytorch_model-00001-of-00003.bin",
158
+ "transformer.h.2.attn.v_proj.weight": "pytorch_model-00001-of-00003.bin",
159
+ "transformer.h.2.ln_1.bias": "pytorch_model-00001-of-00003.bin",
160
+ "transformer.h.2.ln_1.weight": "pytorch_model-00001-of-00003.bin",
161
+ "transformer.h.2.mlp.fc_in.bias": "pytorch_model-00001-of-00003.bin",
162
+ "transformer.h.2.mlp.fc_in.weight": "pytorch_model-00001-of-00003.bin",
163
+ "transformer.h.2.mlp.fc_out.bias": "pytorch_model-00001-of-00003.bin",
164
+ "transformer.h.2.mlp.fc_out.weight": "pytorch_model-00001-of-00003.bin",
165
+ "transformer.h.20.attn.bias": "pytorch_model-00002-of-00003.bin",
166
+ "transformer.h.20.attn.k_proj.weight": "pytorch_model-00002-of-00003.bin",
167
+ "transformer.h.20.attn.masked_bias": "pytorch_model-00002-of-00003.bin",
168
+ "transformer.h.20.attn.out_proj.weight": "pytorch_model-00002-of-00003.bin",
169
+ "transformer.h.20.attn.q_proj.weight": "pytorch_model-00002-of-00003.bin",
170
+ "transformer.h.20.attn.v_proj.weight": "pytorch_model-00002-of-00003.bin",
171
+ "transformer.h.20.ln_1.bias": "pytorch_model-00002-of-00003.bin",
172
+ "transformer.h.20.ln_1.weight": "pytorch_model-00002-of-00003.bin",
173
+ "transformer.h.20.mlp.fc_in.bias": "pytorch_model-00002-of-00003.bin",
174
+ "transformer.h.20.mlp.fc_in.weight": "pytorch_model-00002-of-00003.bin",
175
+ "transformer.h.20.mlp.fc_out.bias": "pytorch_model-00002-of-00003.bin",
176
+ "transformer.h.20.mlp.fc_out.weight": "pytorch_model-00002-of-00003.bin",
177
+ "transformer.h.21.attn.bias": "pytorch_model-00002-of-00003.bin",
178
+ "transformer.h.21.attn.k_proj.weight": "pytorch_model-00002-of-00003.bin",
179
+ "transformer.h.21.attn.masked_bias": "pytorch_model-00002-of-00003.bin",
180
+ "transformer.h.21.attn.out_proj.weight": "pytorch_model-00002-of-00003.bin",
181
+ "transformer.h.21.attn.q_proj.weight": "pytorch_model-00002-of-00003.bin",
182
+ "transformer.h.21.attn.v_proj.weight": "pytorch_model-00002-of-00003.bin",
183
+ "transformer.h.21.ln_1.bias": "pytorch_model-00002-of-00003.bin",
184
+ "transformer.h.21.ln_1.weight": "pytorch_model-00002-of-00003.bin",
185
+ "transformer.h.21.mlp.fc_in.bias": "pytorch_model-00002-of-00003.bin",
186
+ "transformer.h.21.mlp.fc_in.weight": "pytorch_model-00002-of-00003.bin",
187
+ "transformer.h.21.mlp.fc_out.bias": "pytorch_model-00002-of-00003.bin",
188
+ "transformer.h.21.mlp.fc_out.weight": "pytorch_model-00002-of-00003.bin",
189
+ "transformer.h.22.attn.bias": "pytorch_model-00002-of-00003.bin",
190
+ "transformer.h.22.attn.k_proj.weight": "pytorch_model-00002-of-00003.bin",
191
+ "transformer.h.22.attn.masked_bias": "pytorch_model-00002-of-00003.bin",
192
+ "transformer.h.22.attn.out_proj.weight": "pytorch_model-00002-of-00003.bin",
193
+ "transformer.h.22.attn.q_proj.weight": "pytorch_model-00002-of-00003.bin",
194
+ "transformer.h.22.attn.v_proj.weight": "pytorch_model-00002-of-00003.bin",
195
+ "transformer.h.22.ln_1.bias": "pytorch_model-00002-of-00003.bin",
196
+ "transformer.h.22.ln_1.weight": "pytorch_model-00002-of-00003.bin",
197
+ "transformer.h.22.mlp.fc_in.bias": "pytorch_model-00002-of-00003.bin",
198
+ "transformer.h.22.mlp.fc_in.weight": "pytorch_model-00002-of-00003.bin",
199
+ "transformer.h.22.mlp.fc_out.bias": "pytorch_model-00002-of-00003.bin",
200
+ "transformer.h.22.mlp.fc_out.weight": "pytorch_model-00002-of-00003.bin",
201
+ "transformer.h.23.attn.bias": "pytorch_model-00002-of-00003.bin",
202
+ "transformer.h.23.attn.k_proj.weight": "pytorch_model-00002-of-00003.bin",
203
+ "transformer.h.23.attn.masked_bias": "pytorch_model-00002-of-00003.bin",
204
+ "transformer.h.23.attn.out_proj.weight": "pytorch_model-00002-of-00003.bin",
205
+ "transformer.h.23.attn.q_proj.weight": "pytorch_model-00002-of-00003.bin",
206
+ "transformer.h.23.attn.v_proj.weight": "pytorch_model-00002-of-00003.bin",
207
+ "transformer.h.23.ln_1.bias": "pytorch_model-00002-of-00003.bin",
208
+ "transformer.h.23.ln_1.weight": "pytorch_model-00002-of-00003.bin",
209
+ "transformer.h.23.mlp.fc_in.bias": "pytorch_model-00003-of-00003.bin",
210
+ "transformer.h.23.mlp.fc_in.weight": "pytorch_model-00003-of-00003.bin",
211
+ "transformer.h.23.mlp.fc_out.bias": "pytorch_model-00003-of-00003.bin",
212
+ "transformer.h.23.mlp.fc_out.weight": "pytorch_model-00003-of-00003.bin",
213
+ "transformer.h.24.attn.bias": "pytorch_model-00003-of-00003.bin",
214
+ "transformer.h.24.attn.k_proj.weight": "pytorch_model-00003-of-00003.bin",
215
+ "transformer.h.24.attn.masked_bias": "pytorch_model-00003-of-00003.bin",
216
+ "transformer.h.24.attn.out_proj.weight": "pytorch_model-00003-of-00003.bin",
217
+ "transformer.h.24.attn.q_proj.weight": "pytorch_model-00003-of-00003.bin",
218
+ "transformer.h.24.attn.v_proj.weight": "pytorch_model-00003-of-00003.bin",
219
+ "transformer.h.24.ln_1.bias": "pytorch_model-00003-of-00003.bin",
220
+ "transformer.h.24.ln_1.weight": "pytorch_model-00003-of-00003.bin",
221
+ "transformer.h.24.mlp.fc_in.bias": "pytorch_model-00003-of-00003.bin",
222
+ "transformer.h.24.mlp.fc_in.weight": "pytorch_model-00003-of-00003.bin",
223
+ "transformer.h.24.mlp.fc_out.bias": "pytorch_model-00003-of-00003.bin",
224
+ "transformer.h.24.mlp.fc_out.weight": "pytorch_model-00003-of-00003.bin",
225
+ "transformer.h.25.attn.bias": "pytorch_model-00003-of-00003.bin",
226
+ "transformer.h.25.attn.k_proj.weight": "pytorch_model-00003-of-00003.bin",
227
+ "transformer.h.25.attn.masked_bias": "pytorch_model-00003-of-00003.bin",
228
+ "transformer.h.25.attn.out_proj.weight": "pytorch_model-00003-of-00003.bin",
229
+ "transformer.h.25.attn.q_proj.weight": "pytorch_model-00003-of-00003.bin",
230
+ "transformer.h.25.attn.v_proj.weight": "pytorch_model-00003-of-00003.bin",
231
+ "transformer.h.25.ln_1.bias": "pytorch_model-00003-of-00003.bin",
232
+ "transformer.h.25.ln_1.weight": "pytorch_model-00003-of-00003.bin",
233
+ "transformer.h.25.mlp.fc_in.bias": "pytorch_model-00003-of-00003.bin",
234
+ "transformer.h.25.mlp.fc_in.weight": "pytorch_model-00003-of-00003.bin",
235
+ "transformer.h.25.mlp.fc_out.bias": "pytorch_model-00003-of-00003.bin",
236
+ "transformer.h.25.mlp.fc_out.weight": "pytorch_model-00003-of-00003.bin",
237
+ "transformer.h.26.attn.bias": "pytorch_model-00003-of-00003.bin",
238
+ "transformer.h.26.attn.k_proj.weight": "pytorch_model-00003-of-00003.bin",
239
+ "transformer.h.26.attn.masked_bias": "pytorch_model-00003-of-00003.bin",
240
+ "transformer.h.26.attn.out_proj.weight": "pytorch_model-00003-of-00003.bin",
241
+ "transformer.h.26.attn.q_proj.weight": "pytorch_model-00003-of-00003.bin",
242
+ "transformer.h.26.attn.v_proj.weight": "pytorch_model-00003-of-00003.bin",
243
+ "transformer.h.26.ln_1.bias": "pytorch_model-00003-of-00003.bin",
244
+ "transformer.h.26.ln_1.weight": "pytorch_model-00003-of-00003.bin",
245
+ "transformer.h.26.mlp.fc_in.bias": "pytorch_model-00003-of-00003.bin",
246
+ "transformer.h.26.mlp.fc_in.weight": "pytorch_model-00003-of-00003.bin",
247
+ "transformer.h.26.mlp.fc_out.bias": "pytorch_model-00003-of-00003.bin",
248
+ "transformer.h.26.mlp.fc_out.weight": "pytorch_model-00003-of-00003.bin",
249
+ "transformer.h.27.attn.bias": "pytorch_model-00003-of-00003.bin",
250
+ "transformer.h.27.attn.k_proj.weight": "pytorch_model-00003-of-00003.bin",
251
+ "transformer.h.27.attn.masked_bias": "pytorch_model-00003-of-00003.bin",
252
+ "transformer.h.27.attn.out_proj.weight": "pytorch_model-00003-of-00003.bin",
253
+ "transformer.h.27.attn.q_proj.weight": "pytorch_model-00003-of-00003.bin",
254
+ "transformer.h.27.attn.v_proj.weight": "pytorch_model-00003-of-00003.bin",
255
+ "transformer.h.27.ln_1.bias": "pytorch_model-00003-of-00003.bin",
256
+ "transformer.h.27.ln_1.weight": "pytorch_model-00003-of-00003.bin",
257
+ "transformer.h.27.mlp.fc_in.bias": "pytorch_model-00003-of-00003.bin",
258
+ "transformer.h.27.mlp.fc_in.weight": "pytorch_model-00003-of-00003.bin",
259
+ "transformer.h.27.mlp.fc_out.bias": "pytorch_model-00003-of-00003.bin",
260
+ "transformer.h.27.mlp.fc_out.weight": "pytorch_model-00003-of-00003.bin",
261
+ "transformer.h.3.attn.bias": "pytorch_model-00001-of-00003.bin",
262
+ "transformer.h.3.attn.k_proj.weight": "pytorch_model-00001-of-00003.bin",
263
+ "transformer.h.3.attn.masked_bias": "pytorch_model-00001-of-00003.bin",
264
+ "transformer.h.3.attn.out_proj.weight": "pytorch_model-00001-of-00003.bin",
265
+ "transformer.h.3.attn.q_proj.weight": "pytorch_model-00001-of-00003.bin",
266
+ "transformer.h.3.attn.v_proj.weight": "pytorch_model-00001-of-00003.bin",
267
+ "transformer.h.3.ln_1.bias": "pytorch_model-00001-of-00003.bin",
268
+ "transformer.h.3.ln_1.weight": "pytorch_model-00001-of-00003.bin",
269
+ "transformer.h.3.mlp.fc_in.bias": "pytorch_model-00001-of-00003.bin",
270
+ "transformer.h.3.mlp.fc_in.weight": "pytorch_model-00001-of-00003.bin",
271
+ "transformer.h.3.mlp.fc_out.bias": "pytorch_model-00001-of-00003.bin",
272
+ "transformer.h.3.mlp.fc_out.weight": "pytorch_model-00001-of-00003.bin",
273
+ "transformer.h.4.attn.bias": "pytorch_model-00001-of-00003.bin",
274
+ "transformer.h.4.attn.k_proj.weight": "pytorch_model-00001-of-00003.bin",
275
+ "transformer.h.4.attn.masked_bias": "pytorch_model-00001-of-00003.bin",
276
+ "transformer.h.4.attn.out_proj.weight": "pytorch_model-00001-of-00003.bin",
277
+ "transformer.h.4.attn.q_proj.weight": "pytorch_model-00001-of-00003.bin",
278
+ "transformer.h.4.attn.v_proj.weight": "pytorch_model-00001-of-00003.bin",
279
+ "transformer.h.4.ln_1.bias": "pytorch_model-00001-of-00003.bin",
280
+ "transformer.h.4.ln_1.weight": "pytorch_model-00001-of-00003.bin",
281
+ "transformer.h.4.mlp.fc_in.bias": "pytorch_model-00001-of-00003.bin",
282
+ "transformer.h.4.mlp.fc_in.weight": "pytorch_model-00001-of-00003.bin",
283
+ "transformer.h.4.mlp.fc_out.bias": "pytorch_model-00001-of-00003.bin",
284
+ "transformer.h.4.mlp.fc_out.weight": "pytorch_model-00001-of-00003.bin",
285
+ "transformer.h.5.attn.bias": "pytorch_model-00001-of-00003.bin",
286
+ "transformer.h.5.attn.k_proj.weight": "pytorch_model-00001-of-00003.bin",
287
+ "transformer.h.5.attn.masked_bias": "pytorch_model-00001-of-00003.bin",
288
+ "transformer.h.5.attn.out_proj.weight": "pytorch_model-00001-of-00003.bin",
289
+ "transformer.h.5.attn.q_proj.weight": "pytorch_model-00001-of-00003.bin",
290
+ "transformer.h.5.attn.v_proj.weight": "pytorch_model-00001-of-00003.bin",
291
+ "transformer.h.5.ln_1.bias": "pytorch_model-00001-of-00003.bin",
292
+ "transformer.h.5.ln_1.weight": "pytorch_model-00001-of-00003.bin",
293
+ "transformer.h.5.mlp.fc_in.bias": "pytorch_model-00001-of-00003.bin",
294
+ "transformer.h.5.mlp.fc_in.weight": "pytorch_model-00001-of-00003.bin",
295
+ "transformer.h.5.mlp.fc_out.bias": "pytorch_model-00001-of-00003.bin",
296
+ "transformer.h.5.mlp.fc_out.weight": "pytorch_model-00001-of-00003.bin",
297
+ "transformer.h.6.attn.bias": "pytorch_model-00001-of-00003.bin",
298
+ "transformer.h.6.attn.k_proj.weight": "pytorch_model-00001-of-00003.bin",
299
+ "transformer.h.6.attn.masked_bias": "pytorch_model-00001-of-00003.bin",
300
+ "transformer.h.6.attn.out_proj.weight": "pytorch_model-00001-of-00003.bin",
301
+ "transformer.h.6.attn.q_proj.weight": "pytorch_model-00001-of-00003.bin",
302
+ "transformer.h.6.attn.v_proj.weight": "pytorch_model-00001-of-00003.bin",
303
+ "transformer.h.6.ln_1.bias": "pytorch_model-00001-of-00003.bin",
304
+ "transformer.h.6.ln_1.weight": "pytorch_model-00001-of-00003.bin",
305
+ "transformer.h.6.mlp.fc_in.bias": "pytorch_model-00001-of-00003.bin",
306
+ "transformer.h.6.mlp.fc_in.weight": "pytorch_model-00001-of-00003.bin",
307
+ "transformer.h.6.mlp.fc_out.bias": "pytorch_model-00001-of-00003.bin",
308
+ "transformer.h.6.mlp.fc_out.weight": "pytorch_model-00001-of-00003.bin",
309
+ "transformer.h.7.attn.bias": "pytorch_model-00001-of-00003.bin",
310
+ "transformer.h.7.attn.k_proj.weight": "pytorch_model-00001-of-00003.bin",
311
+ "transformer.h.7.attn.masked_bias": "pytorch_model-00001-of-00003.bin",
312
+ "transformer.h.7.attn.out_proj.weight": "pytorch_model-00001-of-00003.bin",
313
+ "transformer.h.7.attn.q_proj.weight": "pytorch_model-00001-of-00003.bin",
314
+ "transformer.h.7.attn.v_proj.weight": "pytorch_model-00001-of-00003.bin",
315
+ "transformer.h.7.ln_1.bias": "pytorch_model-00001-of-00003.bin",
316
+ "transformer.h.7.ln_1.weight": "pytorch_model-00001-of-00003.bin",
317
+ "transformer.h.7.mlp.fc_in.bias": "pytorch_model-00001-of-00003.bin",
318
+ "transformer.h.7.mlp.fc_in.weight": "pytorch_model-00001-of-00003.bin",
319
+ "transformer.h.7.mlp.fc_out.bias": "pytorch_model-00001-of-00003.bin",
320
+ "transformer.h.7.mlp.fc_out.weight": "pytorch_model-00001-of-00003.bin",
321
+ "transformer.h.8.attn.bias": "pytorch_model-00001-of-00003.bin",
322
+ "transformer.h.8.attn.k_proj.weight": "pytorch_model-00001-of-00003.bin",
323
+ "transformer.h.8.attn.masked_bias": "pytorch_model-00001-of-00003.bin",
324
+ "transformer.h.8.attn.out_proj.weight": "pytorch_model-00001-of-00003.bin",
325
+ "transformer.h.8.attn.q_proj.weight": "pytorch_model-00001-of-00003.bin",
326
+ "transformer.h.8.attn.v_proj.weight": "pytorch_model-00001-of-00003.bin",
327
+ "transformer.h.8.ln_1.bias": "pytorch_model-00001-of-00003.bin",
328
+ "transformer.h.8.ln_1.weight": "pytorch_model-00001-of-00003.bin",
329
+ "transformer.h.8.mlp.fc_in.bias": "pytorch_model-00001-of-00003.bin",
330
+ "transformer.h.8.mlp.fc_in.weight": "pytorch_model-00001-of-00003.bin",
331
+ "transformer.h.8.mlp.fc_out.bias": "pytorch_model-00001-of-00003.bin",
332
+ "transformer.h.8.mlp.fc_out.weight": "pytorch_model-00001-of-00003.bin",
333
+ "transformer.h.9.attn.bias": "pytorch_model-00001-of-00003.bin",
334
+ "transformer.h.9.attn.k_proj.weight": "pytorch_model-00001-of-00003.bin",
335
+ "transformer.h.9.attn.masked_bias": "pytorch_model-00001-of-00003.bin",
336
+ "transformer.h.9.attn.out_proj.weight": "pytorch_model-00001-of-00003.bin",
337
+ "transformer.h.9.attn.q_proj.weight": "pytorch_model-00001-of-00003.bin",
338
+ "transformer.h.9.attn.v_proj.weight": "pytorch_model-00001-of-00003.bin",
339
+ "transformer.h.9.ln_1.bias": "pytorch_model-00001-of-00003.bin",
340
+ "transformer.h.9.ln_1.weight": "pytorch_model-00001-of-00003.bin",
341
+ "transformer.h.9.mlp.fc_in.bias": "pytorch_model-00001-of-00003.bin",
342
+ "transformer.h.9.mlp.fc_in.weight": "pytorch_model-00001-of-00003.bin",
343
+ "transformer.h.9.mlp.fc_out.bias": "pytorch_model-00001-of-00003.bin",
344
+ "transformer.h.9.mlp.fc_out.weight": "pytorch_model-00001-of-00003.bin",
345
+ "transformer.ln_f.bias": "pytorch_model-00003-of-00003.bin",
346
+ "transformer.ln_f.weight": "pytorch_model-00003-of-00003.bin",
347
+ "transformer.wte.weight": "pytorch_model-00001-of-00003.bin"
348
+ }
349
+ }
prompt/special_tokens_map.json ADDED
@@ -0,0 +1,6 @@
+ {
+ "bos_token": "[BOS]",
+ "eos_token": "[EOS]",
+ "pad_token": "[PAD]",
+ "unk_token": "[UNK]"
+ }
prompt/tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
prompt/tokenizer_config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "bos_token": "[BOS]",
+ "eos_token": "[EOS]",
+ "model_max_length": 2048,
+ "name_or_path": "anhdungitvn/ko-gpt-bot-sc-7b",
+ "pad_token": "[PAD]",
+ "special_tokens_map_file": null,
+ "tokenizer_class": "PreTrainedTokenizerFast",
+ "unk_token": "[UNK]"
+ }
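
Together, tokenizer.json, tokenizer_config.json and special_tokens_map.json define a fast tokenizer with [BOS]/[EOS]/[PAD]/[UNK] special tokens and a 2048-token maximum length. An illustrative way to load and sanity-check it, assuming the files have been fetched to a local prompt directory:

```python
# Illustrative only: load the fast tokenizer defined by the files in this commit.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("prompt")
print(tokenizer.bos_token, tokenizer.eos_token, tokenizer.pad_token, tokenizer.unk_token)
# Expected from the files above: [BOS] [EOS] [PAD] [UNK]
print(tokenizer.model_max_length)  # 2048
```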