{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 17,
   "id": "cb9e3f4a",
   "metadata": {
    "scrolled": false
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "[[ 1.0689347   2.265191   -1.5622562  -1.688893   -2.45416     2.135889\n",
      "   1.7398736  -3.862404    0.00977709  1.4362102  -2.4520655   3.5101898\n",
      "  -0.21547404 -2.4955537  -5.026132  ]\n",
      " [-4.470823   -0.74732316 -1.8497151  -1.6574448  -3.1596732   2.411978\n",
      "   5.1076407   4.8905473  -1.4986622  -4.3358607  -3.2250438   1.0367379\n",
      "   2.4880023   3.0673904   1.2141796 ]\n",
      " [ 1.9367265   1.5076444   2.1230702  -1.3112661   0.9106075   0.6396647\n",
      "  -3.6211638  -0.48580265 -0.6238698   3.0633202   1.5371542  -1.8506848\n",
      "  -0.1083389   1.0801586  -0.62453336]\n",
      " [-3.9424427   0.25012785  0.7759891  -1.0153494   1.8162414  -1.099473\n",
      "  -3.1727407  -1.0105685   3.4979162  -3.0031776  -2.8004663  -2.4568505\n",
      "   2.9598403  -1.500948    1.8649081 ]]\n",
      "<NDArray 4x15 @cpu(0)>\n",
      "epoch 1,loss 56561.472656\n",
      "epoch 2,loss 42602.816406\n",
      "epoch 3,loss 42106.132812\n",
      "epoch 4,loss 41838.480469\n",
      "epoch 5,loss 41568.167969\n",
      "epoch 6,loss 41354.937500\n",
      "epoch 7,loss 41176.796875\n",
      "epoch 8,loss 41011.855469\n",
      "epoch 9,loss 40905.144531\n",
      "epoch 10,loss 40748.449219\n",
      "epoch 11,loss 40617.816406\n",
      "epoch 12,loss 40510.945312\n",
      "epoch 13,loss 40406.695312\n",
      "epoch 14,loss 40326.585938\n",
      "epoch 15,loss 40287.695312\n",
      "epoch 16,loss 40216.964844\n",
      "epoch 17,loss 40170.218750\n",
      "epoch 18,loss 40134.527344\n",
      "epoch 19,loss 40077.527344\n",
      "epoch 20,loss 40025.609375\n",
      "epoch 21,loss 39979.546875\n",
      "epoch 22,loss 39932.730469\n",
      "epoch 23,loss 39885.582031\n",
      "epoch 24,loss 39843.019531\n",
      "epoch 25,loss 39787.585938\n",
      "epoch 26,loss 39750.687500\n",
      "epoch 27,loss 39685.171875\n",
      "epoch 28,loss 39655.308594\n",
      "epoch 29,loss 39595.996094\n",
      "epoch 30,loss 39524.632812\n",
      "epoch 31,loss 39478.183594\n",
      "epoch 32,loss 39409.003906\n",
      "epoch 33,loss 39348.261719\n",
      "epoch 34,loss 39276.265625\n",
      "epoch 35,loss 39237.101562\n",
      "epoch 36,loss 39312.054688\n",
      "epoch 37,loss 39097.679688\n",
      "epoch 38,loss 39049.019531\n",
      "epoch 39,loss 38984.875000\n",
      "epoch 40,loss 38907.496094\n",
      "epoch 41,loss 38864.511719\n",
      "epoch 42,loss 38769.109375\n",
      "epoch 43,loss 38714.140625\n",
      "epoch 44,loss 38598.660156\n",
      "epoch 45,loss 38574.539062\n",
      "epoch 46,loss 38453.515625\n",
      "epoch 47,loss 38386.300781\n",
      "epoch 48,loss 38318.605469\n",
      "epoch 49,loss 38251.945312\n",
      "epoch 50,loss 38202.578125\n",
      "epoch 51,loss 38169.343750\n",
      "epoch 52,loss 38052.800781\n",
      "epoch 53,loss 38003.878906\n",
      "epoch 54,loss 37925.750000\n",
      "epoch 55,loss 37865.152344\n",
      "epoch 56,loss 37797.632812\n",
      "epoch 57,loss 37756.660156\n",
      "epoch 58,loss 37744.531250\n",
      "epoch 59,loss 37628.117188\n",
      "epoch 60,loss 37560.304688\n",
      "epoch 61,loss 37493.773438\n",
      "epoch 62,loss 37505.906250\n",
      "epoch 63,loss 37378.828125\n",
      "epoch 64,loss 37320.367188\n",
      "epoch 65,loss 37261.210938\n",
      "epoch 66,loss 37194.863281\n",
      "epoch 67,loss 37146.257812\n",
      "epoch 68,loss 37075.761719\n",
      "epoch 69,loss 37000.132812\n",
      "epoch 70,loss 36920.421875\n",
      "epoch 71,loss 36836.285156\n",
      "epoch 72,loss 36740.898438\n",
      "epoch 73,loss 36676.546875\n",
      "epoch 74,loss 36587.570312\n",
      "epoch 75,loss 36497.121094\n",
      "epoch 76,loss 36377.769531\n",
      "epoch 77,loss 36233.457031\n",
      "epoch 78,loss 36080.023438\n",
      "epoch 79,loss 35940.714844\n",
      "epoch 80,loss 35809.191406\n",
      "epoch 81,loss 35675.414062\n",
      "epoch 82,loss 35600.636719\n",
      "epoch 83,loss 35447.425781\n",
      "epoch 84,loss 35389.335938\n",
      "epoch 85,loss 35233.890625\n",
      "epoch 86,loss 35113.085938\n",
      "epoch 87,loss 35012.863281\n",
      "epoch 88,loss 34894.320312\n",
      "epoch 89,loss 34826.996094\n",
      "epoch 90,loss 34720.937500\n",
      "epoch 91,loss 34603.820312\n",
      "epoch 92,loss 34504.152344\n",
      "epoch 93,loss 34414.226562\n",
      "epoch 94,loss 34332.203125\n",
      "epoch 95,loss 34221.324219\n",
      "epoch 96,loss 34132.113281\n",
      "epoch 97,loss 34026.734375\n",
      "epoch 98,loss 33926.792969\n",
      "epoch 99,loss 33826.640625\n",
      "epoch 100,loss 33736.660156\n",
      "epoch 101,loss 33687.117188\n",
      "epoch 102,loss 33571.351562\n",
      "epoch 103,loss 33452.746094\n",
      "epoch 104,loss 33411.820312\n",
      "epoch 105,loss 33277.765625\n",
      "epoch 106,loss 33189.546875\n",
      "epoch 107,loss 33106.335938\n",
      "epoch 108,loss 32998.933594\n",
      "epoch 109,loss 32908.859375\n",
      "epoch 110,loss 32813.699219\n",
      "epoch 111,loss 32720.082031\n",
      "epoch 112,loss 32680.640625\n",
      "epoch 113,loss 32598.232422\n",
      "epoch 114,loss 32535.148438\n",
      "epoch 115,loss 32366.080078\n",
      "epoch 116,loss 32277.603516\n",
      "epoch 117,loss 32186.988281\n",
      "epoch 118,loss 32106.476562\n",
      "epoch 119,loss 32060.000000\n",
      "epoch 120,loss 31929.386719\n",
      "epoch 121,loss 31847.132812\n",
      "epoch 122,loss 31773.031250\n",
      "epoch 123,loss 31703.687500\n",
      "epoch 124,loss 31650.796875\n",
      "epoch 125,loss 31569.296875\n",
      "epoch 126,loss 31532.130859\n",
      "epoch 127,loss 31371.951172\n",
      "epoch 128,loss 31273.671875\n",
      "epoch 129,loss 31170.042969\n",
      "epoch 130,loss 31101.035156\n",
      "epoch 131,loss 30999.660156\n",
      "epoch 132,loss 30919.347656\n",
      "epoch 133,loss 30829.283203\n",
      "epoch 134,loss 30756.283203\n",
      "epoch 135,loss 30667.873047\n",
      "epoch 136,loss 30603.464844\n",
      "epoch 137,loss 30509.644531\n",
      "epoch 138,loss 30439.595703\n",
      "epoch 139,loss 30359.246094\n",
      "epoch 140,loss 30273.613281\n",
      "epoch 141,loss 30195.685547\n",
      "epoch 142,loss 30122.291016\n",
      "epoch 143,loss 30028.496094\n",
      "epoch 144,loss 29948.447266\n",
      "epoch 145,loss 29882.033203\n",
      "epoch 146,loss 29776.650391\n",
      "epoch 147,loss 29698.349609\n",
      "epoch 148,loss 29611.515625\n",
      "epoch 149,loss 29533.558594\n",
      "epoch 150,loss 29460.212891\n",
      "epoch 151,loss 29394.884766\n",
      "epoch 152,loss 29315.847656\n",
      "epoch 153,loss 29380.019531\n",
      "epoch 154,loss 29220.208984\n",
      "epoch 155,loss 29133.267578\n",
      "epoch 156,loss 29072.097656\n",
      "epoch 157,loss 29017.750000\n",
      "epoch 158,loss 28954.509766\n",
      "epoch 159,loss 28965.960938\n",
      "epoch 160,loss 28817.910156\n",
      "epoch 161,loss 28757.677734\n",
      "epoch 162,loss 28738.619141\n",
      "epoch 163,loss 28686.238281\n",
      "epoch 164,loss 28594.152344\n",
      "epoch 165,loss 28536.886719\n",
      "epoch 166,loss 28528.050781\n",
      "epoch 167,loss 28475.798828\n",
      "epoch 168,loss 28460.093750\n",
      "epoch 169,loss 28353.996094\n",
      "epoch 170,loss 28294.318359\n",
      "epoch 171,loss 28315.128906\n",
      "epoch 172,loss 28234.835938\n",
      "epoch 173,loss 28159.193359\n",
      "epoch 174,loss 28102.792969\n",
      "epoch 175,loss 28066.296875\n",
      "epoch 176,loss 28053.287109\n",
      "epoch 177,loss 28047.886719\n",
      "epoch 178,loss 27954.576172\n",
      "epoch 179,loss 27928.906250\n",
      "epoch 180,loss 27912.656250\n",
      "epoch 181,loss 27823.929688\n",
      "epoch 182,loss 27893.365234\n",
      "epoch 183,loss 27770.398438\n",
      "epoch 184,loss 27815.953125\n",
      "epoch 185,loss 27720.203125\n",
      "epoch 186,loss 27668.744141\n",
      "epoch 187,loss 27684.404297\n",
      "epoch 188,loss 27612.574219\n",
      "epoch 189,loss 27565.597656\n",
      "epoch 190,loss 27553.003906\n",
      "epoch 191,loss 27583.445312\n",
      "epoch 192,loss 27488.408203\n",
      "epoch 193,loss 27479.792969\n",
      "epoch 194,loss 27444.837891\n",
      "epoch 195,loss 27526.306641\n",
      "epoch 196,loss 27406.087891\n",
      "epoch 197,loss 27360.224609\n",
      "epoch 198,loss 27398.373047\n",
      "epoch 199,loss 27311.724609\n",
      "epoch 200,loss 27336.656250\n",
      "epoch 201,loss 27272.824219\n",
      "epoch 202,loss 27308.677734\n",
      "epoch 203,loss 27308.130859\n",
      "epoch 204,loss 27229.667969\n",
      "epoch 205,loss 27187.117188\n",
      "epoch 206,loss 27167.072266\n",
      "epoch 207,loss 27142.248047\n",
      "epoch 208,loss 27157.640625\n",
      "epoch 209,loss 27104.025391\n",
      "epoch 210,loss 27111.712891\n",
      "epoch 211,loss 27073.494141\n",
      "epoch 212,loss 27057.650391\n",
      "epoch 213,loss 27054.093750\n",
      "epoch 214,loss 27027.931641\n",
      "epoch 215,loss 27032.851562\n",
      "epoch 216,loss 27038.062500\n",
      "epoch 217,loss 26983.707031\n",
      "epoch 218,loss 26979.710938\n",
      "epoch 219,loss 26990.166016\n",
      "epoch 220,loss 27005.382812\n",
      "epoch 221,loss 26956.150391\n",
      "epoch 222,loss 26989.265625\n",
      "epoch 223,loss 26978.035156\n",
      "epoch 224,loss 26915.496094\n",
      "epoch 225,loss 26869.361328\n",
      "epoch 226,loss 26941.806641\n",
      "epoch 227,loss 26865.873047\n",
      "epoch 228,loss 26842.583984\n",
      "epoch 229,loss 26847.328125\n",
      "epoch 230,loss 26848.556641\n",
      "epoch 231,loss 26811.148438\n",
      "epoch 232,loss 26835.886719\n",
      "epoch 233,loss 26776.648438\n",
      "epoch 234,loss 26759.970703\n",
      "epoch 235,loss 26763.292969\n",
      "epoch 236,loss 26839.558594\n",
      "epoch 237,loss 26735.345703\n",
      "epoch 238,loss 26716.109375\n",
      "epoch 239,loss 26756.257812\n",
      "epoch 240,loss 26773.744141\n",
      "epoch 241,loss 26680.597656\n",
      "epoch 242,loss 26796.630859\n",
      "epoch 243,loss 26749.154297\n",
      "epoch 244,loss 26653.318359\n",
      "epoch 245,loss 26672.423828\n",
      "epoch 246,loss 26698.828125\n",
      "epoch 247,loss 26834.509766\n",
      "epoch 248,loss 26645.523438\n",
      "epoch 249,loss 26627.089844\n",
      "epoch 250,loss 26614.316406\n",
      "epoch 251,loss 26611.259766\n",
      "epoch 252,loss 26553.095703\n",
      "epoch 253,loss 26563.941406\n",
      "epoch 254,loss 26536.720703\n",
      "epoch 255,loss 26527.533203\n",
      "epoch 256,loss 26557.720703\n",
      "epoch 257,loss 26493.810547\n",
      "epoch 258,loss 26571.328125\n",
      "epoch 259,loss 26556.119141\n",
      "epoch 260,loss 26548.019531\n",
      "epoch 261,loss 26480.998047\n",
      "epoch 262,loss 26489.255859\n",
      "epoch 263,loss 26444.513672\n",
      "epoch 264,loss 26479.964844\n",
      "epoch 265,loss 26447.890625\n",
      "epoch 266,loss 26480.625000\n",
      "epoch 267,loss 26420.330078\n",
      "epoch 268,loss 26393.031250\n",
      "epoch 269,loss 26415.878906\n",
      "epoch 270,loss 26400.777344\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "epoch 271,loss 26392.912109\n",
      "epoch 272,loss 26364.824219\n",
      "epoch 273,loss 26359.369141\n",
      "epoch 274,loss 26365.332031\n",
      "epoch 275,loss 26385.958984\n",
      "epoch 276,loss 26357.148438\n",
      "epoch 277,loss 26398.021484\n",
      "epoch 278,loss 26346.478516\n",
      "epoch 279,loss 26288.644531\n",
      "epoch 280,loss 26262.447266\n",
      "epoch 281,loss 26280.035156\n",
      "epoch 282,loss 26266.203125\n",
      "epoch 283,loss 26328.601562\n",
      "epoch 284,loss 26221.490234\n",
      "epoch 285,loss 26200.435547\n",
      "epoch 286,loss 26194.234375\n",
      "epoch 287,loss 26193.222656\n",
      "epoch 288,loss 26249.519531\n",
      "epoch 289,loss 26158.583984\n",
      "epoch 290,loss 26146.792969\n",
      "epoch 291,loss 26141.416016\n",
      "epoch 292,loss 26122.695312\n",
      "epoch 293,loss 26146.513672\n",
      "epoch 294,loss 26085.804688\n",
      "epoch 295,loss 26105.613281\n",
      "epoch 296,loss 26084.583984\n",
      "epoch 297,loss 26101.529297\n",
      "epoch 298,loss 26101.160156\n",
      "epoch 299,loss 26020.455078\n",
      "epoch 300,loss 26027.171875\n",
      "epoch 301,loss 26084.482422\n",
      "epoch 302,loss 25998.939453\n",
      "epoch 303,loss 25976.568359\n",
      "epoch 304,loss 26012.287109\n",
      "epoch 305,loss 25985.052734\n",
      "epoch 306,loss 25978.824219\n",
      "epoch 307,loss 25980.519531\n",
      "epoch 308,loss 25927.890625\n",
      "epoch 309,loss 25939.359375\n",
      "epoch 310,loss 25940.898438\n",
      "epoch 311,loss 25916.341797\n",
      "epoch 312,loss 25902.490234\n",
      "epoch 313,loss 26042.771484\n",
      "epoch 314,loss 25870.892578\n",
      "epoch 315,loss 25917.806641\n",
      "epoch 316,loss 25867.701172\n",
      "epoch 317,loss 25836.867188\n",
      "epoch 318,loss 25856.753906\n",
      "epoch 319,loss 25848.218750\n",
      "epoch 320,loss 25817.132812\n",
      "epoch 321,loss 25817.722656\n",
      "epoch 322,loss 25790.142578\n",
      "epoch 323,loss 25826.802734\n",
      "epoch 324,loss 25771.359375\n",
      "epoch 325,loss 25940.017578\n",
      "epoch 326,loss 25767.818359\n",
      "epoch 327,loss 25768.162109\n",
      "epoch 328,loss 25746.757812\n",
      "epoch 329,loss 25746.343750\n",
      "epoch 330,loss 25748.849609\n",
      "epoch 331,loss 25841.648438\n",
      "epoch 332,loss 25896.267578\n",
      "epoch 333,loss 25720.042969\n",
      "epoch 334,loss 25723.679688\n",
      "epoch 335,loss 25703.740234\n",
      "epoch 336,loss 25715.429688\n",
      "epoch 337,loss 25683.857422\n",
      "epoch 338,loss 25689.607422\n",
      "epoch 339,loss 25726.898438\n",
      "epoch 340,loss 25702.232422\n",
      "epoch 341,loss 25669.458984\n",
      "epoch 342,loss 25723.324219\n",
      "epoch 343,loss 25741.281250\n",
      "epoch 344,loss 25670.197266\n",
      "epoch 345,loss 25733.541016\n",
      "epoch 346,loss 25632.617188\n",
      "epoch 347,loss 25665.912109\n",
      "epoch 348,loss 25633.341797\n",
      "epoch 349,loss 25656.958984\n",
      "epoch 350,loss 25709.265625\n",
      "epoch 351,loss 25603.318359\n",
      "epoch 352,loss 25613.644531\n",
      "epoch 353,loss 25596.000000\n",
      "epoch 354,loss 25657.898438\n",
      "epoch 355,loss 25607.837891\n",
      "epoch 356,loss 25605.443359\n",
      "epoch 357,loss 25584.384766\n",
      "epoch 358,loss 25564.210938\n",
      "epoch 359,loss 25587.681641\n",
      "epoch 360,loss 25586.289062\n",
      "epoch 361,loss 25640.271484\n",
      "epoch 362,loss 25706.542969\n",
      "epoch 363,loss 25567.281250\n",
      "epoch 364,loss 25529.076172\n",
      "epoch 365,loss 25538.126953\n",
      "epoch 366,loss 25547.707031\n",
      "epoch 367,loss 25508.478516\n",
      "epoch 368,loss 25544.523438\n",
      "epoch 369,loss 25502.296875\n",
      "epoch 370,loss 25562.365234\n",
      "epoch 371,loss 25568.123047\n",
      "epoch 372,loss 25637.517578\n",
      "epoch 373,loss 25539.796875\n",
      "epoch 374,loss 25482.818359\n",
      "epoch 375,loss 25554.857422\n",
      "epoch 376,loss 25482.068359\n",
      "epoch 377,loss 25456.902344\n",
      "epoch 378,loss 25464.552734\n",
      "epoch 379,loss 25479.843750\n",
      "epoch 380,loss 25449.898438\n",
      "epoch 381,loss 25444.523438\n",
      "epoch 382,loss 25613.148438\n",
      "epoch 383,loss 25465.843750\n",
      "epoch 384,loss 25434.287109\n",
      "epoch 385,loss 25451.429688\n",
      "epoch 386,loss 25467.140625\n",
      "epoch 387,loss 25424.257812\n",
      "epoch 388,loss 25458.328125\n",
      "epoch 389,loss 25473.470703\n",
      "epoch 390,loss 25392.689453\n",
      "epoch 391,loss 25466.781250\n",
      "epoch 392,loss 25415.148438\n",
      "epoch 393,loss 25419.822266\n",
      "epoch 394,loss 25391.697266\n",
      "epoch 395,loss 25514.646484\n",
      "epoch 396,loss 25370.402344\n",
      "epoch 397,loss 25383.289062\n",
      "epoch 398,loss 25378.921875\n",
      "epoch 399,loss 25358.925781\n",
      "epoch 400,loss 25377.595703\n",
      "epoch 401,loss 25391.601562\n",
      "epoch 402,loss 25331.548828\n",
      "epoch 403,loss 25358.023438\n",
      "epoch 404,loss 25321.984375\n",
      "epoch 405,loss 25349.556641\n",
      "epoch 406,loss 25321.861328\n",
      "epoch 407,loss 25338.015625\n",
      "epoch 408,loss 25308.015625\n",
      "epoch 409,loss 25320.445312\n",
      "epoch 410,loss 25399.539062\n",
      "epoch 411,loss 25372.283203\n",
      "epoch 412,loss 25301.332031\n",
      "epoch 413,loss 25295.708984\n",
      "epoch 414,loss 25426.929688\n",
      "epoch 415,loss 25401.191406\n",
      "epoch 416,loss 25307.123047\n",
      "epoch 417,loss 25304.378906\n",
      "epoch 418,loss 25263.195312\n",
      "epoch 419,loss 25283.662109\n",
      "epoch 420,loss 25554.373047\n",
      "epoch 421,loss 25231.175781\n",
      "epoch 422,loss 25271.304688\n",
      "epoch 423,loss 25263.382812\n",
      "epoch 424,loss 25267.130859\n",
      "epoch 425,loss 25243.693359\n",
      "epoch 426,loss 25206.169922\n",
      "epoch 427,loss 25229.435547\n",
      "epoch 428,loss 25233.380859\n",
      "epoch 429,loss 25257.445312\n",
      "epoch 430,loss 25217.208984\n",
      "epoch 431,loss 25206.396484\n",
      "epoch 432,loss 25186.080078\n",
      "epoch 433,loss 25226.236328\n",
      "epoch 434,loss 25205.011719\n",
      "epoch 435,loss 25173.878906\n",
      "epoch 436,loss 25233.613281\n",
      "epoch 437,loss 25145.775391\n",
      "epoch 438,loss 25150.759766\n",
      "epoch 439,loss 25179.560547\n",
      "epoch 440,loss 25239.619141\n",
      "epoch 441,loss 25123.857422\n",
      "epoch 442,loss 25267.921875\n",
      "epoch 443,loss 25252.523438\n",
      "epoch 444,loss 25132.910156\n",
      "epoch 445,loss 25106.503906\n",
      "epoch 446,loss 25101.355469\n",
      "epoch 447,loss 25109.091797\n",
      "epoch 448,loss 25144.560547\n",
      "epoch 449,loss 25088.761719\n",
      "epoch 450,loss 25126.451172\n",
      "epoch 451,loss 25110.605469\n",
      "epoch 452,loss 25123.816406\n",
      "epoch 453,loss 25207.419922\n",
      "epoch 454,loss 25061.212891\n",
      "epoch 455,loss 25071.896484\n",
      "epoch 456,loss 25038.730469\n",
      "epoch 457,loss 25064.296875\n",
      "epoch 458,loss 25093.455078\n",
      "epoch 459,loss 25029.181641\n",
      "epoch 460,loss 25019.882812\n",
      "epoch 461,loss 25276.685547\n",
      "epoch 462,loss 25028.660156\n",
      "epoch 463,loss 25011.585938\n",
      "epoch 464,loss 25041.091797\n",
      "epoch 465,loss 24995.306641\n",
      "epoch 466,loss 24981.917969\n",
      "epoch 467,loss 24999.974609\n",
      "epoch 468,loss 24985.882812\n",
      "epoch 469,loss 25011.107422\n",
      "epoch 470,loss 24959.929688\n",
      "epoch 471,loss 25007.640625\n",
      "epoch 472,loss 24991.257812\n",
      "epoch 473,loss 24977.978516\n",
      "epoch 474,loss 24940.398438\n",
      "epoch 475,loss 24970.406250\n",
      "epoch 476,loss 24993.394531\n",
      "epoch 477,loss 25002.720703\n",
      "epoch 478,loss 24915.601562\n",
      "epoch 479,loss 24895.238281\n",
      "epoch 480,loss 24984.671875\n",
      "epoch 481,loss 24929.605469\n",
      "epoch 482,loss 24933.478516\n",
      "epoch 483,loss 24921.449219\n",
      "epoch 484,loss 24891.255859\n",
      "epoch 485,loss 24862.134766\n",
      "epoch 486,loss 24916.953125\n",
      "epoch 487,loss 24949.410156\n",
      "epoch 488,loss 24869.046875\n",
      "epoch 489,loss 24867.203125\n",
      "epoch 490,loss 24856.695312\n",
      "epoch 491,loss 24957.310547\n",
      "epoch 492,loss 24867.222656\n",
      "epoch 493,loss 24839.501953\n",
      "epoch 494,loss 24842.478516\n",
      "epoch 495,loss 24836.644531\n",
      "epoch 496,loss 24828.447266\n",
      "epoch 497,loss 24807.787109\n",
      "epoch 498,loss 24815.199219\n",
      "epoch 499,loss 24791.978516\n",
      "epoch 500,loss 24789.363281\n"
     ]
    }
   ],
   "source": [
    "%matplotlib inline\n",
    "import d2lzh as d2l\n",
    "import xlrd\n",
    "import random\n",
    "import math\n",
    "from IPython import display\n",
    "from matplotlib import pyplot as plt\n",
    "from mxnet import autograd, nd\n",
    "# Hyperparameters: minibatch size and the 4-15-1 MLP layer widths.\n",
    "batch_size =10\n",
    "num_inputs = 4\n",
    "num_outputs = 1\n",
    "num_hiddens=15\n",
    "\n",
    "\n",
    "# Model parameters: w/b form the hidden layer, w1/b1 the output layer.\n",
    "# Weights are drawn from N(0, 2**2); biases start at zero.\n",
    "w = nd.random.normal(scale=2, shape=(num_inputs, num_hiddens))\n",
    "b = nd.zeros(num_hiddens)\n",
    "w1=nd.random.normal(scale=2, shape=(num_hiddens, num_outputs))\n",
    "b1= nd.zeros(num_outputs)\n",
    "\n",
    "# Allocate gradient buffers so autograd.record()/backward() can fill them.\n",
    "w.attach_grad()\n",
    "b.attach_grad()\n",
    "w1.attach_grad()\n",
    "b1.attach_grad()\n",
    "\n",
    "params=[w,b,w1,b1]  # order consumed by sgd()\n",
    "print(w)\n",
    "def use_svg_display():\n",
    "    \"\"\"Render matplotlib figures inline as vector (SVG) graphics.\"\"\"\n",
    "    # Display with vector graphics\n",
    "    display.set_matplotlib_formats('svg')\n",
    "\n",
    "def set_figsize(figsize=(3.5, 2.5)):\n",
    "    \"\"\"Switch to SVG rendering and set the default figure size (inches).\"\"\"\n",
    "    use_svg_display()\n",
    "    # Set the figure size\n",
    "    plt.rcParams['figure.figsize'] = figsize\n",
    "\n",
    "def squared_loss(y_hat, y):\n",
    "    \"\"\"Element-wise halved squared error: (y_hat - y)**2 / 2.\"\"\"\n",
    "    residual = y_hat - y\n",
    "    return residual ** 2 / 2\n",
    "\n",
    "def relu(X):\n",
    "    \"\"\"Element-wise rectified linear unit: max(X, 0).\"\"\"\n",
    "    zero_clipped = nd.maximum(X,0)\n",
    "    return zero_clipped\n",
    "\n",
    "def net(X):\n",
    "    \"\"\"Forward pass of the one-hidden-layer MLP.\n",
    "\n",
    "    Uses the module-level parameters w, b (hidden layer) and w1, b1\n",
    "    (output layer); the hidden activations pass through ReLU.\n",
    "    \"\"\"\n",
    "    H=relu(nd.dot(X,w)+b)\n",
    "    Y=nd.dot(H, w1) + b1\n",
    "    return Y\n",
    "\n",
    "def excel2matrix(path):\n",
    "    \"\"\"Load the first sheet of an .xls workbook into an NDArray.\n",
    "\n",
    "    Parameters\n",
    "    ----------\n",
    "    path : str\n",
    "        Path to an xlrd-readable Excel file.\n",
    "\n",
    "    Returns\n",
    "    -------\n",
    "    NDArray of shape (nrows, ncols) holding the sheet's cell values.\n",
    "    \"\"\"\n",
    "    data = xlrd.open_workbook(path)\n",
    "    table = data.sheets()[0]\n",
    "    nrows = table.nrows  # number of rows\n",
    "    ncols = table.ncols  # number of columns\n",
    "    # Pre-allocate with zeros: every row is overwritten by the loop below,\n",
    "    # so the original random-normal initialization was misleading dead weight.\n",
    "    datamatrix = nd.zeros(shape=(nrows, ncols))\n",
    "    for i in range(nrows):\n",
    "        rows = table.row_values(i)\n",
    "        datamatrix[i,:] = rows\n",
    "    return datamatrix\n",
    " \n",
    "def data_iter(batch_size, features, labels):\n",
    "    \"\"\"Yield (features, labels) minibatches in a random sample order.\"\"\"\n",
    "    num_examples = len(features)\n",
    "    indices = list(range(num_examples))\n",
    "    random.shuffle(indices)  # samples are read in random order\n",
    "    for start in range(0, num_examples, batch_size):\n",
    "        batch_idx = nd.array(indices[start: min(start + batch_size, num_examples)])\n",
    "        # take() gathers rows by index; the last batch may be short\n",
    "        yield features.take(batch_idx), labels.take(batch_idx)\n",
    "# def cross_entropy(y_hat, y):\n",
    "#     return -nd.pick(y_hat, y).log()\n",
    "# def accuracy(y_hat, y):\n",
    "#     return (y_hat.argmax(axis=1) == y.astype('float32')).mean().asscalar()\n",
    "\n",
    "# def evaluate_accuracy(data_iter, net):\n",
    "#     acc_sum, n = 0.0, 0\n",
    "#     for X, y in data_iter:\n",
    "#         y = y.astype('float32')\n",
    "#         acc_sum += (net(X).argmax(axis=1) == y).sum().asscalar()\n",
    "#         n += y.size\n",
    "#     return acc_sum / n\n",
    "\n",
    "# Training schedule: 500 epochs at learning rate 1e-5.\n",
    "num_epochs, lr = 500, 0.00001\n",
    "\n",
    "def sgd(params, lr, batch_size):  \n",
    "    \"\"\"One minibatch-SGD step: update each parameter in place from its gradient.\"\"\"\n",
    "    for param in params:\n",
    "        # [:] writes through the existing NDArray, keeping its attached grad buffer\n",
    "        param[:] = param - lr * param.grad / batch_size\n",
    "\n",
    "def train_ch3(net, train_iter, test_iter, loss, num_epochs, batch_size,\n",
    "              params=None, lr=None):\n",
    "    \"\"\"Train `net` for `num_epochs` epochs with minibatch SGD, printing the\n",
    "    mean training loss after each epoch.\n",
    "\n",
    "    NOTE(review): train_iter and test_iter are accepted but never used --\n",
    "    every epoch draws fresh batches from the globals x / x_label through\n",
    "    data_iter, and no test-set evaluation happens. Confirm this is intended.\n",
    "    \"\"\"\n",
    "    for epoch in range(num_epochs):\n",
    "        for X, y in data_iter(batch_size,x,x_label):\n",
    "            with autograd.record():\n",
    "                y_hat = net(X)\n",
    "#                 print('X')\n",
    "#                 print(X)\n",
    "#                 print('y_hat')\n",
    "#                 print(y_hat)\n",
    "#                 print('y')\n",
    "#                 print(y)\n",
    "#                 print('[W,b]')\n",
    "#                 print([w,b])\n",
    "                l = loss(y_hat, y)\n",
    "            l.backward()   # compute gradients\n",
    "            sgd(params, lr, batch_size)    # update the weights and biases\n",
    "#         print(params)\n",
    "        train_l_sum =loss(net(x),x_label)  # per-sample loss on the whole training set\n",
    "        print('epoch %d,loss %f' % (epoch + 1,train_l_sum.mean().asnumpy()))\n",
    "\n",
    "\n",
    "# Input workbooks (relative paths; the files live in the current working directory).\n",
    "pathX = '1标准化_1_300_307井.xls'  # normalized training features\n",
    "pathX2 = '1_300_307井_label.xls'  # training labels\n",
    "pathX3 = '1标准化_1_300_307井pre.xls'  # normalized features to predict on\n",
    "x = excel2matrix(pathX)\n",
    "x_label=excel2matrix(pathX2)\n",
    "y_test=excel2matrix(pathX3)\n",
    "y_label=nd.zeros((y_test.shape[0],1))  # placeholder labels for the prediction set\n",
    "\n",
    "# NOTE(review): these two generators are passed to train_ch3 but ignored there.\n",
    "train_iter=data_iter(batch_size,x,x_label)\n",
    "test_iter=data_iter(batch_size,y_test,y_label)\n",
    "train_ch3(net, train_iter, test_iter, squared_loss, num_epochs, batch_size,params, lr)\n",
    "#set_figsize()\n",
    "#plt.scatter(x[:, 1].asnumpy(), x_label[:, 0].asnumpy(), 1);  # the semicolon suppresses the repr so only the figure shows\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 18,
   "id": "7001e715",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[\n",
      "[[  2.5696776    3.7328303    2.106957    -1.334606     0.6676768\n",
      "    4.225206    -2.4017637   -1.3300864    1.3324556   33.84695\n",
      "   -0.74861294   9.744487    -6.685402    -1.3741568  -10.877793  ]\n",
      " [ -7.7084217   -0.98984796 -14.625521   -14.154701    -8.677056\n",
      "    2.354065     0.26585522  12.526954    -0.99732697  -4.691905\n",
      "   -6.5303683   -5.1233125   16.047327    12.036901     2.9177926 ]\n",
      " [ -2.018102     1.6289072    5.4785676   -1.6348462    2.571868\n",
      "    1.7287952   -6.4210935   -1.1634178    0.22893639  20.798275\n",
      "    3.3285258    0.95358014   4.1250124   -0.23697168  -2.8993094 ]\n",
      " [-10.759566    -1.5795132   -6.018621    -1.6540134   -2.402238\n",
      "   -3.9736686   -5.1679316   -2.3400233    2.8656998  -14.757098\n",
      "   -5.2899146  -10.800021    11.310181    -1.8274968    5.705347  ]]\n",
      "<NDArray 4x15 @cpu(0)>, \n",
      "[  4.3771524   -0.13421983  -3.7137642    0.35812432  -1.8386524\n",
      "  -1.205425     3.099019     0.74995154  -0.94387215 -19.62845\n",
      "  -1.9826865   -3.103749    -4.6854806    1.457754     2.5176892 ]\n",
      "<NDArray 15 @cpu(0)>]\n"
     ]
    }
   ],
   "source": [
    "# Inspect the learned hidden-layer parameters after training.\n",
    "print([w,b])\n",
    "#athX4 = '标准化_272pre_304.xls'  #  113.xlsx 在当前文件夹下\n",
    "#y_test1=excel2matrix(pathX4)\n",
    "# Forward pass on the held-out feature matrix; predictions kept in `a`.\n",
    "a=net(y_test)\n",
    "\n",
    "set_figsize()\n",
    "#plt.scatter(x[:, 1].asnumpy(), x_label[:, 0].asnumpy(), 1);  # 加分号只显示图"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 19,
   "id": "77210598",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[[  129.77756 ]\n",
      " [   73.436775]\n",
      " [   73.436775]\n",
      " ...\n",
      " [-1753.1567  ]\n",
      " [-1753.1567  ]\n",
      " [-1753.1569  ]]\n"
     ]
    }
   ],
   "source": [
    "import numpy as np\n",
    "# Convert the predictions to a NumPy array and persist them to disk.\n",
    "a1=a.asnumpy()\n",
    "print(a1)\n",
    "# NOTE(review): fmt='%d' truncates the float predictions to integers when\n",
    "# writing result.txt -- confirm integer output is intended.\n",
    "np.savetxt(\"./result.txt\",a1,fmt='%d')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "d602ff85",
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python [conda env:gluon] *",
   "language": "python",
   "name": "conda-env-gluon-py"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.13"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
