{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 5,
   "id": "cb9e3f4a",
   "metadata": {
    "scrolled": false
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "[[ 2.193714   -0.24207899  0.47182372 -1.0634022  -1.1899269  -1.2475806\n",
      "   0.9349846   0.30198842 -0.04048488 -0.11865307 -0.15755391 -1.1489013\n",
      "  -1.3347287   0.72849447  0.39505652 -0.970525    0.701021   -0.78113544\n",
      "   0.44730365  2.05135    -0.0323206  -0.64200693 -0.8167455  -2.2955363\n",
      "  -0.40479332 -0.727692    1.3193419  -0.7513297   1.6076493  -1.8091346\n",
      "  -0.9084958   0.24316646  0.37921658  1.1474518   0.34603766 -0.32522526\n",
      "  -3.0729449  -1.5063347   0.66430676 -1.0978827  -0.30379722  2.451149\n",
      "   0.48040217 -0.6373412  -0.6439333   0.8953347  -0.05013736  0.7706291\n",
      "  -0.88299894  0.9075712 ]\n",
      " [-0.5727684  -0.63393694 -0.25125825 -0.4679077  -0.96949255  0.6054719\n",
      "  -1.8479688   1.1771942   0.7693484   1.0193739  -1.1929464   0.35728452\n",
      "   0.929739   -0.07530446  1.529403    2.433878    0.45336998 -1.2475488\n",
      "  -0.38345933  0.04390418  0.43116438 -0.05762207 -0.82676333 -0.02738475\n",
      "  -0.965957   -0.48825058 -0.47159794  1.1846952   1.0482183  -2.7700653\n",
      "  -0.749875    2.8714633  -0.60171384  0.5556536   0.83950627 -0.01687881\n",
      "   0.552545   -1.1928054   2.145737    1.9149841  -0.06043091 -0.90107435\n",
      "   0.147567    0.8224691   1.6581697   1.6010096   0.51598907 -0.74351084\n",
      "  -0.4693698   1.8088094 ]\n",
      " [-1.3517168  -0.47545606  0.73290133  0.23935296 -0.22669503 -0.18945594\n",
      "   0.7160566  -0.78504616  3.446226   -0.18737309 -0.06058075 -0.34784472\n",
      "  -0.72645676  0.31270468 -0.7687908   1.2526172   0.1008345   0.72880065\n",
      "  -0.6560808  -1.5578806   0.88423043  0.09289694 -0.37225932  0.23969\n",
      "  -0.7307049   1.1243073   0.8710831  -0.18960217 -0.2375496  -0.09967642\n",
      "  -2.2595086   0.6176108  -0.61511093  1.1496054   1.0610275   0.9811742\n",
      "   0.4543166  -0.56036836 -1.4878494   0.18835737  0.23832297 -0.53292304\n",
      "   0.41829816  1.1891723  -0.14890046  0.9972082   0.87234277 -1.3080891\n",
      "   0.22663847  0.05517327]\n",
      " [ 2.5704823   0.2712792   0.3306921   1.1550817   0.37311178 -0.81031585\n",
      "   1.8074269  -2.2826307  -0.656243    0.574421   -0.35693032  0.616179\n",
      "  -1.025505   -1.1116307  -0.217424    0.5754443  -1.4513109   1.1311888\n",
      "   0.7891321  -0.52517384  1.1667413  -1.3900373   1.1848512  -0.77211094\n",
      "   0.7698068  -0.5795375  -0.11437181 -0.87774503 -0.8746109  -0.9417029\n",
      "   0.38839945  1.7539735   1.2903697  -1.0992632  -1.1264311   0.03219173\n",
      "   0.72209305 -0.30856818 -0.64159614 -0.01829163 -0.7199702  -0.0775913\n",
      "  -0.4652605   0.26944658  0.45839924  1.1732304  -0.01763551 -1.3559734\n",
      "   1.1798365   0.12395244]\n",
      " [ 1.0204364  -1.7621588   0.7730219   0.07539039  0.18338485 -0.6422277\n",
      "  -1.3975646   1.579905    1.4576416   1.2399558   1.2406974   2.5995128\n",
      "   0.7090687  -2.07974     0.56902915 -0.5790944   1.1386666  -0.08318917\n",
      "   0.26796398 -0.779301    1.2675413   0.01610054  0.05013302 -0.04665853\n",
      "  -2.276604    0.14018564 -0.645477   -0.19165473  2.3544838  -0.6831864\n",
      "  -0.72648555 -0.1773516  -0.36604822  0.09375419  1.2567472   1.9090071\n",
      "   1.7412212  -1.8541421  -0.8824435   0.5318489  -2.1007056  -1.2048476\n",
      "   0.5289677  -1.8236586  -1.4400848   0.23442382 -0.15697928  1.0544\n",
      "  -0.1026449   0.36684537]\n",
      " [-0.16755922 -0.71981984 -1.9845117  -0.01684409 -0.7856071   2.3805144\n",
      "  -0.51250273  1.2001076   0.5695961  -0.36539355  0.381443    1.7367309\n",
      "   0.705143   -0.479629   -1.6061684  -1.209525    0.5079271   0.59274167\n",
      "  -2.044369   -0.5982161  -0.21327493 -0.11257169  1.0244733   0.23873484\n",
      "   2.171669   -0.67574084 -0.14119692 -0.18093303  0.25068912  0.41977623\n",
      "  -0.01738846  0.385363    0.49036577 -0.21657921  1.2423673   0.09706335\n",
      "   0.88684124  0.8851526  -0.5074097   1.0977019   0.6080089  -0.8155795\n",
      "   1.4119246   1.5839422  -0.07519241  0.130981    0.64666164  1.784934\n",
      "   0.815857   -2.0667157 ]\n",
      " [ 0.7341803  -0.35792333 -1.1249198   0.08848908 -1.2269157  -0.5257826\n",
      "   0.6959006   2.9528575   0.8055992   0.15245835  0.37750864  0.5348809\n",
      "   0.89720845  1.2572826   0.50579983 -0.6299582  -2.0352778  -0.24586818\n",
      "  -0.50603664 -0.7549605  -0.24768934  0.7884371  -1.4108869   1.575088\n",
      "   0.29708475 -1.1428709   0.47055507  0.8158965  -0.32330975 -1.5245888\n",
      "  -2.2481675   1.1302909  -1.6964164   0.5491805  -0.5229508  -0.50190055\n",
      "   0.10468259  0.2919974  -0.23856129 -0.78411037 -0.04749353 -0.6825055\n",
      "   0.07721116 -0.21500623 -1.4736687  -1.2994007  -1.4366233  -0.6522017\n",
      "  -0.39740035  1.313039  ]\n",
      " [ 0.80517477 -0.80155426  0.22922666  0.99733955  0.14015755  1.0246505\n",
      "   0.18256684  0.05025564  1.5548835   0.45569864 -0.54957366 -0.7216449\n",
      "   0.94430643  0.88231295  0.37669018 -0.10300364  0.07552347  1.2661922\n",
      "   0.41548994 -1.4628596   0.43351585 -1.9305679   0.71064675  0.00537928\n",
      "   0.09414816  0.47337052 -0.5121384  -1.2694473  -0.4039662  -0.7827149\n",
      "  -0.13256998  0.68740803 -0.09244452 -0.04830151 -0.73362416  0.5184001\n",
      "  -1.1271117  -1.9551272  -1.3159215   1.0971274   0.2631795   0.40150806\n",
      "  -0.02073734  0.37560123 -0.3211884  -0.5551438   1.502158    0.7052456\n",
      "   0.48957896  0.454734  ]\n",
      " [ 0.32813478 -1.6941742   1.4528003   0.4442519  -0.59505296  0.43791974\n",
      "  -1.2401401   0.3489706   1.3503113   0.49132568  0.58165026 -0.67523867\n",
      "   1.0643151   0.15438096  0.74431586  0.5935906   0.7612735   0.79207844\n",
      "  -0.47788948  0.4651727   0.8481056   0.8285899  -1.0201473   0.5316499\n",
      "   0.6433698  -0.28900802 -2.2598019   0.5870532  -0.96624464  0.27725616\n",
      "  -0.13495918 -0.76468927 -0.14318351  0.96473426  0.11620942 -1.9301482\n",
      "   0.94788367 -0.63520294 -1.3172854   0.07192188  0.08613506  0.6776263\n",
      "  -1.5315932  -1.0127441  -1.2187113   0.51857644 -0.3623761  -1.3151339\n",
      "   0.32331517 -1.8784021 ]\n",
      " [-0.18634856  0.45773536  0.35733172 -1.424551    0.8302229  -0.15233986\n",
      "   1.3668064  -1.005086   -0.77341074 -0.81145257 -1.4697174   0.43748215\n",
      "  -0.3382815   0.78897125  0.10472486  0.7830821  -0.6016719   1.2548904\n",
      "   0.698875   -1.2625444  -2.0005207   0.5985631  -0.01753676  0.80074316\n",
      "   1.1874603  -0.48287326  0.31706816 -0.23967849  0.1318787  -1.8263324\n",
      "   1.6830165   0.11534531  1.2804266   1.6539956  -1.4893694   1.3256898\n",
      "  -0.03691823 -0.46920815  0.31242302  1.823104    0.539566   -0.96558857\n",
      "  -0.32707366 -1.1030223   0.04145166 -1.5148767  -0.42181492 -0.3074671\n",
      "  -0.17289098 -0.44251052]\n",
      " [-0.09911606  0.69115496  2.2307765   0.8108336  -0.04861936  0.4300387\n",
      "  -0.1003594   0.12959881  2.3167      0.0053381   0.9297066  -0.3824468\n",
      "  -0.8981607  -0.2478199  -0.12430649  1.0537155   0.62116736  0.7284134\n",
      "  -0.18999189 -1.9500716   1.6507782  -1.1761316   1.1130937  -1.3225411\n",
      "   0.45340213 -1.0849947   0.4973237   0.66574824 -0.6464905  -2.0231586\n",
      "   0.38158894 -0.66094595 -2.0478034   0.18950588 -0.07882092 -0.04731265\n",
      "  -1.268024    0.11605518 -0.23562612 -0.4641062  -1.438138   -0.24716307\n",
      "   0.379282    1.7141463   0.235653   -0.5205612   0.29842672  0.98373735\n",
      "  -0.89973986 -0.36021972]\n",
      " [-0.5389417   1.0717286  -0.15373236 -0.22614111  1.5653211  -0.17643973\n",
      "  -1.0453464  -0.01413153 -1.1039338   0.9860171   0.24528815 -1.1318179\n",
      "   0.14515583 -0.7272173   0.01768782  0.9477563   0.06469248 -0.05001064\n",
      "  -0.7686505   1.2724922   1.9287022   1.1347985  -1.2635216  -0.5304231\n",
      "  -0.8815133  -0.4705466  -0.6998452  -1.0092462  -1.2266477  -0.75610965\n",
      "  -0.88053304  0.14757352  0.37675795 -0.26471892  0.26585776  1.1632661\n",
      "   0.4223105   0.77922666  1.1003295  -1.8407121   0.6967134  -1.3720413\n",
      "  -0.11698486  0.13481373  2.0215309   1.0961701   0.21675943  0.38449198\n",
      "  -1.5260259   0.98250276]\n",
      " [ 0.06328962  0.26891446 -0.8031906   0.94798917 -1.2246903  -0.68882483\n",
      "   2.6452868   0.2752484   1.8533238   0.18546386  0.09132357  0.19416758\n",
      "   0.08833647  1.5202147   0.30509514  0.48741332 -0.428195   -0.60889953\n",
      "   0.12081601 -0.6657615   0.94376314  1.0864261   0.62561405 -0.87672466\n",
      "  -0.5167771   0.5031572  -0.6007953   0.22019558  0.23245488  1.7218516\n",
      "  -0.87615967 -2.3730354  -0.11691173  0.26922143  0.05306546  0.97320646\n",
      "  -0.5862639   0.54759425 -0.00409694  0.25083703 -1.2534264   0.7207481\n",
      "   1.4381061  -0.41723797  0.21565168  0.34882528  0.7290495  -0.33400968\n",
      "  -1.9748569   1.2052348 ]\n",
      " [-0.96623164 -1.1403372   1.8070363  -0.31931973 -1.4428087   0.15304029\n",
      "  -1.0801163  -0.17003135  0.38460478 -1.2667241   0.5576353   2.8585494\n",
      "   0.7977446   2.6139948  -0.2794741   0.72792625  0.11546747  0.13388999\n",
      "   1.0934619   0.03716457  0.45951033 -0.29076946  2.9222586  -0.48757747\n",
      "  -0.9287864   2.0221326   0.9959269   0.8494131  -0.67687964 -0.325538\n",
      "   0.6885636   0.22893535 -0.43545598  0.12330796 -0.9528565   0.36317536\n",
      "  -0.84433717  0.40039846  1.1521208  -0.5055161   0.10692026 -0.16361584\n",
      "   0.47249532 -2.8840866   0.42545685  1.8192348   1.5168514   1.2953353\n",
      "  -0.32133996  0.10010733]]\n",
      "<NDArray 14x50 @cpu(0)>\n",
      "epoch 1,loss 370974.250000\n",
      "epoch 2,loss 370649.750000\n",
      "epoch 3,loss 370019.937500\n",
      "epoch 4,loss 368694.593750\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "epoch 5,loss 365823.531250\n",
      "epoch 6,loss 359544.718750\n",
      "epoch 7,loss 345790.156250\n",
      "epoch 8,loss 316192.468750\n",
      "epoch 9,loss 255987.593750\n",
      "epoch 10,loss 155335.234375\n",
      "epoch 11,loss 61630.332031\n",
      "epoch 12,loss 21866.044922\n",
      "epoch 13,loss 10718.673828\n",
      "epoch 14,loss 6001.973145\n",
      "epoch 15,loss 3656.430908\n",
      "epoch 16,loss 2500.514160\n",
      "epoch 17,loss 1733.890259\n",
      "epoch 18,loss 1228.554688\n",
      "epoch 19,loss 931.277466\n",
      "epoch 20,loss 756.032166\n",
      "epoch 21,loss 547.401794\n",
      "epoch 22,loss 500.170746\n",
      "epoch 23,loss 400.884705\n",
      "epoch 24,loss 301.990265\n",
      "epoch 25,loss 258.441376\n",
      "epoch 26,loss 226.943466\n",
      "epoch 27,loss 172.974930\n",
      "epoch 28,loss 142.185745\n",
      "epoch 29,loss 144.426300\n",
      "epoch 30,loss 99.256584\n",
      "epoch 31,loss 83.354782\n",
      "epoch 32,loss 70.063744\n",
      "epoch 33,loss 61.851353\n",
      "epoch 34,loss 58.307529\n",
      "epoch 35,loss 41.831249\n",
      "epoch 36,loss 35.326729\n",
      "epoch 37,loss 29.762451\n",
      "epoch 38,loss 25.804991\n",
      "epoch 39,loss 22.461985\n",
      "epoch 40,loss 18.208101\n",
      "epoch 41,loss 16.147755\n",
      "epoch 42,loss 12.893285\n",
      "epoch 43,loss 12.256474\n",
      "epoch 44,loss 9.379718\n",
      "epoch 45,loss 8.005094\n",
      "epoch 46,loss 7.270150\n",
      "epoch 47,loss 5.941531\n",
      "epoch 48,loss 5.070173\n",
      "epoch 49,loss 4.200881\n",
      "epoch 50,loss 3.794673\n",
      "epoch 51,loss 3.370697\n",
      "epoch 52,loss 2.753931\n",
      "epoch 53,loss 2.140783\n",
      "epoch 54,loss 2.026075\n",
      "epoch 55,loss 1.703484\n",
      "epoch 56,loss 1.327652\n",
      "epoch 57,loss 1.114997\n",
      "epoch 58,loss 0.960246\n",
      "epoch 59,loss 0.800090\n",
      "epoch 60,loss 0.659877\n",
      "epoch 61,loss 0.595714\n",
      "epoch 62,loss 0.487609\n",
      "epoch 63,loss 0.431381\n",
      "epoch 64,loss 0.396539\n",
      "epoch 65,loss 0.285572\n",
      "epoch 66,loss 0.270539\n",
      "epoch 67,loss 0.222898\n",
      "epoch 68,loss 0.176372\n",
      "epoch 69,loss 0.154919\n",
      "epoch 70,loss 0.123386\n",
      "epoch 71,loss 0.109317\n",
      "epoch 72,loss 0.090862\n",
      "epoch 73,loss 0.085948\n",
      "epoch 74,loss 0.065492\n",
      "epoch 75,loss 0.056148\n",
      "epoch 76,loss 0.052201\n",
      "epoch 77,loss 0.042663\n",
      "epoch 78,loss 0.033742\n",
      "epoch 79,loss 0.027900\n",
      "epoch 80,loss 0.026568\n",
      "epoch 81,loss 0.020183\n",
      "epoch 82,loss 0.017571\n",
      "epoch 83,loss 0.014472\n",
      "epoch 84,loss 0.013365\n",
      "epoch 85,loss 0.010369\n",
      "epoch 86,loss 0.008575\n",
      "epoch 87,loss 0.008268\n",
      "epoch 88,loss 0.006538\n",
      "epoch 89,loss 0.005428\n",
      "epoch 90,loss 0.004483\n",
      "epoch 91,loss 0.003762\n",
      "epoch 92,loss 0.003125\n",
      "epoch 93,loss 0.002739\n",
      "epoch 94,loss 0.002412\n",
      "epoch 95,loss 0.001996\n",
      "epoch 96,loss 0.001622\n",
      "epoch 97,loss 0.001357\n",
      "epoch 98,loss 0.001180\n",
      "epoch 99,loss 0.000966\n",
      "epoch 100,loss 0.000827\n"
     ]
    }
   ],
   "source": [
     "%matplotlib inline\n",
     "# MLP regression with raw MXNet NDArray + autograd.\n",
     "# Loads features/labels from .xls files, trains a 14-50-150-1 network\n",
     "# with per-sample SGD, and prints the mean training loss each epoch.\n",
     "import d2lzh as d2l\n",
     "import xlrd\n",
     "import random\n",
     "import math\n",
     "from IPython import display\n",
     "from matplotlib import pyplot as plt\n",
     "from mxnet import autograd, nd\n",
     "# Hyperparameters and layer sizes\n",
     "batch_size =1\n",
     "num_inputs = 14\n",
     "num_outputs = 1\n",
     "num_hiddens=50\n",
     "num_hiddens1=150\n",
     "\n",
     "# Model parameters.\n",
     "# NOTE(review): the first layer uses scale=1 while the deeper layers use\n",
     "# scale=0.01 — confirm the large first-layer init is intentional.\n",
     "w = nd.random.normal(scale=1, shape=(num_inputs, num_hiddens))\n",
     "b = nd.zeros(num_hiddens)\n",
     "w1=nd.random.normal(scale=0.01, shape=(num_hiddens, num_hiddens1))\n",
     "b1= nd.zeros(num_hiddens1)\n",
     "w2=nd.random.normal(scale=0.01, shape=(num_hiddens1, num_outputs))\n",
     "b2= nd.zeros(num_outputs)\n",
     "\n",
     "# Allocate gradient buffers so autograd can record with respect to each parameter\n",
     "w.attach_grad()\n",
     "b.attach_grad()\n",
     "w1.attach_grad()\n",
     "b1.attach_grad()\n",
     "w2.attach_grad()\n",
     "b2.attach_grad()\n",
     "\n",
     "params=[w,b,w1,b1,w2,b2]\n",
     "print(w)  # NOTE(review): prints the full 14x50 first-layer weight matrix\n",
     "def use_svg_display():\n",
     "    # Display figures as vector (SVG) graphics\n",
     "    display.set_matplotlib_formats('svg')\n",
     "\n",
     "def set_figsize(figsize=(3.5, 2.5)):\n",
     "    use_svg_display()\n",
     "    # Set the default figure size\n",
     "    plt.rcParams['figure.figsize'] = figsize\n",
     "\n",
     "def squared_loss(y_hat, y):\n",
     "    # Per-sample squared error, scaled by 1/batch_size.\n",
     "    # NOTE(review): the conventional 1/2 factor lives in sgd() below instead.\n",
     "    return (y_hat - y) ** 2 / batch_size\n",
     "\n",
     "def relu(X):\n",
     "    # Element-wise ReLU activation.\n",
     "    return nd.maximum(X,0)\n",
     "\n",
     "def net(X):\n",
     "    # Forward pass: ReLU hidden layer followed by two linear layers.\n",
     "    # NOTE(review): there is no activation between w1 and w2, so those two\n",
     "    # linear layers collapse into a single linear map — confirm intended.\n",
     "    H=relu(nd.dot(X,w)+b)\n",
     "    Y=nd.dot(H, w1) + b1\n",
     "    return nd.dot(Y, w2) + b2\n",
     "\n",
     "def excel2matrix(path):\n",
     "    # Load the first sheet of an Excel workbook into an NDArray (rows x cols).\n",
     "    data = xlrd.open_workbook(path)\n",
     "    table = data.sheets()[0]\n",
     "    nrows = table.nrows  # number of rows\n",
     "    ncols = table.ncols  # number of columns\n",
     "    # Allocate an NDArray of the right shape; every entry is overwritten below\n",
     "    datamatrix = nd.random.normal(scale=1,shape=(nrows, ncols))\n",
     "    for i in range(nrows):\n",
     "        rows = table.row_values(i)\n",
     "        datamatrix[i,:] = rows\n",
     "    return datamatrix\n",
     " \n",
     "def data_iter(batch_size, features, labels):\n",
     "    # Yield (features, labels) minibatches, visiting samples in random order.\n",
     "    num_examples = len(features)\n",
     "    indices = list(range(num_examples))\n",
     "    random.shuffle(indices)  # samples are read in random order\n",
     "    for i in range(0, num_examples, batch_size):\n",
     "        j = nd.array(indices[i: min(i + batch_size, num_examples)])\n",
     "        yield features.take(j), labels.take(j)  # take() returns the rows at the given indices\n",
     "# def cross_entropy(y_hat, y):\n",
     "#     return -nd.pick(y_hat, y).log()\n",
     "# def accuracy(y_hat, y):\n",
     "#     return (y_hat.argmax(axis=1) == y.astype('float32')).mean().asscalar()\n",
     "\n",
     "# def evaluate_accuracy(data_iter, net):\n",
     "#     acc_sum, n = 0.0, 0\n",
     "#     for X, y in data_iter:\n",
     "#         y = y.astype('float32')\n",
     "#         acc_sum += (net(X).argmax(axis=1) == y).sum().asscalar()\n",
     "#         n += y.size\n",
     "#     return acc_sum / n\n",
     "\n",
     "num_epochs, lr = 100, 0.00001\n",
     "\n",
     "def sgd(params, lr, batch_size):  \n",
     "    # In-place SGD step on every parameter.\n",
     "    # NOTE(review): divides by 2, not by the batch_size argument; with\n",
     "    # squared_loss's 1/batch_size factor the combined scaling matches the\n",
     "    # usual lr * grad / (2 * batch_size), but batch_size here is ignored.\n",
     "    for param in params:\n",
     "        param[:] = param - lr * param.grad / 2\n",
     "\n",
     "def train_ch3(net, train_iter, test_iter, loss, num_epochs, batch_size,\n",
     "              params=None, lr=None):\n",
     "    # Train for num_epochs, printing the mean training loss after each epoch.\n",
     "    # NOTE(review): the train_iter and test_iter arguments are unused — a\n",
     "    # fresh iterator is rebuilt from the globals x/x_label every epoch.\n",
     "    for epoch in range(num_epochs):\n",
     "        for X, y in data_iter(batch_size,x,x_label):\n",
     "            with autograd.record():\n",
     "                y_hat = net(X)\n",
     "#                 print('X')\n",
     "#                 print(X)\n",
     "#                 print('y_hat')\n",
     "#                 print(y_hat)\n",
     "#                 print('y')\n",
     "#                 print(y)\n",
     "#                 print('[W,b]')\n",
     "#                 print([w,b])\n",
     "                l = loss(y_hat, y)\n",
     "            l.backward()   # compute gradients\n",
     "            sgd(params, lr, batch_size)    # update the weights and biases   \n",
     "#         print(params)\n",
     "        train_l_sum =loss(net(x),x_label)  # loss over the full training set\n",
     "        print('epoch %d,loss %f' % (epoch + 1,train_l_sum.mean().asnumpy()))\n",
     "\n",
     "\n",
     "pathX = 'DD1_train.xls'  # training features, in the current working directory\n",
     "pathX2 = 'DD1_train_label.xls'  # training labels, in the current working directory\n",
     "pathX3 = 'DD1_test.xls'  # test features, in the current working directory\n",
     "x = excel2matrix(pathX)\n",
     "x_label=excel2matrix(pathX2)\n",
     "y_test=excel2matrix(pathX3)\n",
     "y_label=nd.zeros((y_test.shape[0],1))  # placeholder (all-zero) test labels\n",
     "\n",
     "# NOTE(review): these two iterators are passed in but never consumed by train_ch3\n",
     "train_iter=data_iter(batch_size,x,x_label)\n",
     "test_iter=data_iter(batch_size,y_test,y_label)\n",
     "train_ch3(net, train_iter, test_iter, squared_loss, num_epochs, batch_size,params, lr)\n",
     "#set_figsize()\n",
     "#plt.scatter(x[:, 1].asnumpy(), x_label[:, 0].asnumpy(), 1);  # trailing semicolon shows only the figure\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "id": "e86025e9",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "[[ 0.00765397 -0.7731842  -0.91937613 -1.1871159  -0.45714337 -0.3397889\n",
      "  -1.0508657   0.3244295  -0.08204205 -1.3622038   1.075414    0.3858128\n",
      "   1.5823568  -0.47382885]\n",
      " [ 0.16973191 -1.0098479  -0.9113491  -0.24463043 -1.8934499  -2.0855565\n",
      "  -0.8887286  -1.2524416  -0.2224404  -1.0885804   0.95109916  1.4881351\n",
      "   0.0103441  -0.5582557 ]\n",
      " [ 1.4787687  -0.3505705  -0.05245888 -0.00900905 -0.4464777  -0.52674\n",
      "  -0.89508694  0.4565726   0.9486779  -0.600794    1.9145384   1.7986484\n",
      "   1.3813362  -0.49980634]\n",
      " [ 1.6875969  -0.3843796  -0.9474707  -0.9514945  -0.7362277  -0.64507246\n",
      "  -0.521536    0.4565726   0.5419615   0.07329291 -0.88254327 -0.7087466\n",
      "  -0.88486546 -1.4308206 ]\n",
      " [ 0.6836955  -0.48580688  0.0719598  -0.24463043  0.05302988 -0.2916801\n",
      "  -0.9920512   1.0439119  -0.99128115 -0.32011575  0.04981731  0.2460818\n",
      "  -0.37546274  0.8821028 ]\n",
      " [ 1.329552   -0.09700228 -0.20898561  0.9334764  -0.32026762 -0.3541744\n",
      "  -0.58352953  0.8808275  -0.4579168   1.0364312  -0.9757793  -0.5767785\n",
      "  -1.2895728  -0.6626295 ]\n",
      " [ 1.7823721   0.24108869  0.517459    0.22661231 -0.3895943  -0.59076196\n",
      "  -0.5612755   0.7838618  -0.09308397  0.6920571   0.11197467 -0.63888115\n",
      "   0.4309376   0.09303685]\n",
      " [ 1.4699922  -1.0267525  -0.18891808  0.17948805 -1.0757506  -1.1814364\n",
      "  -1.362423    0.27418485 -0.21296087 -1.7071444   0.6403123   1.1465704\n",
      "   0.01714154 -0.6417547 ]\n",
      " [ 1.4626427  -1.0267525  -1.5615371  -0.4802518  -1.9147812  -1.8495344\n",
      "  -0.89508694 -1.9051394  -1.8517903  -1.3299925  -0.26096955  0.09082514\n",
      "  -0.6965918   2.5692472 ]]\n",
      "<NDArray 9x14 @cpu(0)>\n"
     ]
    }
   ],
   "source": [
    "y_test=excel2matrix(pathX3)\n",
    "a=net(y_test)\n",
    "print(y_test)\n",
    "#set_figsize()\n",
    "#plt.scatter(x[:, 1].asnumpy(), x_label[:, 0].asnumpy(), 1);  # 加分号只显示图"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "id": "77210598",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[[349.95486]\n",
      " [209.99568]\n",
      " [600.01776]\n",
      " [580.0129 ]\n",
      " [520.0392 ]\n",
      " [750.00134]\n",
      " [949.95856]\n",
      " [570.9109 ]\n",
      " [747.7153 ]]\n"
     ]
    }
   ],
   "source": [
    "import numpy as np\n",
    "a1=a.asnumpy()\n",
    "print(a1)\n",
    "np.savetxt(\"./result.txt\",a1,fmt='%d')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "e28522dc",
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python [conda env:gluon] *",
   "language": "python",
   "name": "conda-env-gluon-py"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.13"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
