{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 8,
   "id": "cb9e3f4a",
   "metadata": {
    "scrolled": false
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "[[ 0.38242745 -0.89261854 -0.799134    0.40822244  0.8142611   0.09355491\n",
      "  -1.7350665   1.5486968  -0.5758122   0.855615    0.2747267  -0.48888794\n",
      "   0.21290363  0.3266078   0.08797776 -0.60153437 -0.654938    1.6079236\n",
      "   0.27368668  0.74624586]\n",
      " [-0.90865225 -0.50927305  0.03280422  0.25718853  0.02027451 -0.70443887\n",
      "  -1.6782768  -2.7917879  -0.12364082 -0.36128756 -1.4830918  -1.0358013\n",
      "  -0.4695727   0.01925124 -0.1985765   0.935016    0.03415474  0.89851373\n",
      "  -0.21437688  0.20124865]\n",
      " [ 0.4893068   0.8223781  -0.2461001  -0.24543795 -0.38807502  0.50786024\n",
      "  -0.05190088  2.565217    0.4654234  -0.11808653 -0.8884127  -1.5194063\n",
      "  -0.8427608   0.49972773 -1.523162    0.42265078 -0.7250483  -1.2610698\n",
      "   0.21973047  0.13487762]\n",
      " [-0.66791767  1.0904073  -1.8273833   0.24907225 -2.028013    0.34917158\n",
      "  -1.7765714   0.17073451 -0.03335929 -2.0311198   1.5284077   0.44033238\n",
      "  -1.3511574  -1.109794   -1.2587345  -0.55473953  0.37751237  0.80353266\n",
      "  -0.89804345  0.48811144]]\n",
      "<NDArray 4x20 @cpu(0)>\n",
      "epoch 1,loss 22727.607422\n",
      "epoch 2,loss 21538.548828\n",
      "epoch 3,loss 21375.857422\n",
      "epoch 4,loss 23687.140625\n",
      "epoch 5,loss 24001.216797\n",
      "epoch 6,loss 21966.644531\n",
      "epoch 7,loss 21132.714844\n",
      "epoch 8,loss 23358.492188\n",
      "epoch 9,loss 20943.861328\n",
      "epoch 10,loss 21889.509766\n",
      "epoch 11,loss 20949.031250\n",
      "epoch 12,loss 21035.976562\n",
      "epoch 13,loss 20671.285156\n",
      "epoch 14,loss 21043.367188\n",
      "epoch 15,loss 20694.857422\n",
      "epoch 16,loss 28248.470703\n",
      "epoch 17,loss 20655.917969\n",
      "epoch 18,loss 23086.636719\n",
      "epoch 19,loss 22221.199219\n",
      "epoch 20,loss 20560.542969\n",
      "epoch 21,loss 20464.408203\n",
      "epoch 22,loss 20817.332031\n",
      "epoch 23,loss 20818.814453\n",
      "epoch 24,loss 19940.800781\n",
      "epoch 25,loss 18764.494141\n",
      "epoch 26,loss 17796.708984\n",
      "epoch 27,loss 19288.394531\n",
      "epoch 28,loss 17569.300781\n",
      "epoch 29,loss 16810.333984\n",
      "epoch 30,loss 17253.291016\n",
      "epoch 31,loss 20217.248047\n",
      "epoch 32,loss 16554.457031\n",
      "epoch 33,loss 16846.113281\n",
      "epoch 34,loss 17508.335938\n",
      "epoch 35,loss 17864.076172\n",
      "epoch 36,loss 17121.771484\n",
      "epoch 37,loss 16619.144531\n",
      "epoch 38,loss 16402.863281\n",
      "epoch 39,loss 16922.074219\n",
      "epoch 40,loss 18447.689453\n",
      "epoch 41,loss 18290.050781\n",
      "epoch 42,loss 16283.833008\n",
      "epoch 43,loss 16513.966797\n",
      "epoch 44,loss 17388.072266\n",
      "epoch 45,loss 16534.894531\n",
      "epoch 46,loss 16832.716797\n",
      "epoch 47,loss 16172.129883\n",
      "epoch 48,loss 15982.818359\n",
      "epoch 49,loss 16174.174805\n",
      "epoch 50,loss 16307.629883\n",
      "epoch 51,loss 16219.955078\n",
      "epoch 52,loss 16102.608398\n",
      "epoch 53,loss 16765.367188\n",
      "epoch 54,loss 16560.029297\n",
      "epoch 55,loss 20375.828125\n",
      "epoch 56,loss 17586.923828\n",
      "epoch 57,loss 15663.514648\n",
      "epoch 58,loss 15897.070312\n",
      "epoch 59,loss 15713.344727\n",
      "epoch 60,loss 15756.952148\n",
      "epoch 61,loss 16914.953125\n",
      "epoch 62,loss 16681.111328\n",
      "epoch 63,loss 15533.997070\n",
      "epoch 64,loss 15443.905273\n",
      "epoch 65,loss 16349.008789\n",
      "epoch 66,loss 15650.544922\n",
      "epoch 67,loss 20062.412109\n",
      "epoch 68,loss 15511.821289\n",
      "epoch 69,loss 16916.847656\n",
      "epoch 70,loss 15740.143555\n",
      "epoch 71,loss 17516.519531\n",
      "epoch 72,loss 16373.294922\n",
      "epoch 73,loss 15973.195312\n",
      "epoch 74,loss 16354.775391\n",
      "epoch 75,loss 18412.519531\n",
      "epoch 76,loss 16088.453125\n",
      "epoch 77,loss 16660.455078\n",
      "epoch 78,loss 15921.778320\n",
      "epoch 79,loss 15709.373047\n",
      "epoch 80,loss 15286.693359\n",
      "epoch 81,loss 15256.696289\n",
      "epoch 82,loss 15583.365234\n",
      "epoch 83,loss 17146.638672\n",
      "epoch 84,loss 17062.005859\n",
      "epoch 85,loss 15378.301758\n",
      "epoch 86,loss 15973.226562\n",
      "epoch 87,loss 15823.988281\n",
      "epoch 88,loss 15286.311523\n",
      "epoch 89,loss 16194.856445\n",
      "epoch 90,loss 16323.889648\n",
      "epoch 91,loss 16340.236328\n",
      "epoch 92,loss 15319.834961\n",
      "epoch 93,loss 15473.708008\n",
      "epoch 94,loss 16047.031250\n",
      "epoch 95,loss 15641.825195\n",
      "epoch 96,loss 16269.276367\n",
      "epoch 97,loss 15259.035156\n",
      "epoch 98,loss 15273.292969\n",
      "epoch 99,loss 14975.377930\n",
      "epoch 100,loss 15122.775391\n"
     ]
    }
   ],
   "source": [
    "# Imports and hyperparameters for a small MXNet regression notebook.\n",
    "%matplotlib inline\n",
    "import d2lzh as d2l\n",
    "import xlrd\n",
    "import random\n",
    "import math\n",
    "from IPython import display\n",
    "from matplotlib import pyplot as plt\n",
    "from mxnet import autograd, nd\n",
    "batch_size =10  # minibatch size used by data_iter/sgd\n",
    "num_inputs = 4  # feature columns expected in the input spreadsheet\n",
    "num_outputs = 1  # single regression target\n",
    "num_hiddens=20  # width of the hidden layer\n",
    "\n",
    "\n",
    "# Model parameters: one hidden layer (with a recurrent term) plus a linear\n",
    "# output layer. NOTE(review): no random seed is set, so every run draws\n",
    "# different initial weights -- results are not reproducible.\n",
    "w = nd.random.normal(scale=1, shape=(num_inputs, num_hiddens))  # input-to-hidden weights\n",
    "b = nd.zeros(num_hiddens)  # hidden-layer bias\n",
    "w1=nd.random.normal(scale=1, shape=(num_hiddens, num_outputs))  # hidden-to-output weights\n",
    "b1= nd.zeros(num_outputs)  # output bias\n",
    "\n",
    "# Initial hidden state and hidden-to-hidden weights used by net() below.\n",
    "# Note H1 gets no attach_grad() and is never in params, so it stays fixed.\n",
    "H1=nd.random.normal(scale=1, shape=(1, num_hiddens))\n",
    "wh1=nd.random.normal(scale=1, shape=(num_hiddens, num_hiddens))\n",
    "\n",
    "\n",
    "# Allocate gradient buffers so autograd can record gradients for each tensor.\n",
    "w.attach_grad()\n",
    "b.attach_grad()\n",
    "w1.attach_grad()\n",
    "b1.attach_grad()\n",
    "wh1.attach_grad()\n",
    "\n",
    "\n",
    "\n",
    "params=[w,b,w1,b1,wh1]  # everything sgd() updates each step\n",
    "print(w)\n",
    "def use_svg_display():\n",
    "    \"\"\"Render matplotlib figures as SVG (vector graphics) in the notebook.\"\"\"\n",
    "    # NOTE(review): IPython.display.set_matplotlib_formats is deprecated in\n",
    "    # newer IPython in favor of matplotlib_inline.backend_inline --\n",
    "    # confirm against the installed IPython version.\n",
    "    display.set_matplotlib_formats('svg')\n",
    "\n",
    "def set_figsize(figsize=(3.5, 2.5)):\n",
    "    \"\"\"Switch to SVG output and set the default matplotlib figure size.\"\"\"\n",
    "    use_svg_display()\n",
    "    # Apply the requested default size for subsequently created figures.\n",
    "    plt.rcParams.update({'figure.figsize': figsize})\n",
    "\n",
    "def squared_loss(y_hat, y):\n",
    "    \"\"\"Elementwise squared-error loss: (y_hat - y)^2 / 2.\"\"\"\n",
    "    residual = y_hat - y\n",
    "    return residual * residual / 2\n",
    "\n",
    "def relu(X):\n",
    "    \"\"\"Rectified linear unit: elementwise max(X, 0).\"\"\"\n",
    "    clipped = nd.maximum(X, 0)\n",
    "    return clipped\n",
    "\n",
    "def net(X,H1):\n",
    "    \"\"\"One forward pass: hidden layer with a recurrent term, then a linear output.\n",
    "\n",
    "    X  : input features; must have num_inputs columns for nd.dot(X, w).\n",
    "    H1 : (1, num_hiddens) hidden-state tensor, broadcast over the batch.\n",
    "    Returns Y with num_outputs columns.\n",
    "    \"\"\"\n",
    "    H=relu(nd.dot(X,w)+nd.dot(H1,wh1)+b)\n",
    "    Y=nd.dot(H, w1)+ b1\n",
    "    # BUG(review): this assignment only rebinds the local name H1; the caller's\n",
    "    # hidden state is never updated, so the \"recurrent\" term is a constant\n",
    "    # random projection. Return H as well (or hold state elsewhere) to fix.\n",
    "    H1=H\n",
    "\n",
    "    return Y\n",
    "\n",
    "def excel2matrix(path):\n",
    "    \"\"\"Read the first sheet of an .xls workbook into an (nrows, ncols) NDArray.\n",
    "\n",
    "    Every cell of the sheet must be numeric; each row is copied verbatim.\n",
    "    \"\"\"\n",
    "    data = xlrd.open_workbook(path)\n",
    "    table = data.sheets()[0]\n",
    "    nrows = table.nrows  # number of rows\n",
    "    ncols = table.ncols  # number of columns\n",
    "    # Allocate with zeros: every row is overwritten below, so drawing the\n",
    "    # buffer from a random normal (as the original did) was wasted work and\n",
    "    # misleading to readers.\n",
    "    datamatrix = nd.zeros((nrows, ncols))\n",
    "    for i in range(nrows):\n",
    "        datamatrix[i, :] = table.row_values(i)\n",
    "    return datamatrix\n",
    " \n",
    "def data_iter(batch_size, features, labels):\n",
    "    \"\"\"Yield (features, labels) minibatches in a random order.\n",
    "\n",
    "    The last batch may be smaller when batch_size does not divide the\n",
    "    number of examples.\n",
    "    \"\"\"\n",
    "    total = len(features)\n",
    "    order = list(range(total))\n",
    "    random.shuffle(order)  # visit the samples in random order\n",
    "    for start in range(0, total, batch_size):\n",
    "        picked = nd.array(order[start:start + batch_size])\n",
    "        # take() gathers the rows at the given indices\n",
    "        yield features.take(picked), labels.take(picked)\n",
    "# def cross_entropy(y_hat, y):\n",
    "#     return -nd.pick(y_hat, y).log()\n",
    "# def accuracy(y_hat, y):\n",
    "#     return (y_hat.argmax(axis=1) == y.astype('float32')).mean().asscalar()\n",
    "\n",
    "# def evaluate_accuracy(data_iter, net):\n",
    "#     acc_sum, n = 0.0, 0\n",
    "#     for X, y in data_iter:\n",
    "#         y = y.astype('float32')\n",
    "#         acc_sum += (net(X).argmax(axis=1) == y).sum().asscalar()\n",
    "#         n += y.size\n",
    "#     return acc_sum / n\n",
    "\n",
    "num_epochs, lr = 100, 0.0001\n",
    "\n",
    "def sgd(params, lr, batch_size):\n",
    "    \"\"\"In-place minibatch SGD step: param -= lr * param.grad / batch_size.\"\"\"\n",
    "    for p in params:\n",
    "        # Write through [:] so the update reuses the existing NDArray buffer.\n",
    "        p[:] = p - lr * p.grad / batch_size\n",
    "\n",
    "def train_ch3(net, train_iter, test_iter, loss, num_epochs, batch_size,\n",
    "              params=None, lr=None):\n",
    "    \"\"\"Train `net` with minibatch SGD, printing the full-dataset loss per epoch.\n",
    "\n",
    "    NOTE(review): train_iter and test_iter are accepted but never used -- the\n",
    "    loop rebuilds a fresh iterator from the globals x / x_label every epoch,\n",
    "    and it also reads the global hidden state H1.\n",
    "    \"\"\"\n",
    "    for epoch in range(num_epochs):\n",
    "        for X, y in data_iter(batch_size,x,x_label):\n",
    "            with autograd.record():\n",
    "                y_hat = net(X,H1)\n",
    "#                 print('X')\n",
    "#                 print(X)\n",
    "#                 print('y_hat')\n",
    "#                 print(y_hat)\n",
    "#                 print('y')\n",
    "#                 print(y)\n",
    "#                 print('[W,b]')\n",
    "#                 print([w,b])\n",
    "                l = loss(y_hat, y)\n",
    "            l.backward()   # backpropagate to fill each param.grad\n",
    "            sgd(params, lr, batch_size)    # update the weights in place\n",
    "#         print(params)\n",
    "        # Epoch metric: elementwise squared error over the whole training set.\n",
    "        train_l_sum =loss(net(x,H1),x_label)  # training error\n",
    "        print('epoch %d,loss %f' % (epoch + 1,train_l_sum.mean().asnumpy()))\n",
    "\n",
    "\n",
    "# Load training features/labels and the prediction set, then train.\n",
    "pathX = '标准化_272.xls'  # file expected in the current working directory\n",
    "pathX2 = '标准化_272label.xls'  # file expected in the current working directory\n",
    "pathX3 = '标准化_272pre.xls'  # file expected in the current working directory\n",
    "x = excel2matrix(pathX)\n",
    "x_label=excel2matrix(pathX2)\n",
    "y_test=excel2matrix(pathX3)\n",
    "y_label=nd.zeros((y_test.shape[0],1))\n",
    "\n",
    "# NOTE(review): these two iterators are passed to train_ch3 but never\n",
    "# consumed there -- the function builds its own iterator from globals.\n",
    "train_iter=data_iter(batch_size,x,x_label)\n",
    "test_iter=data_iter(batch_size,y_test,y_label)\n",
    "train_ch3(net, train_iter, test_iter, squared_loss, num_epochs, batch_size,params, lr)\n",
    "#set_figsize()\n",
    "#plt.scatter(x[:, 1].asnumpy(), x_label[:, 0].asnumpy(), 1);  # trailing semicolon shows only the figure\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "id": "7001e715",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[\n",
      "[[ 3.25247431e+00 -2.28601426e-01  3.14794636e+00  7.43711889e-01\n",
      "   2.50734067e+00  1.66520584e+00 -5.96163094e-01  2.63230234e-01\n",
      "  -3.62727237e+00 -5.32804728e-01 -3.72549057e+00 -7.16094673e-01\n",
      "   1.61325836e+00  7.67477846e+00  2.43613124e+00 -5.44204807e+00\n",
      "  -1.07520175e+00 -4.21656638e-01  1.05753601e-01 -1.84005952e+00]\n",
      " [ 4.51155710e+00  8.78301620e-01  3.32174206e+00  1.89531398e+00\n",
      "   2.19480228e+00  4.25343704e+00  2.29637250e-02 -1.07726061e+00\n",
      "  -1.97299061e+01  4.00938153e-01 -2.35254288e+01 -7.08638847e-01\n",
      "  -1.30464911e+00  1.98890038e+01  1.47255492e+00 -4.01326370e+00\n",
      "  -1.61487639e-01  1.25351176e-01  5.57336092e+00  1.46359384e+00]\n",
      " [-6.17262125e-02  3.98973525e-01 -1.26347148e+00  5.28141737e-01\n",
      "  -3.11892062e-01  2.65283370e+00 -1.67880881e+00 -1.45541832e-01\n",
      "   3.91551465e-01 -1.50915487e-02 -2.60970313e-02 -1.22226834e+00\n",
      "   1.22708194e-01  1.30334353e+00  2.98767686e-01  2.80267328e-01\n",
      "   1.30258703e+00 -1.30414963e-01 -1.02034914e+00  3.32215160e-01]\n",
      " [-1.36021030e+00  5.13167024e-01 -1.35562754e+00  1.04798663e+00\n",
      "  -7.43079960e-01 -2.38502097e+00 -6.74818277e-01 -4.01483960e-02\n",
      "   1.92418849e+00 -1.30582356e+00 -1.58344305e+00 -8.24135244e-01\n",
      "   1.08656788e+00 -5.07672501e+00 -1.36880970e+00  3.74445534e+00\n",
      "   5.08086145e-01 -1.05687566e-01 -3.46355677e-01  1.34315109e+00]]\n",
      "<NDArray 4x20 @cpu(0)>, \n",
      "[ 0.56356776 -0.01938334  0.5754443   0.40532506  0.20081761  0.35816148\n",
      " -0.06213063  0.         -0.07717113  0.         -1.3155204   0.\n",
      "  0.         -0.20746432 -0.10980491 -0.6539509   0.17930366  0.\n",
      "  0.16506532  0.14446297]\n",
      "<NDArray 20 @cpu(0)>]\n"
     ]
    }
   ],
   "source": [
    "# Inspect the trained first-layer weights, then run the net on a new file.\n",
    "print([w,b])\n",
    "pathX4 = '标准化_272pre_304.xls'  # file expected in the current working directory\n",
    "y_test1=excel2matrix(pathX4)\n",
    "a=net(y_test1,H1)  # predictions for the held-out spreadsheet\n",
    "\n",
    "set_figsize()\n",
    "#plt.scatter(x[:, 1].asnumpy(), x_label[:, 0].asnumpy(), 1);  # trailing semicolon shows only the figure\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "id": "77210598",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[[224.22295]\n",
      " [284.00104]\n",
      " [317.92816]\n",
      " ...\n",
      " [445.25513]\n",
      " [430.54095]\n",
      " [445.25513]]\n"
     ]
    }
   ],
   "source": [
    "# Convert the predictions to NumPy and dump them to disk.\n",
    "import numpy as np\n",
    "a1=a.asnumpy()\n",
    "print(a1)\n",
    "# NOTE(review): fmt='%d' truncates each float toward zero (no rounding)\n",
    "# when writing -- confirm integer output is really intended.\n",
    "np.savetxt(\"./result.txt\",a1,fmt='%d')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "d602ff85",
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python [conda env:gluon] *",
   "language": "python",
   "name": "conda-env-gluon-py"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.13"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
