{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "Using TensorFlow backend.\n",
      "/Users/jiangmaowei/opt/anaconda3/envs/pytorch/lib/python3.6/site-packages/tensorflow/python/framework/dtypes.py:526: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n",
      "  _np_qint8 = np.dtype([(\"qint8\", np.int8, 1)])\n",
      "/Users/jiangmaowei/opt/anaconda3/envs/pytorch/lib/python3.6/site-packages/tensorflow/python/framework/dtypes.py:527: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n",
      "  _np_quint8 = np.dtype([(\"quint8\", np.uint8, 1)])\n",
      "/Users/jiangmaowei/opt/anaconda3/envs/pytorch/lib/python3.6/site-packages/tensorflow/python/framework/dtypes.py:528: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n",
      "  _np_qint16 = np.dtype([(\"qint16\", np.int16, 1)])\n",
      "/Users/jiangmaowei/opt/anaconda3/envs/pytorch/lib/python3.6/site-packages/tensorflow/python/framework/dtypes.py:529: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n",
      "  _np_quint16 = np.dtype([(\"quint16\", np.uint16, 1)])\n",
      "/Users/jiangmaowei/opt/anaconda3/envs/pytorch/lib/python3.6/site-packages/tensorflow/python/framework/dtypes.py:530: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n",
      "  _np_qint32 = np.dtype([(\"qint32\", np.int32, 1)])\n",
      "/Users/jiangmaowei/opt/anaconda3/envs/pytorch/lib/python3.6/site-packages/tensorflow/python/framework/dtypes.py:535: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n",
      "  np_resource = np.dtype([(\"resource\", np.ubyte, 1)])\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "'\\n    import independency packages\\n'"
      ]
     },
     "execution_count": 2,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Import dependencies\n",
    "import keras\n",
    "from keras.datasets import mnist\n",
    "from keras.layers import Conv2D, MaxPool2D\n",
    "from keras.layers import Dense, Flatten\n",
    "from keras.models import Sequential"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Downloading data from https://s3.amazonaws.com/img-datasets/mnist.npz\n",
      "11493376/11490434 [==============================] - 86s 7us/step\n"
     ]
    }
   ],
   "source": [
    "# Load the MNIST dataset (downloads on first use)\n",
    "(X_train, y_train), (X_test, y_test) = mnist.load_data()\n",
    "img_x, img_y = X_train.shape[1:3]\n",
    "\n",
    "# Preprocess: add a trailing channel axis and scale pixels into [0, 1]\n",
    "X_train = X_train.reshape(-1, img_x, img_y, 1).astype('float32') / 255\n",
    "X_test = X_test.reshape(-1, img_x, img_y, 1).astype('float32') / 255\n",
    "\n",
    "# One-hot encode the integer labels into 10-way vectors\n",
    "y_train = keras.utils.to_categorical(y_train, num_classes=10)\n",
    "y_test = keras.utils.to_categorical(y_test, num_classes=10)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "WARNING:tensorflow:From /Users/jiangmaowei/opt/anaconda3/envs/pytorch/lib/python3.6/site-packages/tensorflow/python/ops/resource_variable_ops.py:435: colocate_with (from tensorflow.python.framework.ops) is deprecated and will be removed in a future version.\n",
      "Instructions for updating:\n",
      "Colocations handled automatically by placer.\n",
      "WARNING:tensorflow:From /Users/jiangmaowei/opt/anaconda3/envs/pytorch/lib/python3.6/site-packages/tensorflow/python/ops/math_ops.py:3066: to_int32 (from tensorflow.python.ops.math_ops) is deprecated and will be removed in a future version.\n",
      "Instructions for updating:\n",
      "Use tf.cast instead.\n",
      "Epoch 1/10\n",
      "60000/60000 [==============================] - 38s 637us/step - loss: 0.1484 - accuracy: 0.9549\n",
      "Epoch 2/10\n",
      "60000/60000 [==============================] - 34s 569us/step - loss: 0.0403 - accuracy: 0.9875\n",
      "Epoch 3/10\n",
      "60000/60000 [==============================] - 36s 604us/step - loss: 0.0287 - accuracy: 0.9910\n",
      "Epoch 4/10\n",
      "60000/60000 [==============================] - 39s 651us/step - loss: 0.0198 - accuracy: 0.9939\n",
      "Epoch 5/10\n",
      "60000/60000 [==============================] - 33s 542us/step - loss: 0.0158 - accuracy: 0.9949\n",
      "Epoch 6/10\n",
      "60000/60000 [==============================] - 31s 522us/step - loss: 0.0124 - accuracy: 0.9957\n",
      "Epoch 7/10\n",
      "60000/60000 [==============================] - 31s 511us/step - loss: 0.0128 - accuracy: 0.9960\n",
      "Epoch 8/10\n",
      "60000/60000 [==============================] - 32s 528us/step - loss: 0.0092 - accuracy: 0.9973\n",
      "Epoch 9/10\n",
      "60000/60000 [==============================] - 33s 555us/step - loss: 0.0076 - accuracy: 0.9975\n",
      "Epoch 10/10\n",
      "60000/60000 [==============================] - 33s 548us/step - loss: 0.0081 - accuracy: 0.9974\n"
     ]
    }
   ],
   "source": [
    "# Build the CNN: two conv/pool stages followed by two dense layers\n",
    "model = Sequential([\n",
    "    Conv2D(32, kernel_size=(5, 5), activation='relu', input_shape=(img_x, img_y, 1)),\n",
    "    MaxPool2D(pool_size=(2, 2), strides=(2, 2)),\n",
    "    Conv2D(64, kernel_size=(5, 5), activation='relu'),\n",
    "    MaxPool2D(pool_size=(2, 2), strides=(2, 2)),\n",
    "    Flatten(),\n",
    "    Dense(1000, activation='relu'),\n",
    "    Dense(10, activation='softmax'),\n",
    "])\n",
    "\n",
    "# Compile with the Adam optimizer and categorical cross-entropy loss\n",
    "model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])\n",
    "\n",
    "# Train for 10 epochs with mini-batches of 128 samples\n",
    "model.fit(X_train, y_train, batch_size=128, epochs=10)\n",
    "\n",
    "# Persist the trained model to disk\n",
    "model.save('cnn_model')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "10000/10000 [==============================] - 2s 175us/step\n",
      "[0.033100337240248974, 0.9912999868392944]\n",
      "loss 0.033100337240248974\n",
      "acc 0.9912999868392944\n"
     ]
    }
   ],
   "source": [
    "# Evaluate the trained model on the held-out test set\n",
    "score = model.evaluate(X_test, y_test)\n",
    "print(score)\n",
    "test_loss, test_acc = score\n",
    "print('loss', test_loss)\n",
    "print('acc', test_acc)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 28,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "(28, 28)\n",
      "(1, 28, 28, 1)\n"
     ]
    },
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAPsAAAD4CAYAAAAq5pAIAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8QVMy6AAAACXBIWXMAAAsTAAALEwEAmpwYAAARAklEQVR4nO3de5CV9X3H8c93l2WRm2FFVuSiCASDaUXdoEWqtl4KThPQsVZGM3Rig9Nox0wznRjbqTbTmdo2iZOZOGZIJaCTSLzE0UwZFSmjtW3UVVFAQBFBoVxF5WKBvXz7xz7aFff5nvXc5fd+zezs2ed7fnu+PPDhOef8zvP8zN0F4NjXUOsGAFQHYQcSQdiBRBB2IBGEHUjEgGo+2EBr9kEaUs2HBJJySAd1xA9bX7WSwm5msyT9WFKjpH919zui+w/SEJ1rF5fykAACz/mK3FrRT+PNrFHSXZJmS5oqaZ6ZTS329wGorFJes0+XtNHdN7n7EUlLJc0pT1sAyq2UsI+R9E6vn7dm2z7BzBaYWbuZtXfocAkPB6AUFX833t0Xunubu7c1qbnSDwcgRylh3yZpXK+fx2bbANShUsL+gqTJZjbBzAZKukbSY+VpC0C5FT315u6dZnaTpCfUM/W2yN3Xlq0zAGVV0jy7uy+TtKxMvQCoID4uCySCsAOJIOxAIgg7kAjCDiSCsAOJIOxAIgg7kAjCDiSCsAOJIOxAIgg7kAjCDiSCsAOJIOxAIgg7kAjCDiSCsAOJIOxAIgg7kAjCDiSCsAOJIOxAIgg7kAjCDiSCsAOJIOxAIgg7kAjCDiSipFVcgWOVNQ0M695xpEqdlE9JYTezzZL2S+qS1OnubeVoCkD5lePI/gfuvqcMvwdABfGaHUhEqWF3SU+a2YtmtqCvO5jZAjNrN7P2Dh0u8eEAFKvUp/Ez3X2bmY2StNzM1rv7M73v4O4LJS2UpOHW4iU+HoAilXRkd/dt2fddkh6RNL0cTQEov6LDbmZDzGzYR7clXSZpTbkaA1BepTyNb5X0iJl99Ht+6e6Pl6UrfG5Yc3NcP/203NqB04aHYzuGWFE9ffzY3cHvPi7+3Z0FHnvwzuCXS/rCy7vDevemLbk17+wMxxar6LC7+yZJZ5axFwAVxNQbkAjCDiSCsAOJIOxAIgg7kAhOcT3G2YD4r7hxzOiwfmT8yLC+f3w89bZ7Vv5HpP/qnGXh2FlD1oX1pgIzc+935//Zuz0e3NLYEdZ/c+BLYf0nv/pqWJ9w1/u5ta4974Zji8WRHUgEYQcSQdiBRBB2IBGEHUgEYQcSQdiBRDDPfgxoGDw4t9Y1bXI49q3Z+WMlaealq8P6lSPbw/rZA/OvRdpk8Vz3ps4Cc/hdw8L6CQ0Hc2utjf8bjj2+oTGsXzf89bD+0MxtYd3uHZpfZJ4dQCkIO5AIwg4kgrADiSDsQCIIO5AIwg4kgnn2elBgTrfxhJawvu/C/Ms17/6TeD75hjOeCOuzh8ZLAQxpiC+p/MLhUbm1n269KBz7zrJTw/qIN+JLLu89Pf+ft3/lg3Ds1ZNeDuuXDIv3y54DQ8L6+O79Yb0SOLIDiSDsQCIIO5AIwg4kgrADiSDsQCIIO5AI5tmrocB52wPGjwnrb181NqyfccX63Nrftf5HOHZU44GwvqEjf55ckv7lzcvCesfS1txay5p4rnnc5g1hvdD11ccHn0/Ys2NKOHbtn8fX0x/UEF9Xvvv5L4R137c9rFdCwSO7mS0ys11mtqbXthYzW25mb2TfR1S2TQCl6s/T+MWSZh217RZJK9x9sqQV2c8A6ljBsLv7M5L2HrV5jqQl2e0lkuaWty0A5Vbsa/ZWd//oRccOSbkvzMxsgaQFkjRI8fXOAFROye/Gu7tL8qC+0N3b3L2tSfEFBAFUTrFh32lmoyUp+76rfC0BqIRiw/6YpPnZ7fmSHi1POwAqpeBrdjO7X9JFkkaa2VZJt0m6Q9IDZna9
pC2Srq5kk593jZMmhPW3rjkprM+98tmwfu2I53Jrqw7Hc/TfW3dFWO9YGa/PPurlQ2F9+HOv5Na6P/wwHNsVVqWGQYPCugXX0z94cvzZh8tHxtfLX/z2jLA+7vH4fPmuD/aF9UooGHZ3n5dTurjMvQCoID4uCySCsAOJIOxAIgg7kAjCDiSCU1zLoLE1Pg10c4GptRvn/Sasn3fcm2H9u5uvzK1tWh5P+41aFZ+qOfi3+afPSlLXu0efNvFJ4YWmC11Ce+IpYf2dufF+1Yz3c0vXTlwRDj3icTR2PXNyWD9lw6qw7t2FJhbLjyM7kAjCDiSCsAOJIOxAIgg7kAjCDiSCsAOJYJ69n2xA/q7aOXdiOLbQKapzhq4L69etvy6sH1qSP988YUU8R+8fxks6a2w8l314evxnP3hS/n47MD4+zbRjSnwK7M1n/ltYnzf8tdzasoPxHP5tK/M/uyBJUx6PT1HtPnQ4rNcCR3YgEYQdSARhBxJB2IFEEHYgEYQdSARhBxLBPHs/WXP+ajYfXBjPVd964vNh/ecfnBHWd68scO50e/4aHUcmxUsP7516XFw/Oz7v+sRx74X1C07Kn+f/05b8S2BL0jkDC5zvbvGxavG+/HP5v788voT2F++L5/j1yutxvQbnqxfCkR1IBGEHEkHYgUQQdiARhB1IBGEHEkHYgUQwz14GDQ3h1dHVUOD/1C7F53V/eFp8bfeN80/MrQ344v5w7JWT4nPtJzTvDuuHvCmsn3/cxtzapCYPx3YX2G/37Yuv1//3K+fmP/YDR8Kxas8/F16qzXXfS1XwyG5mi8xsl5mt6bXtdjPbZmarsq/LK9smgFL152n8Ykmz+th+p7tPy76WlbctAOVWMOzu/oykeI0fAHWvlDfobjKzV7On+SPy7mRmC8ys3czaO1R/1+UCUlFs2O+WNFHSNEnbJf0w747uvtDd29y9rUn5J5MAqKyiwu7uO929y927Jf1M0vTytgWg3IoKu5n1Pm/yCklr8u4LoD4UnGc3s/slXSRppJltlXSbpIvMbJokl7RZ0g2Va7E++JH8ue4Bq4eGY+/68pfC+pXDXgnrX70s/r+0OZim39QZ9/aDd/qaaPl/v3wrftJmjfFnDHROfqm1Mb5e/hOH4nn07z8Zn5N++qL8zxj42vz5f+nzOY9eSMGwu/u8PjbfU4FeAFQQH5cFEkHYgUQQdiARhB1IBGEHEsEprv3knflTb6c+sCMcu9j/KKw//ofxpaSnjdga1o9rzO9t6dq2cOykO+PTZycPKHB67TWDw/qYpvxLTb/fHR9r/vq3V4X1SUsPhfXuVzcExWNvaq0QjuxAIgg7kAjCDiSCsAOJIOxAIgg7kAjCDiSCefb+8vzLHndtfCscesrd8bLGtvT4sL6ueWJYl+Wf4zrlg53hUD94MKzvmTs1rF914X+H9XOb8z+D8NP3zg3HjnpiYFhveL49rB+Lp6mWgiM7kAjCDiSCsAOJIOxAIgg7kAjCDiSCsAOJYJ69HII5eEnqei+eZ1ehegkaBsfnm++f/Tthfew34ksuX9/yn2F92cFJubUHH7owHDvh6S1hvbOzM6zjkziyA4kg7EAiCDuQCMIOJIKwA4kg7EAiCDuQCObZjwXB+eyHZ8TLRfs3d4f1W8YtC+svHBof1v/p0fxllSc9vCcc27ntf8I6PpuCR3YzG2dmK83sNTNba2Y3Z9tbzGy5mb2RfR9R+XYBFKs/T+M7JX3H3adKOk/SjWY2VdItkla4+2RJK7KfAdSpgmF39+3u/lJ2e7+kdZLGSJojaUl2tyWS5laoRwBl8Jles5vZqZLOkvScpFZ3356VdkhqzRmzQNICSRqk+HPaACqn3+/Gm9lQSQ9L+ra77+tdc3eX1OfZIO6+0N3b3L2tSc0lNQugeP0Ku5k1qSfov3D3X2ebd5rZ6Kw+WtKuyrQIoBwKPo03M5N0j6R17v6jXqXHJM2XdEf2/dGKdAjZgPivqfsr+Us+
v3VN/rScJP3jhJVhffWhcWH9H57+Wlif8uC+3FrX+jfDsYVOHcZn05/X7OdL+rqk1Wa2Ktt2q3pC/oCZXS9pi6SrK9IhgLIoGHZ3f1ZS3uHh4vK2A6BS+LgskAjCDiSCsAOJIOxAIgg7kAhOcf0c6LjgzLC++RvdubW/nPbv4dh3u4aG9TtfviSsT3iowLLIa4JLUbOkclVxZAcSQdiBRBB2IBGEHUgEYQcSQdiBRBB2IBHMs9eBAWPHhPVNswaG9Z/PuDu3tqPz+HDs3744N6yPfjh+7EHt68N61+HDYR3Vw5EdSARhBxJB2IFEEHYgEYQdSARhBxJB2IFEMM9eBY3Dh4f1XZfGyx7PvGBNWD+x4cPc2m1b5oRjWx8cFNaHPbUurHfty78uPOoLR3YgEYQdSARhBxJB2IFEEHYgEYQdSARhBxLRn/XZx0m6V1KrJJe00N1/bGa3S/qmpN3ZXW9192WVajRrJr/U2BgO9e4Ca32XcA3zht89PazvPG9EPP5r74b1741+PKwv3vt7ubV9D50cjm19am1YZx792NGfD9V0SvqOu79kZsMkvWhmy7Pane7+g8q1B6Bc+rM++3ZJ27Pb+81snaT40ioA6s5nes1uZqdKOkvSc9mmm8zsVTNbZGZ9Plc1swVm1m5m7R3iEkVArfQ77GY2VNLDkr7t7vsk3S1poqRp6jny/7Cvce6+0N3b3L2tSc2ldwygKP0Ku5k1qSfov3D3X0uSu+909y5375b0M0nTK9cmgFIVDLuZmaR7JK1z9x/12j66192ukBSfmgWgpvrzbvz5kr4uabWZrcq23SppnplNU8903GZJN1Sgv0+y/P+brLnAS4TOzrjelT+tJ0l2xuTc2vq/GBaO/dbMJ8P6Hw9dHdY3dbSE9V89PSO3NuWhDeFYptbS0Z9345+V1FcSKjunDqCs+AQdkAjCDiSCsAOJIOxAIgg7kAjCDiTi83Upae/OLxVaGjiYo5ekhsGDw/rbs/JPU73krFfCsb8/+PWwvt+bwvq3/uvasH76wr25ta6974VjkQ6O7EAiCDuQCMIOJIKwA4kg7EAiCDuQCMIOJMLcC1xiuZwPZrZb0pZem0ZK2lO1Bj6beu2tXvuS6K1Y5eztFHc/sa9CVcP+qQc3a3f3tpo1EKjX3uq1L4neilWt3ngaDySCsAOJqHXYF9b48SP12lu99iXRW7Gq0ltNX7MDqJ5aH9kBVAlhBxJRk7Cb2Swz22BmG83sllr0kMfMNpvZajNbZWbtNe5lkZntMrM1vba1mNlyM3sj+x6vB13d3m43s23ZvltlZpfXqLdxZrbSzF4zs7VmdnO2vab7LuirKvut6q/ZzaxR0uuSLpW0VdILkua5+2tVbSSHmW2W1ObuNf8AhpldIOmApHvd/cvZtn+WtNfd78j+oxzh7t+tk95ul3Sg1st4Z6sVje69zLikuZL+TDXcd0FfV6sK+60WR/bpkja6+yZ3PyJpqaQ5Neij7rn7M5KOvgzNHElLsttL1POPpepyeqsL7r7d3V/Kbu+X9NEy4zXdd0FfVVGLsI+R9E6vn7eqvtZ7d0lPmtmLZrag1s30odXdt2e3d0hqrWUzfSi4jHc1HbXMeN3su2KWPy8Vb9B92kx3P1vSbEk3Zk9X65L3vAarp7nTfi3jXS19LDP+sVruu2KXPy9VLcK+TdK4Xj+PzbbVBXffln3fJekR1d9S1Ds/WkE3+76rxv18rJ6W8e5rmXHVwb6r5fLntQj7C5Imm9kEMxso6RpJj9Wgj08xsyHZGycysyGSLlP9LUX9mKT52e35kh6tYS+fUC/LeOctM64a77uaL3/u7lX/knS5et6Rf1PS39Sih5y+TpP0Sva1tta9SbpfPU/rOtTz3sb1kk6QtELSG5KektRSR73dJ2m1pFfVE6zRNeptpnqeor8qaVX2dXmt913QV1X2Gx+XBRLBG3RAIgg7kAjCDiSCsAOJIOxAIgg7kAjCDiTi/wDANP9l
39CStAAAAABJRU5ErkJggg==",
      "text/plain": [
       "<Figure size 432x288 with 1 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    }
   ],
   "source": [
    "from PIL import Image\n",
    "import numpy as np\n",
    "import matplotlib.pyplot as plt  # for displaying the image\n",
    "\n",
    "# Load a handwritten-digit photo, convert to grayscale, and resize to the\n",
    "# 28x28 input size the network was trained on.\n",
    "# Other samples to try: '0.jpg', '1.jpg', '4.jpg'\n",
    "image = Image.open('9.jpg')\n",
    "image = image.convert('L')\n",
    "image = image.resize((28, 28))\n",
    "plt.imshow(image)\n",
    "plt.show()  # fixed: was `plt.show` without parentheses, a silent no-op\n",
    "\n",
    "# Convert to an array and scale into [0, 1] so the pixel range matches the\n",
    "# training preprocessing (X_train /= 255); previously the raw 0-255 values\n",
    "# were fed to the model.\n",
    "image = np.asarray(image).astype('float32') / 255\n",
    "print(image.shape)\n",
    "\n",
    "# Reshape to (batch, height, width, channels) as expected by the model.\n",
    "# NOTE(review): MNIST digits are white-on-black; if the photo is\n",
    "# black-on-white, invert (1 - image) before predicting -- confirm.\n",
    "image = image.reshape(1, image.shape[0], image.shape[1], 1)\n",
    "print(image.shape)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 29,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[[0. 0. 0. 0. 0. 0. 0. 0. 0. 1.]]\n"
     ]
    }
   ],
   "source": [
    "# Predict: returns a (1, 10) softmax distribution over digits 0-9\n",
    "\n",
    "prediction = model.predict(image)\n",
    "print(prediction)"
   ]
  }
 ],
 "metadata": {
  "interpreter": {
   "hash": "dcf5988c2a920ee14255ac22801cf5013c9e1c5a6e62f5559e16b96482b8a6f8"
  },
  "kernelspec": {
   "display_name": "Python 3.6.13 64-bit ('pytorch': conda)",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.13"
  },
  "orig_nbformat": 4
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
