{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# 结合estimator_template中的说明 (custom tf.estimator iris demo — see the notes in estimator_template)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [],
   "source": [
    "import tensorflow as tf"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "'1.13.1'"
      ]
     },
     "execution_count": 2,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "tf.__version__"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [],
   "source": [
     "def create_model(features,feature_columns,hidden_units,output_cls):\n",
     "    '''Build a feed-forward classifier and return its unscaled logits.\n",
     "\n",
     "    Args:\n",
     "        features: dict of input tensors, keyed by feature-column name.\n",
     "        feature_columns: list of tf.feature_column objects describing the inputs.\n",
     "        hidden_units: iterable of ints, one hidden layer per entry (its width).\n",
     "        output_cls: int, number of output classes.\n",
     "\n",
     "    Returns:\n",
     "        A [batch_size, output_cls] float tensor of unscaled logits.\n",
     "    '''\n",
     "    # Convert the raw feature dict into a single dense input tensor.\n",
     "    inputs=tf.feature_column.input_layer(features=features,feature_columns=feature_columns)\n",
     "    \n",
     "    # define hidden layer: stack one ReLU dense layer per entry in hidden_units\n",
     "    for units in hidden_units:\n",
     "        inputs=tf.layers.dense(inputs=inputs,units=units,activation=tf.nn.relu)\n",
     "    # define output layer: linear (no activation) so the loss applies softmax itself\n",
     "    logits=tf.layers.dense(inputs=inputs,units=output_cls)\n",
     "    return logits   \n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {
    "code_folding": []
   },
   "outputs": [],
   "source": [
     "def model_fn_builder(lr):\n",
     "    '''Return a model_fn closure for tf.estimator.Estimator.\n",
     "\n",
     "    Args:\n",
     "        lr: float, learning rate for the Adam optimizer used in TRAIN mode.\n",
     "\n",
     "    Returns:\n",
     "        A model_fn(features, labels, mode, params, config) callable. params must\n",
     "        supply 'feature_columns', 'hidden_units' and 'output_cls' (see create_model).\n",
     "    '''\n",
     "    \n",
     "    def model_fn(features, labels, mode, params,config):\n",
     "        \n",
     "        # get unscaled data (logits) from the shared network-building helper\n",
     "        logits=create_model(features,params['feature_columns'],params['hidden_units'],params['output_cls'])\n",
     "        \n",
     "        # Hard class id and per-class probabilities derived from the logits.\n",
     "        predict_cls=tf.math.argmax(input=logits,axis=1)\n",
     "        predict_pro=tf.nn.softmax(logits=logits,axis=1)\n",
     "        # is_predict=True: PREDICT mode; otherwise TRAIN or EVAL\n",
     "        is_predict=(mode==tf.estimator.ModeKeys.PREDICT)\n",
     "        \n",
     "        if not is_predict:\n",
     "            # train / eval: both need the loss\n",
     "            loss=tf.losses.sparse_softmax_cross_entropy(labels=labels,logits=logits)\n",
     "               \n",
     "            def metric_fn(labels,predictions):\n",
     "                '''\n",
     "                Define metrics, returned as eval_metric_ops: {name: (value_op, update_op)}.\n",
     "                NOTE(review): tf.metrics.recall/precision treat inputs as binary\n",
     "                (non-zero == positive); with 3 iris classes these are not true\n",
     "                multi-class recall/precision -- confirm or switch to per-class metrics.\n",
     "                '''\n",
     "                accuracy,accuracy_update=tf.metrics.accuracy(labels=labels,predictions=predictions,name='iris_accuracy')\n",
     "#                 auc=tf.metrics.auc(labels=labels,predictions=predictions,name='iris_auc')\n",
     "                recall,recall_update=tf.metrics.recall(labels=labels,predictions=predictions,name='iris_recall')\n",
     "                precision,precision_update=tf.metrics.precision(labels=labels,predictions=predictions,name='iris_precision')\n",
     "#                 with tf.control_dependencies([recall,precision]):\n",
     "#                     f1_score=f1(recall=recall,precision=precision)\n",
     "                \n",
     "                return {\n",
     "                    'accuracy':(accuracy,accuracy_update),\n",
     "#                     'auc':auc,\n",
     "                    'recall':(recall,recall_update),\n",
     "                    'precision':(precision,precision_update)\n",
     "#                     'f1_score':f1_score                    \n",
     "                }\n",
     "                    \n",
     "                \n",
     "            if mode==tf.estimator.ModeKeys.EVAL:\n",
     "                return tf.estimator.EstimatorSpec(mode=mode,loss=loss,eval_metric_ops=metric_fn(labels,predict_cls))\n",
     "            \n",
     "            # train process: Adam step that also increments the global step counter\n",
     "            train_op=tf.train.AdamOptimizer(learning_rate=lr).minimize(loss=loss,global_step=tf.train.get_global_step())\n",
     "            return tf.estimator.EstimatorSpec(mode=mode,loss=loss,train_op=train_op,eval_metric_ops=metric_fn(labels,predict_cls))\n",
     "                \n",
     "        \n",
     "        else:\n",
     "            # PREDICT mode: expose class ids and class probabilities\n",
     "            predictions={'predict_cls':predict_cls,'predict_pro':predict_pro}\n",
     "            return tf.estimator.EstimatorSpec(mode=mode,predictions=predictions)     \n",
     "    return model_fn"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [],
   "source": [
    "TRAIN_URL = \"http://download.tensorflow.org/data/iris_training.csv\"\n",
    "TEST_URL = \"http://download.tensorflow.org/data/iris_test.csv\"\n",
    "def downloadfiles():\n",
    "    train_path = tf.keras.utils.get_file(r'F:\\testDemo\\AI\\estimator\\data\\iris\\train.csv', TRAIN_URL)\n",
    "    test_path = tf.keras.utils.get_file(r'F:\\testDemo\\AI\\estimator\\data\\\\iris\\test.csv', TEST_URL)\n",
    "    return train_path,test_path"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "F:\\testDemo\\AI\\estimator\\data\\iris\\train.csv\n",
      "F:\\testDemo\\AI\\estimator\\data\\\\iris\\test.csv\n"
     ]
    }
   ],
   "source": [
     "# Fetch (or reuse the cached) data files and show where they landed.\n",
     "train_path,test_path=downloadfiles()\n",
     "print(train_path)\n",
     "print(test_path)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [],
   "source": [
     "# Per-column CSV defaults: 4 float features + 1 int label.\n",
     "# NOTE(review): the input_fn builders redefine CSV_TYPES locally, so this\n",
     "# module-level copy is currently unused by them.\n",
     "CSV_TYPES=[[0.0], [0.0], [0.0], [0.0], [0]]\n",
     "CSV_COLUMN_NAMES = ['SepalLength', 'SepalWidth',\n",
     "                    'PetalLength', 'PetalWidth', 'label']\n",
     "# Human-readable class names; len(label) doubles as the class count.\n",
     "label = ['Setosa', 'Versicolor', 'Virginica']\n",
     "BATCH_SIZE=16\n",
     "EPOCHS=200"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {},
   "outputs": [],
   "source": [
    "def train_input_fn_builder(file_path,batch_size,epochs):\n",
    "    '''\n",
    "    创建 输入函数闭包\n",
    "    '''\n",
    "    def parse_line(line):\n",
    "        '''\n",
    "        parse csv line to features fromat\n",
    "        '''\n",
    "        CSV_TYPES=[[0.0], [0.0], [0.0], [0.0], [0]]\n",
    "        fileds=tf.decode_csv(line,record_defaults=CSV_TYPES)\n",
    "        features=dict(zip(CSV_COLUMN_NAMES,fileds))\n",
    "        label=features.pop('label')\n",
    "        return features,label\n",
    "        \n",
    "    # 可以执行其它操作\n",
    "    \n",
    "    def input_fn():\n",
    "        \n",
    "        dataset=tf.data.TextLineDataset(file_path).skip(1)\n",
    "        dataset=dataset.map(parse_line)\n",
    "        dataset=dataset.shuffle(1000).repeat(epochs).batch(batch_size)\n",
    "        return dataset # 返回的 顺序要和 model_fn一致 或者 dataset元素 格式为（features,label）元组 也可以\n",
    "    return input_fn"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {},
   "outputs": [],
   "source": [
    "model_dir=r'F:\\testDemo\\AI\\estimator\\model\\iris'\n",
    "params={}\n",
    "feature_columns=[]\n",
    "for i in range(len(CSV_COLUMN_NAMES)-1):\n",
    "    feature_columns.append(tf.feature_column.numeric_column(CSV_COLUMN_NAMES[i]))\n",
    "hidden_units=[128,256,256]\n",
    "output_cls=len(label)\n",
    "params['feature_columns']=feature_columns\n",
    "params['hidden_units']=hidden_units\n",
    "params['output_cls']=output_cls\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[NumericColumn(key='SepalLength', shape=(1,), default_value=None, dtype=tf.float32, normalizer_fn=None),\n",
       " NumericColumn(key='SepalWidth', shape=(1,), default_value=None, dtype=tf.float32, normalizer_fn=None),\n",
       " NumericColumn(key='PetalLength', shape=(1,), default_value=None, dtype=tf.float32, normalizer_fn=None),\n",
       " NumericColumn(key='PetalWidth', shape=(1,), default_value=None, dtype=tf.float32, normalizer_fn=None)]"
      ]
     },
     "execution_count": 10,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "feature_columns"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "INFO:tensorflow:Using config: {'_model_dir': 'F:\\\\testDemo\\\\AI\\\\estimator\\\\model\\\\iris', '_tf_random_seed': None, '_save_summary_steps': 100, '_save_checkpoints_steps': 100, '_save_checkpoints_secs': None, '_session_config': allow_soft_placement: true\n",
      "graph_options {\n",
      "  rewrite_options {\n",
      "    meta_optimizer_iterations: ONE\n",
      "  }\n",
      "}\n",
      ", '_keep_checkpoint_max': 5, '_keep_checkpoint_every_n_hours': 10000, '_log_step_count_steps': 100, '_train_distribute': None, '_device_fn': None, '_protocol': None, '_eval_distribute': None, '_experimental_distribute': None, '_service': None, '_cluster_spec': <tensorflow.python.training.server_lib.ClusterSpec object at 0x000001675E013BA8>, '_task_type': 'worker', '_task_id': 0, '_global_id_in_cluster': 0, '_master': '', '_evaluation_master': '', '_is_chief': True, '_num_ps_replicas': 0, '_num_worker_replicas': 1}\n"
     ]
    }
   ],
   "source": [
     "# Checkpoint every 100 steps; all other RunConfig fields keep their defaults.\n",
     "config=tf.estimator.RunConfig(save_checkpoints_steps=100)\n",
     "\n",
     "# lr=0.001 is baked into model_fn via the builder closure.\n",
     "estimator=tf.estimator.Estimator(model_fn=model_fn_builder(0.001),model_dir=model_dir,params=params,config=config)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "WARNING:tensorflow:From E:\\Python\\virtualenv\\NER\\lib\\site-packages\\tensorflow\\python\\framework\\op_def_library.py:263: colocate_with (from tensorflow.python.framework.ops) is deprecated and will be removed in a future version.\n",
      "Instructions for updating:\n",
      "Colocations handled automatically by placer.\n",
      "INFO:tensorflow:Calling model_fn.\n",
      "WARNING:tensorflow:From E:\\Python\\virtualenv\\NER\\lib\\site-packages\\tensorflow\\python\\feature_column\\feature_column.py:205: NumericColumn._get_dense_tensor (from tensorflow.python.feature_column.feature_column_v2) is deprecated and will be removed after 2018-11-30.\n",
      "Instructions for updating:\n",
      "The old _FeatureColumn APIs are being deprecated. Please use the new FeatureColumn APIs instead.\n",
      "WARNING:tensorflow:From E:\\Python\\virtualenv\\NER\\lib\\site-packages\\tensorflow\\python\\feature_column\\feature_column.py:2121: NumericColumn._transform_feature (from tensorflow.python.feature_column.feature_column_v2) is deprecated and will be removed after 2018-11-30.\n",
      "Instructions for updating:\n",
      "The old _FeatureColumn APIs are being deprecated. Please use the new FeatureColumn APIs instead.\n",
      "WARNING:tensorflow:From E:\\Python\\virtualenv\\NER\\lib\\site-packages\\tensorflow\\python\\feature_column\\feature_column_v2.py:2703: to_float (from tensorflow.python.ops.math_ops) is deprecated and will be removed in a future version.\n",
      "Instructions for updating:\n",
      "Use tf.cast instead.\n",
      "WARNING:tensorflow:From E:\\Python\\virtualenv\\NER\\lib\\site-packages\\tensorflow\\python\\feature_column\\feature_column.py:206: NumericColumn._variable_shape (from tensorflow.python.feature_column.feature_column_v2) is deprecated and will be removed after 2018-11-30.\n",
      "Instructions for updating:\n",
      "The old _FeatureColumn APIs are being deprecated. Please use the new FeatureColumn APIs instead.\n",
      "WARNING:tensorflow:From <ipython-input-3-e18708218f55>:6: dense (from tensorflow.python.layers.core) is deprecated and will be removed in a future version.\n",
      "Instructions for updating:\n",
      "Use keras.layers.dense instead.\n",
      "WARNING:tensorflow:From E:\\Python\\virtualenv\\NER\\lib\\site-packages\\tensorflow\\python\\ops\\metrics_impl.py:2176: div (from tensorflow.python.ops.math_ops) is deprecated and will be removed in a future version.\n",
      "Instructions for updating:\n",
      "Deprecated in favor of operator or tf.math.divide.\n",
      "INFO:tensorflow:Done calling model_fn.\n",
      "INFO:tensorflow:Create CheckpointSaverHook.\n",
      "INFO:tensorflow:Graph was finalized.\n",
      "INFO:tensorflow:Running local_init_op.\n",
      "INFO:tensorflow:Done running local_init_op.\n",
      "INFO:tensorflow:Saving checkpoints for 0 into F:\\testDemo\\AI\\estimator\\model\\iris\\model.ckpt.\n",
      "INFO:tensorflow:loss = 1.3218495, step = 1\n",
      "INFO:tensorflow:Saving checkpoints for 100 into F:\\testDemo\\AI\\estimator\\model\\iris\\model.ckpt.\n",
      "INFO:tensorflow:global_step/sec: 190.777\n",
      "INFO:tensorflow:loss = 0.037931487, step = 101 (0.526 sec)\n",
      "INFO:tensorflow:Saving checkpoints for 200 into F:\\testDemo\\AI\\estimator\\model\\iris\\model.ckpt.\n",
      "INFO:tensorflow:global_step/sec: 173.126\n",
      "INFO:tensorflow:loss = 0.054895025, step = 201 (0.577 sec)\n",
      "INFO:tensorflow:Saving checkpoints for 300 into F:\\testDemo\\AI\\estimator\\model\\iris\\model.ckpt.\n",
      "INFO:tensorflow:global_step/sec: 189.752\n",
      "INFO:tensorflow:loss = 0.037694715, step = 301 (0.527 sec)\n",
      "INFO:tensorflow:Saving checkpoints for 400 into F:\\testDemo\\AI\\estimator\\model\\iris\\model.ckpt.\n",
      "INFO:tensorflow:global_step/sec: 224.966\n",
      "INFO:tensorflow:loss = 0.28153986, step = 401 (0.450 sec)\n",
      "INFO:tensorflow:Saving checkpoints for 500 into F:\\testDemo\\AI\\estimator\\model\\iris\\model.ckpt.\n",
      "WARNING:tensorflow:From E:\\Python\\virtualenv\\NER\\lib\\site-packages\\tensorflow\\python\\training\\saver.py:966: remove_checkpoint (from tensorflow.python.training.checkpoint_management) is deprecated and will be removed in a future version.\n",
      "Instructions for updating:\n",
      "Use standard file APIs to delete files with this prefix.\n",
      "INFO:tensorflow:global_step/sec: 104.268\n",
      "INFO:tensorflow:loss = 0.039843455, step = 501 (0.954 sec)\n",
      "INFO:tensorflow:Saving checkpoints for 600 into F:\\testDemo\\AI\\estimator\\model\\iris\\model.ckpt.\n",
      "INFO:tensorflow:global_step/sec: 216.766\n",
      "INFO:tensorflow:loss = 0.031205095, step = 601 (0.462 sec)\n",
      "INFO:tensorflow:Saving checkpoints for 700 into F:\\testDemo\\AI\\estimator\\model\\iris\\model.ckpt.\n",
      "INFO:tensorflow:global_step/sec: 203.251\n",
      "INFO:tensorflow:loss = 0.23051858, step = 701 (0.492 sec)\n",
      "INFO:tensorflow:Saving checkpoints for 800 into F:\\testDemo\\AI\\estimator\\model\\iris\\model.ckpt.\n",
      "INFO:tensorflow:global_step/sec: 203.95\n",
      "INFO:tensorflow:loss = 0.20566535, step = 801 (0.491 sec)\n",
      "INFO:tensorflow:Saving checkpoints for 900 into F:\\testDemo\\AI\\estimator\\model\\iris\\model.ckpt.\n",
      "INFO:tensorflow:global_step/sec: 201.206\n",
      "INFO:tensorflow:loss = 0.010542911, step = 901 (0.496 sec)\n",
      "INFO:tensorflow:Saving checkpoints for 1000 into F:\\testDemo\\AI\\estimator\\model\\iris\\model.ckpt.\n",
      "INFO:tensorflow:global_step/sec: 148.476\n",
      "INFO:tensorflow:loss = 0.26764137, step = 1001 (0.674 sec)\n",
      "INFO:tensorflow:Saving checkpoints for 1100 into F:\\testDemo\\AI\\estimator\\model\\iris\\model.ckpt.\n",
      "INFO:tensorflow:global_step/sec: 227.628\n",
      "INFO:tensorflow:loss = 0.050469868, step = 1101 (0.439 sec)\n",
      "INFO:tensorflow:Saving checkpoints for 1200 into F:\\testDemo\\AI\\estimator\\model\\iris\\model.ckpt.\n",
      "INFO:tensorflow:global_step/sec: 140.26\n",
      "INFO:tensorflow:loss = 0.024425946, step = 1201 (0.713 sec)\n",
      "INFO:tensorflow:Saving checkpoints for 1300 into F:\\testDemo\\AI\\estimator\\model\\iris\\model.ckpt.\n",
      "INFO:tensorflow:global_step/sec: 185.075\n",
      "INFO:tensorflow:loss = 0.0046257777, step = 1301 (0.540 sec)\n",
      "INFO:tensorflow:Saving checkpoints for 1400 into F:\\testDemo\\AI\\estimator\\model\\iris\\model.ckpt.\n",
      "INFO:tensorflow:global_step/sec: 190.318\n",
      "INFO:tensorflow:loss = 0.06876993, step = 1401 (0.524 sec)\n",
      "INFO:tensorflow:Saving checkpoints for 1500 into F:\\testDemo\\AI\\estimator\\model\\iris\\model.ckpt.\n",
      "INFO:tensorflow:Loss for final step: 0.0028557044.\n"
     ]
    }
   ],
   "source": [
    "train=estimator.train(input_fn=train_input_fn_builder(file_path=train_path,batch_size=BATCH_SIZE,epochs=EPOCHS),steps=10000)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "metadata": {},
   "outputs": [],
   "source": [
    "def eval_input_fn_builder(file_path,batch_size):\n",
    "    '''\n",
    "    创建 输入函数闭包\n",
    "    '''\n",
    "    def parse_line(line):\n",
    "        '''\n",
    "        parse csv line to features fromat\n",
    "        '''\n",
    "        CSV_TYPES=[[0.0], [0.0], [0.0], [0.0], [0]]\n",
    "        fileds=tf.decode_csv(line,record_defaults=CSV_TYPES)\n",
    "        features=dict(zip(CSV_COLUMN_NAMES,fileds))\n",
    "        label=features.pop('label')\n",
    "        return features,label\n",
    "        \n",
    "    # 可以执行其它操作\n",
    "    \n",
    "    def input_fn():\n",
    "        \n",
    "        dataset=tf.data.TextLineDataset(file_path).skip(1)\n",
    "        dataset=dataset.map(parse_line)\n",
    "        dataset=dataset.batch(batch_size)\n",
    "#         dataset=dataset.shuffle(1000).repeat(epochs).batch(batch_size)\n",
    "        return dataset # 返回的 顺序要和 model_fn一致 或者 dataset元素 格式为（features,label）元组 也可以\n",
    "    return input_fn"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "INFO:tensorflow:Calling model_fn.\n",
      "INFO:tensorflow:Done calling model_fn.\n",
      "INFO:tensorflow:Starting evaluation at 2019-05-29T12:37:42Z\n",
      "INFO:tensorflow:Graph was finalized.\n",
      "WARNING:tensorflow:From E:\\Python\\virtualenv\\NER\\lib\\site-packages\\tensorflow\\python\\training\\saver.py:1266: checkpoint_exists (from tensorflow.python.training.checkpoint_management) is deprecated and will be removed in a future version.\n",
      "Instructions for updating:\n",
      "Use standard file APIs to check for files with this prefix.\n",
      "INFO:tensorflow:Restoring parameters from F:\\testDemo\\AI\\estimator\\model\\iris\\model.ckpt-1500\n",
      "INFO:tensorflow:Running local_init_op.\n",
      "INFO:tensorflow:Done running local_init_op.\n",
      "INFO:tensorflow:Finished evaluation at 2019-05-29-12:37:42\n",
      "INFO:tensorflow:Saving dict for global step 1500: accuracy = 0.96666664, global_step = 1500, loss = 0.054396607, precision = 1.0, recall = 1.0\n",
      "INFO:tensorflow:Saving 'checkpoint_path' summary for global step 1500: F:\\testDemo\\AI\\estimator\\model\\iris\\model.ckpt-1500\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "{'accuracy': 0.96666664,\n",
       " 'loss': 0.054396607,\n",
       " 'precision': 1.0,\n",
       " 'recall': 1.0,\n",
       " 'global_step': 1500}"
      ]
     },
     "execution_count": 14,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "estimator.evaluate(input_fn=eval_input_fn_builder(test_path,16))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "metadata": {},
   "outputs": [],
   "source": [
    "def serving_input_receiver_fn():\n",
    "    input_str=tf.placeholder(tf.string,name='input_image')\n",
    "    # 在这里的处理方式，根据 输入的不同，处理方式 会不同，我这里只是demo\n",
    "    tt=tf.string_split(input_str,',').values\n",
    "    SepalLength=tf.string_to_number([tt[0]],tf.float32)\n",
    "    SepalWidth=tf.string_to_number([tt[1]],tf.float32)\n",
    "    PetalLength=tf.string_to_number([tt[2]],tf.float32)\n",
    "    PetalWidth=tf.string_to_number([tt[3]],tf.float32)\n",
    "    \n",
    "    \n",
    "    receiver_tensors = {'inputs': input_str}\n",
    "   \n",
    "    features = {\n",
    "       'images': SepalLength\n",
    "          \n",
    "    }   \n",
    "    \n",
    "    return tf.estimator.export.ServingInputReceiver(features,receiver_tensors)\n",
    "    \n",
    "    "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 16,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "INFO:tensorflow:Calling model_fn.\n",
      "INFO:tensorflow:Done calling model_fn.\n",
      "WARNING:tensorflow:From E:\\Python\\virtualenv\\NER\\lib\\site-packages\\tensorflow\\python\\saved_model\\signature_def_utils_impl.py:205: build_tensor_info (from tensorflow.python.saved_model.utils_impl) is deprecated and will be removed in a future version.\n",
      "Instructions for updating:\n",
      "This function will only be available through the v1 compatibility library as tf.compat.v1.saved_model.utils.build_tensor_info or tf.compat.v1.saved_model.build_tensor_info.\n",
      "INFO:tensorflow:Signatures INCLUDED in export for Classify: None\n",
      "INFO:tensorflow:Signatures INCLUDED in export for Regress: None\n",
      "INFO:tensorflow:Signatures INCLUDED in export for Predict: ['serving_default']\n",
      "INFO:tensorflow:Signatures INCLUDED in export for Train: None\n",
      "INFO:tensorflow:Signatures INCLUDED in export for Eval: None\n",
      "INFO:tensorflow:Restoring parameters from F:\\testDemo\\AI\\estimator\\model\\iris\\model.ckpt-1500\n",
      "INFO:tensorflow:Assets added to graph.\n",
      "INFO:tensorflow:No assets to write.\n",
      "INFO:tensorflow:SavedModel written to: export_base/iris\\temp-b'1559133463'\\saved_model.pb\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "b'export_base/iris\\\\1559133463'"
      ]
     },
     "execution_count": 16,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "estimator.export_savedmodel('export_base/fashion',serving_input_receiver_fn=serving_input_receiver_fn)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "py3_NER",
   "language": "python",
   "name": "ner"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.1"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
