{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [],
   "source": [
    "import pandas as pd\n",
    "from tqdm.notebook import tqdm\n",
    "from sklearn.preprocessing import MinMaxScaler"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "数据总量 (34416, 33)\n"
     ]
    }
   ],
   "source": [
    "# Load the raw training data from ./pv_data into a DataFrame.\n",
    "file_dir = './pv_data/train_data_1.csv'\n",
    "file_dir_2 = './pv_data/train_data.csv'  # NOTE(review): unused in the visible cells -- confirm before removing\n",
    "\n",
    "df = pd.read_csv(file_dir)\n",
    "# Optional filter to a single 'ycsb' id (disabled); presumably one plant/device -- verify.\n",
    "#df = df[all_data_frame['ycsb']=='202106300288GF']\n",
    "print('数据总量',df.shape)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Feature selection: continuous (numeric) input columns.\n",
    "NUMERIC_COLS = ['gfrl',\n",
    "                'cur_a','cur_b','cur_c',\n",
    "                'vol_a','vol_b','vol_c',\n",
    "                'p', 'q', 'szgl', 'hgl', 'glys',\n",
    "                'low_tp', 'high_tp', 'avg_tp', \n",
    "                '地面气压(hPa)', '气温2m(℃)', '地表温度(℃)', '相对湿度(%)', '风速','总云量(tcc)', '净日照强度(net,J/m2)' \n",
    "                ]\n",
    "\n",
    "# Categorical (discrete) features: ['sbbm', 'ycsb', 'hour']\n",
    "\n",
    "# Columns excluded from the feature set; note 'fdgl' is used as the label later.\n",
    "IGNORE_COLS = [\"index\", \"data_date\", \"time\", \"yhmc\", \"fdgl\", 'wind','weather']"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Min-max scale the continuous features into [0, 1].\n",
    "# Reuse NUMERIC_COLS instead of repeating the same column list literal\n",
    "# (the two copies had to be kept in sync by hand).\n",
    "selected_features = list(NUMERIC_COLS)\n",
    "\n",
    "scaler = MinMaxScaler()\n",
    "# Preserve df's index so the concat in the next cell re-aligns rows\n",
    "# correctly even if df does not carry a default RangeIndex.\n",
    "scaled_data = pd.DataFrame(scaler.fit_transform(df[selected_features]),\n",
    "                           columns=selected_features, index=df.index)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Swap the raw numeric columns for their scaled counterparts:\n",
    "# drop the originals and append the scaled frame column-wise.\n",
    "df = pd.concat([df.drop(columns=selected_features), scaled_data], axis=1)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "测试数据总量 (144, 33)\n"
     ]
    }
   ],
   "source": [
    "# Hold out a single day as the test set.\n",
    "# NOTE(review): 'time' appears to hold dates as YYYYMMDD integers -- confirm.\n",
    "dfTest = df[df['time']==20211114]\n",
    "print('测试数据总量',dfTest.shape)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "训练数据总量 (34272, 33)\n"
     ]
    }
   ],
   "source": [
    "# Training set: everything except the held-out day (complement of dfTest).\n",
    "dfTrain = df[df['time']!=20211114]\n",
    "print('训练数据总量',dfTrain.shape)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Shuffle the training data (currently disabled).\n",
    "#dfTrain = dfTrain.sample(frac=1)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Alternative train/test split by row position (currently disabled).\n",
    "# dfTrain = df\n",
    "# dfTest = df[30000:] "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Build the feature dictionary from the feature columns (FM-style indexing):\n",
    "# each numeric column gets one global feature index; each categorical column\n",
    "# gets one index per distinct value. total_feature ends as the overall count.\n",
    "feature_dict = {}\n",
    "total_feature = 0\n",
    "for col in df.columns:\n",
    "    if col in IGNORE_COLS:\n",
    "        continue\n",
    "    if col in NUMERIC_COLS:\n",
    "        feature_dict[col] = total_feature\n",
    "        total_feature += 1\n",
    "    else:\n",
    "        unique_val = df[col].unique()\n",
    "        feature_dict[col] = {val: idx for idx, val in enumerate(unique_val, start=total_feature)}\n",
    "        total_feature += len(unique_val)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Oversample the training set 8x by repetition -- replaces three successive\n",
    "# self-concatenations with one equivalent concat (same row order).\n",
    "# NOTE(review): exact duplication only inflates the epoch size without adding\n",
    "# information -- confirm this is intentional.\n",
    "dfTrain = pd.concat([dfTrain] * 8)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {},
   "outputs": [],
   "source": [
    "\"\"\"\n",
    "对训练集进行转化\n",
    "\"\"\"\n",
    "# Transform the training set for the FM-style model.\n",
    "# Extract the label column ('fdgl') as a list of 1-element lists.\n",
    "train_y = dfTrain[['fdgl']].values.tolist()\n",
    "\n",
    "# Drop the label and the row-id column from the features.\n",
    "dfTrain.drop(['fdgl','index'],axis=1,inplace=True)\n",
    "\n",
    "# Two copies: one becomes the feature-index matrix, the other the\n",
    "# feature-value matrix (both filled in the next cell).\n",
    "train_feature_index = dfTrain.copy()\n",
    "\n",
    "train_feature_value = dfTrain.copy()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Fill the training feature-index / feature-value matrices, column by column:\n",
    "#   - ignored columns are removed from both matrices\n",
    "#   - numeric columns: a constant feature index; value = the (scaled) number\n",
    "#   - categorical columns: per-value index lookup; value fixed to 1\n",
    "for col in list(train_feature_index.columns):\n",
    "    if col in IGNORE_COLS:\n",
    "        train_feature_index.drop(col, axis=1, inplace=True)\n",
    "        train_feature_value.drop(col, axis=1, inplace=True)\n",
    "    elif col in NUMERIC_COLS:\n",
    "        train_feature_index[col] = feature_dict[col]\n",
    "    else:\n",
    "        mapping = feature_dict[col]\n",
    "        train_feature_index[col] = train_feature_index[col].map(mapping)\n",
    "        train_feature_value[col] = 1"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "D:\\evo\\anaconda\\envs\\tf1.14\\lib\\site-packages\\pandas\\core\\frame.py:4913: SettingWithCopyWarning: \n",
      "A value is trying to be set on a copy of a slice from a DataFrame\n",
      "\n",
      "See the caveats in the documentation: https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy\n",
      "  errors=errors,\n"
     ]
    }
   ],
   "source": [
    "\"\"\"\n",
    "对测试集进行转化\n",
    "\"\"\"\n",
    "# Transform the test set the same way as the training set.\n",
    "# dfTest is a row-slice of df: take an explicit copy first so the in-place\n",
    "# drops below do not raise SettingWithCopyWarning (seen in this cell's\n",
    "# original stderr) or write through to df.\n",
    "dfTest = dfTest.copy()\n",
    "test_ids = dfTest['index'].values.tolist()\n",
    "dfTest.drop(['index'],axis=1,inplace=True)\n",
    "\n",
    "test_feature_index = dfTest.copy()\n",
    "test_feature_value = dfTest.copy()\n",
    "\n",
    "for col in test_feature_index.columns:\n",
    "    if col in IGNORE_COLS:\n",
    "        # Non-feature column: remove from both matrices.\n",
    "        test_feature_index.drop(col,axis=1,inplace=True)\n",
    "        test_feature_value.drop(col,axis=1,inplace=True)\n",
    "        continue\n",
    "    elif col in NUMERIC_COLS:\n",
    "        # Numeric feature: constant index; value is the scaled number.\n",
    "        test_feature_index[col] = feature_dict[col]\n",
    "    else:\n",
    "        # Categorical feature: per-value index; value fixed to 1.\n",
    "        test_feature_index[col] = test_feature_index[col].map(feature_dict[col])\n",
    "        test_feature_value[col] = 1"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Optionally persist the test feature-index matrix (disabled):\n",
    "#test_feature_index.to_csv('./data/test_index.csv')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 16,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Optionally persist the test feature-value matrix (disabled):\n",
    "#test_feature_value.to_csv('./data/test_value.csv')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Optionally persist the held-out test frame (disabled):\n",
    "#dfTest.to_csv('./data/label.csv')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 18,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "D:\\evo\\anaconda\\envs\\tf1.14\\lib\\site-packages\\tensorflow\\python\\framework\\dtypes.py:516: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n",
      "  _np_qint8 = np.dtype([(\"qint8\", np.int8, 1)])\n",
      "D:\\evo\\anaconda\\envs\\tf1.14\\lib\\site-packages\\tensorflow\\python\\framework\\dtypes.py:517: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n",
      "  _np_quint8 = np.dtype([(\"quint8\", np.uint8, 1)])\n",
      "D:\\evo\\anaconda\\envs\\tf1.14\\lib\\site-packages\\tensorflow\\python\\framework\\dtypes.py:518: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n",
      "  _np_qint16 = np.dtype([(\"qint16\", np.int16, 1)])\n",
      "D:\\evo\\anaconda\\envs\\tf1.14\\lib\\site-packages\\tensorflow\\python\\framework\\dtypes.py:519: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n",
      "  _np_quint16 = np.dtype([(\"quint16\", np.uint16, 1)])\n",
      "D:\\evo\\anaconda\\envs\\tf1.14\\lib\\site-packages\\tensorflow\\python\\framework\\dtypes.py:520: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n",
      "  _np_qint32 = np.dtype([(\"qint32\", np.int32, 1)])\n",
      "D:\\evo\\anaconda\\envs\\tf1.14\\lib\\site-packages\\tensorflow\\python\\framework\\dtypes.py:525: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n",
      "  np_resource = np.dtype([(\"resource\", np.ubyte, 1)])\n",
      "D:\\evo\\anaconda\\envs\\tf1.14\\lib\\site-packages\\tensorboard\\compat\\tensorflow_stub\\dtypes.py:541: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n",
      "  _np_qint8 = np.dtype([(\"qint8\", np.int8, 1)])\n",
      "D:\\evo\\anaconda\\envs\\tf1.14\\lib\\site-packages\\tensorboard\\compat\\tensorflow_stub\\dtypes.py:542: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n",
      "  _np_quint8 = np.dtype([(\"quint8\", np.uint8, 1)])\n",
      "D:\\evo\\anaconda\\envs\\tf1.14\\lib\\site-packages\\tensorboard\\compat\\tensorflow_stub\\dtypes.py:543: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n",
      "  _np_qint16 = np.dtype([(\"qint16\", np.int16, 1)])\n",
      "D:\\evo\\anaconda\\envs\\tf1.14\\lib\\site-packages\\tensorboard\\compat\\tensorflow_stub\\dtypes.py:544: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n",
      "  _np_quint16 = np.dtype([(\"quint16\", np.uint16, 1)])\n",
      "D:\\evo\\anaconda\\envs\\tf1.14\\lib\\site-packages\\tensorboard\\compat\\tensorflow_stub\\dtypes.py:545: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n",
      "  _np_qint32 = np.dtype([(\"qint32\", np.int32, 1)])\n",
      "D:\\evo\\anaconda\\envs\\tf1.14\\lib\\site-packages\\tensorboard\\compat\\tensorflow_stub\\dtypes.py:550: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n",
      "  np_resource = np.dtype([(\"resource\", np.ubyte, 1)])\n"
     ]
    }
   ],
   "source": [
    "import tensorflow as tf\n",
    "import numpy as np\n",
    "\"\"\"模型参数\"\"\"\n",
    "# DeepFM hyper-parameters for the TF1 graph built in the following cells.\n",
    "# NOTE(review): dropout_fm, dropout_deep, batch_norm, batch_norm_decay,\n",
    "# l2_reg, eval_metric and random_seed are declared here but not referenced by\n",
    "# any visible graph-building cell -- confirm whether they are used elsewhere.\n",
    "dfm_params = {\n",
    "    \"use_fm\":True,\n",
    "    \"use_deep\":True,\n",
    "    \"embedding_size\":8,\n",
    "    \"dropout_fm\":[0.1,0.1],\n",
    "    \"deep_layers\":[128,64],\n",
    "    \n",
    "    \"dropout_deep\":[0.1,0.1,0.1],\n",
    "    \"deep_layer_activation\":tf.nn.relu,\n",
    "    \"epoch\":500,\n",
    "    \"batch_size\":16,\n",
    "    \"learning_rate\":0.01,\n",
    "    \"optimizer\":\"adam\",\n",
    "    \"batch_norm\":0.5,\n",
    "    \"batch_norm_decay\":0.5,\n",
    "    \"l2_reg\":0,\n",
    "    \"verbose\":True,\n",
    "    \"eval_metric\":'gini_norm',\n",
    "    \"random_seed\":1\n",
    "}\n",
    "# Derived sizes: total number of distinct features, and fields per sample.\n",
    "dfm_params['feature_size'] = total_feature\n",
    "dfm_params['field_size'] = len(train_feature_index.columns)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 19,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "WARNING:tensorflow:From C:\\Users\\S\\AppData\\Local\\Temp/ipykernel_14420/2316900038.py:3: The name tf.placeholder is deprecated. Please use tf.compat.v1.placeholder instead.\n",
      "\n",
      "WARNING:tensorflow:From C:\\Users\\S\\AppData\\Local\\Temp/ipykernel_14420/2316900038.py:13: The name tf.random_normal is deprecated. Please use tf.random.normal instead.\n",
      "\n"
     ]
    }
   ],
   "source": [
    "\"\"\"开始建立模型\"\"\"\n",
    "# Placeholders: feature indices / feature values / labels.\n",
    "feat_index = tf.placeholder(tf.int32,shape=[None,None],name='feat_index')\n",
    "feat_value = tf.placeholder(tf.float32,shape=[None,None],name='feat_value')\n",
    "label = tf.placeholder(tf.float32,shape=[None,1],name='label')\n",
    "\n",
    "\"\"\"建立weights\"\"\"\n",
    "weights = dict()\n",
    " \n",
    "# embeddings\n",
    "# Embedding table, shape feature_size x embedding_size, init N(0, 0.01).\n",
    "weights['feature_embeddings'] = tf.Variable(\n",
    "                                tf.random_normal([dfm_params['feature_size'],\n",
    "                                dfm_params['embedding_size']],0.0,0.01),\n",
    "                                name='feature_embeddings')\n",
    "\n",
    "# First-order (bias) weights, shape feature_size x 1, init N(0, 1.0).\n",
    "# NOTE(review): stddev 1.0 here vs 0.01 for the embeddings -- confirm intended.\n",
    "weights['feature_bias'] = tf.Variable(tf.random_normal([dfm_params['feature_size'],1],0.0,1.0),name='feature_bias')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 20,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "2"
      ]
     },
     "execution_count": 20,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# deep layers\n",
    "# Number of hidden layers in the deep component (value displayed below).\n",
    "num_layer = len(dfm_params['deep_layers'])\n",
    "num_layer"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 21,
   "metadata": {
    "scrolled": true
   },
   "outputs": [],
   "source": [
    "# Deep-part input size: field_size * embedding_size.\n",
    "# NOTE(review): the original comment said 25*16, but embedding_size is 8 in\n",
    "# dfm_params -- the figures look stale.\n",
    "input_size = dfm_params['field_size'] * dfm_params['embedding_size']\n",
    "\n",
    "# Glorot/Xavier-style init scale for the first dense layer.\n",
    "glorot = np.sqrt(2.0/(input_size + dfm_params['deep_layers'][0]))\n",
    "\n",
    "weights['layer_0'] = tf.Variable(\n",
    "    np.random.normal(loc=0,scale=glorot,size=(input_size,dfm_params['deep_layers'][0])),dtype=np.float32\n",
    ")\n",
    "weights['bias_0'] = tf.Variable(\n",
    "    np.random.normal(loc=0,scale=glorot,size=(1,dfm_params['deep_layers'][0])),dtype=np.float32\n",
    ")\n",
    "\n",
    "# Remaining hidden layers, each with its own Glorot-style scale.\n",
    "for i in range(1,num_layer):\n",
    "    glorot = np.sqrt(2.0 / (dfm_params['deep_layers'][i - 1] + dfm_params['deep_layers'][i]))\n",
    "    weights[\"layer_%d\" % i] = tf.Variable(\n",
    "        np.random.normal(loc=0, scale=glorot, size=(dfm_params['deep_layers'][i - 1], dfm_params['deep_layers'][i])),\n",
    "        dtype=np.float32)  # layers[i-1] * layers[i]\n",
    "    weights[\"bias_%d\" % i] = tf.Variable(\n",
    "        np.random.normal(loc=0, scale=glorot, size=(1, dfm_params['deep_layers'][i])),\n",
    "        dtype=np.float32)  # 1 * layer[i]\n",
    "\n",
    "# final concat projection layer\n",
    "# Its input width depends on which components are enabled:\n",
    "# FM first-order (field_size) + FM second-order (embedding_size) + deep output.\n",
    "\n",
    "if dfm_params['use_fm'] and dfm_params['use_deep']:\n",
    "    input_size = dfm_params['field_size'] + dfm_params['embedding_size'] + dfm_params['deep_layers'][-1]\n",
    "elif dfm_params['use_fm']:\n",
    "    input_size = dfm_params['field_size'] + dfm_params['embedding_size']\n",
    "elif dfm_params['use_deep']:\n",
    "    input_size = dfm_params['deep_layers'][-1]\n",
    "\n",
    "glorot = np.sqrt(2.0/(input_size + 1))\n",
    "weights['concat_projection'] = tf.Variable(np.random.normal(loc=0,scale=glorot,size=(input_size,1)),dtype=np.float32)\n",
    "weights['concat_bias'] = tf.Variable(tf.constant(0.01),dtype=np.float32)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 22,
   "metadata": {},
   "outputs": [],
   "source": [
    "\"\"\"embedding\"\"\"\n",
    "# Look up each sample's feature embeddings, then scale each embedding by its\n",
    "# feature value (numeric features by the scaled number, categorical ones by 1).\n",
    "embeddings = tf.nn.embedding_lookup(weights['feature_embeddings'],feat_index)\n",
    "\n",
    "reshaped_feat_value = tf.reshape(feat_value,shape=[-1,dfm_params['field_size'],1])\n",
    "\n",
    "embeddings = tf.multiply(embeddings,reshaped_feat_value)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 23,
   "metadata": {},
   "outputs": [],
   "source": [
    "\"\"\"fm part\"\"\"\n",
    "# First-order term: per-feature bias weighted by the feature value,\n",
    "# summed over the embedding axis -> one scalar per field.\n",
    "fm_first_order = tf.nn.embedding_lookup(weights['feature_bias'],feat_index)\n",
    "fm_first_order = tf.reduce_sum(tf.multiply(fm_first_order,reshaped_feat_value),2)\n",
    "\n",
    "# Second-order interactions via the FM identity:\n",
    "# 0.5 * ((sum_i v_i x_i)^2 - sum_i (v_i x_i)^2)\n",
    "summed_features_emb = tf.reduce_sum(embeddings,1)\n",
    "summed_features_emb_square = tf.square(summed_features_emb)\n",
    "\n",
    "squared_features_emb = tf.square(embeddings)\n",
    "squared_sum_features_emb = tf.reduce_sum(squared_features_emb,1)\n",
    "\n",
    "fm_second_order = 0.5 * tf.subtract(summed_features_emb_square,squared_sum_features_emb)\n",
    "\n",
    "\"\"\"deep part\"\"\"\n",
    "# Deep component: flatten the embeddings and run them through dense layers.\n",
    "# NOTE(review): uses tf.nn.relu directly instead of the configured\n",
    "# dfm_params['deep_layer_activation'] -- confirm which is intended.\n",
    "y_deep = tf.reshape(embeddings,shape=[-1,dfm_params['field_size'] * dfm_params['embedding_size']])\n",
    "\n",
    "for i in range(0,len(dfm_params['deep_layers'])):\n",
    "    y_deep = tf.add(tf.matmul(y_deep,weights[\"layer_%d\" %i]), weights[\"bias_%d\"%i])\n",
    "    y_deep = tf.nn.relu(y_deep)\n",
    "\n",
    "\"\"\"final layer\"\"\"\n",
    "# Concatenate the enabled components into the input of the final projection.\n",
    "if dfm_params['use_fm'] and dfm_params['use_deep']:\n",
    "    concat_input = tf.concat([fm_first_order,fm_second_order,y_deep],axis=1)\n",
    "elif dfm_params['use_fm']:\n",
    "    concat_input = tf.concat([fm_first_order,fm_second_order],axis=1)\n",
    "elif dfm_params['use_deep']:\n",
    "    concat_input = y_deep\n",
    "\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 24,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "<tf.Tensor 'concat:0' shape=(?, 97) dtype=float32>"
      ]
     },
     "execution_count": 24,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Display the concatenated projection input (shape shown in the output).\n",
    "concat_input"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 25,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "<tf.Variable 'Variable_4:0' shape=(97, 1) dtype=float32_ref>"
      ]
     },
     "execution_count": 25,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Display the final projection weights (input_size x 1).\n",
    "weights['concat_projection']"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 26,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Final linear output: concat_input @ concat_projection + bias (op named 'out').\n",
    "out = tf.add(tf.matmul(concat_input,weights['concat_projection']),weights['concat_bias'],name=\"out\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 27,
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "WARNING:tensorflow:From C:\\Users\\S\\AppData\\Local\\Temp/ipykernel_14420/2919031168.py:2: The name tf.losses.mean_squared_error is deprecated. Please use tf.compat.v1.losses.mean_squared_error instead.\n",
      "\n",
      "WARNING:tensorflow:From D:\\evo\\anaconda\\envs\\tf1.14\\lib\\site-packages\\tensorflow\\python\\ops\\losses\\losses_impl.py:121: add_dispatch_support.<locals>.wrapper (from tensorflow.python.ops.array_ops) is deprecated and will be removed in a future version.\n",
      "Instructions for updating:\n",
      "Use tf.where in 2.0, which has the same broadcast rule as np.where\n",
      "WARNING:tensorflow:From C:\\Users\\S\\AppData\\Local\\Temp/ipykernel_14420/2919031168.py:5: The name tf.train.AdamOptimizer is deprecated. Please use tf.compat.v1.train.AdamOptimizer instead.\n",
      "\n",
      "WARNING:tensorflow:From C:\\Users\\S\\AppData\\Local\\Temp/ipykernel_14420/2919031168.py:8: The name tf.Session is deprecated. Please use tf.compat.v1.Session instead.\n",
      "\n",
      "WARNING:tensorflow:From C:\\Users\\S\\AppData\\Local\\Temp/ipykernel_14420/2919031168.py:9: The name tf.global_variables_initializer is deprecated. Please use tf.compat.v1.global_variables_initializer instead.\n",
      "\n"
     ]
    },
    {
     "data": {
      "application/vnd.jupyter.widget-view+json": {
       "model_id": "7febbe46905645c1988032d77446f604",
       "version_major": 2,
       "version_minor": 0
      },
      "text/plain": [
       "  0%|          | 0/500 [00:00<?, ?it/s]"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "epoch 0,loss is 1.2042495\n",
      "epoch 1,loss is 0.95020866\n",
      "epoch 2,loss is 0.85299253\n",
      "epoch 3,loss is 0.83230394\n",
      "epoch 4,loss is 0.8102838\n",
      "epoch 5,loss is 0.7453936\n",
      "epoch 6,loss is 0.66421294\n",
      "epoch 7,loss is 0.5877376\n",
      "epoch 8,loss is 0.52008253\n",
      "epoch 9,loss is 0.4533577\n",
      "epoch 10,loss is 0.38429478\n",
      "epoch 11,loss is 0.33466005\n",
      "epoch 12,loss is 0.3312585\n",
      "epoch 13,loss is 0.32154682\n",
      "epoch 14,loss is 0.32045624\n",
      "epoch 15,loss is 0.30728495\n",
      "epoch 16,loss is 0.28309438\n",
      "epoch 17,loss is 0.2723832\n",
      "epoch 18,loss is 0.2663982\n",
      "epoch 19,loss is 0.26196286\n",
      "epoch 20,loss is 0.2630169\n",
      "epoch 21,loss is 0.26052794\n",
      "epoch 22,loss is 0.25293693\n",
      "epoch 23,loss is 0.24786933\n",
      "epoch 24,loss is 0.247832\n",
      "epoch 25,loss is 0.24749912\n",
      "epoch 26,loss is 0.24478759\n",
      "epoch 27,loss is 0.24143165\n",
      "epoch 28,loss is 0.23904057\n",
      "epoch 29,loss is 0.23797767\n",
      "epoch 30,loss is 0.23614864\n",
      "epoch 31,loss is 0.23341249\n",
      "epoch 32,loss is 0.2315838\n",
      "epoch 33,loss is 0.23079427\n",
      "epoch 34,loss is 0.22959311\n",
      "epoch 35,loss is 0.22762884\n",
      "epoch 36,loss is 0.22596167\n",
      "epoch 37,loss is 0.22525252\n",
      "epoch 38,loss is 0.22436367\n",
      "epoch 39,loss is 0.22283195\n",
      "epoch 40,loss is 0.22137626\n",
      "epoch 41,loss is 0.22050638\n",
      "epoch 42,loss is 0.21977226\n",
      "epoch 43,loss is 0.21883309\n",
      "epoch 44,loss is 0.21770486\n",
      "epoch 45,loss is 0.21672975\n",
      "epoch 46,loss is 0.21609287\n",
      "epoch 47,loss is 0.21555737\n",
      "epoch 48,loss is 0.21483088\n",
      "epoch 49,loss is 0.21423095\n",
      "epoch 50,loss is 0.21385488\n",
      "epoch 51,loss is 0.21350089\n",
      "epoch 52,loss is 0.21294864\n",
      "epoch 53,loss is 0.21232608\n",
      "epoch 54,loss is 0.211767\n",
      "epoch 55,loss is 0.21114159\n",
      "epoch 56,loss is 0.21048754\n",
      "epoch 57,loss is 0.20990995\n",
      "epoch 58,loss is 0.20929888\n",
      "epoch 59,loss is 0.20867391\n",
      "epoch 60,loss is 0.20805745\n",
      "epoch 61,loss is 0.20736688\n",
      "epoch 62,loss is 0.20672975\n",
      "epoch 63,loss is 0.20613983\n",
      "epoch 64,loss is 0.20541464\n",
      "epoch 65,loss is 0.20465954\n",
      "epoch 66,loss is 0.20380698\n",
      "epoch 67,loss is 0.20289953\n",
      "epoch 68,loss is 0.2020188\n",
      "epoch 69,loss is 0.20102191\n",
      "epoch 70,loss is 0.2001161\n",
      "epoch 71,loss is 0.19907586\n",
      "epoch 72,loss is 0.19914523\n",
      "epoch 73,loss is 0.20125759\n",
      "epoch 74,loss is 0.19873413\n",
      "epoch 75,loss is 0.19489312\n",
      "epoch 76,loss is 0.19680266\n",
      "epoch 77,loss is 0.19336063\n",
      "epoch 78,loss is 0.19324349\n",
      "epoch 79,loss is 0.1921388\n",
      "epoch 80,loss is 0.18958019\n",
      "epoch 81,loss is 0.1900727\n",
      "epoch 82,loss is 0.18711701\n",
      "epoch 83,loss is 0.18657857\n",
      "epoch 84,loss is 0.18558598\n",
      "epoch 85,loss is 0.18284164\n",
      "epoch 86,loss is 0.18230273\n",
      "epoch 87,loss is 0.1805082\n",
      "epoch 88,loss is 0.1780593\n",
      "epoch 89,loss is 0.1768056\n",
      "epoch 90,loss is 0.17516565\n",
      "epoch 91,loss is 0.17264947\n",
      "epoch 92,loss is 0.17051317\n",
      "epoch 93,loss is 0.16862737\n",
      "epoch 94,loss is 0.16706732\n",
      "epoch 95,loss is 0.16558588\n",
      "epoch 96,loss is 0.16337216\n",
      "epoch 97,loss is 0.160873\n",
      "epoch 98,loss is 0.15774134\n",
      "epoch 99,loss is 0.15515849\n",
      "epoch 100,loss is 0.15305506\n",
      "epoch 101,loss is 0.15144405\n",
      "epoch 102,loss is 0.1521199\n",
      "epoch 103,loss is 0.15546004\n",
      "epoch 104,loss is 0.15568556\n",
      "epoch 105,loss is 0.14469989\n",
      "epoch 106,loss is 0.14368904\n",
      "epoch 107,loss is 0.14623047\n",
      "epoch 108,loss is 0.13747747\n",
      "epoch 109,loss is 0.13764374\n",
      "epoch 110,loss is 0.13824879\n",
      "epoch 111,loss is 0.13130161\n",
      "epoch 112,loss is 0.13122013\n",
      "epoch 113,loss is 0.13031824\n",
      "epoch 114,loss is 0.12567453\n",
      "epoch 115,loss is 0.12517883\n",
      "epoch 116,loss is 0.124501035\n",
      "epoch 117,loss is 0.119742185\n",
      "epoch 118,loss is 0.11982087\n",
      "epoch 119,loss is 0.119073495\n",
      "epoch 120,loss is 0.11517671\n",
      "epoch 121,loss is 0.11367104\n",
      "epoch 122,loss is 0.113751724\n",
      "epoch 123,loss is 0.11120517\n",
      "epoch 124,loss is 0.10855833\n",
      "epoch 125,loss is 0.1081849\n",
      "epoch 126,loss is 0.10717245\n",
      "epoch 127,loss is 0.10469928\n",
      "epoch 128,loss is 0.102323815\n",
      "epoch 129,loss is 0.10122012\n",
      "epoch 130,loss is 0.100322165\n",
      "epoch 131,loss is 0.0989655\n",
      "epoch 132,loss is 0.096780315\n",
      "epoch 133,loss is 0.094489805\n",
      "epoch 134,loss is 0.09270701\n",
      "epoch 135,loss is 0.091441005\n",
      "epoch 136,loss is 0.090342954\n",
      "epoch 137,loss is 0.08995745\n",
      "epoch 138,loss is 0.09004853\n",
      "epoch 139,loss is 0.09079846\n",
      "epoch 140,loss is 0.08897825\n",
      "epoch 141,loss is 0.08506412\n",
      "epoch 142,loss is 0.080252096\n",
      "epoch 143,loss is 0.079553984\n",
      "epoch 144,loss is 0.08065122\n",
      "epoch 145,loss is 0.07933658\n",
      "epoch 146,loss is 0.07583131\n",
      "epoch 147,loss is 0.07280582\n",
      "epoch 148,loss is 0.07222107\n",
      "epoch 149,loss is 0.07287795\n",
      "epoch 150,loss is 0.07217576\n",
      "epoch 151,loss is 0.07024474\n",
      "epoch 152,loss is 0.06749574\n",
      "epoch 153,loss is 0.065347835\n",
      "epoch 154,loss is 0.0645101\n",
      "epoch 155,loss is 0.064417765\n",
      "epoch 156,loss is 0.065213196\n",
      "epoch 157,loss is 0.0662136\n",
      "epoch 158,loss is 0.06770007\n",
      "epoch 159,loss is 0.06633198\n",
      "epoch 160,loss is 0.06248453\n",
      "epoch 161,loss is 0.058435973\n",
      "epoch 162,loss is 0.05878031\n",
      "epoch 163,loss is 0.061114904\n",
      "epoch 164,loss is 0.06094542\n",
      "epoch 165,loss is 0.058156442\n",
      "epoch 166,loss is 0.05520579\n",
      "epoch 167,loss is 0.05507455\n",
      "epoch 168,loss is 0.056599416\n",
      "epoch 169,loss is 0.057763677\n",
      "epoch 170,loss is 0.05714436\n",
      "epoch 171,loss is 0.05392074\n",
      "epoch 172,loss is 0.051876847\n",
      "epoch 173,loss is 0.05210339\n",
      "epoch 174,loss is 0.053419005\n",
      "epoch 175,loss is 0.053112134\n",
      "epoch 176,loss is 0.051326375\n",
      "epoch 177,loss is 0.049411412\n",
      "epoch 178,loss is 0.048679926\n",
      "epoch 179,loss is 0.04871933\n",
      "epoch 180,loss is 0.0492748\n",
      "epoch 181,loss is 0.04944199\n",
      "epoch 182,loss is 0.049043957\n",
      "epoch 183,loss is 0.04878332\n",
      "epoch 184,loss is 0.04775259\n",
      "epoch 185,loss is 0.046541676\n",
      "epoch 186,loss is 0.04555199\n",
      "epoch 187,loss is 0.044351295\n",
      "epoch 188,loss is 0.043988552\n",
      "epoch 189,loss is 0.04383919\n",
      "epoch 190,loss is 0.044278953\n",
      "epoch 191,loss is 0.044853732\n",
      "epoch 192,loss is 0.04634827\n",
      "epoch 193,loss is 0.048927672\n",
      "epoch 194,loss is 0.049247727\n",
      "epoch 195,loss is 0.048953846\n",
      "epoch 196,loss is 0.044560086\n",
      "epoch 197,loss is 0.040550694\n",
      "epoch 198,loss is 0.041928146\n",
      "epoch 199,loss is 0.04389893\n",
      "epoch 200,loss is 0.042417683\n",
      "epoch 201,loss is 0.04099896\n",
      "epoch 202,loss is 0.038986806\n",
      "epoch 203,loss is 0.03926943\n",
      "epoch 204,loss is 0.04070262\n",
      "epoch 205,loss is 0.040119823\n",
      "epoch 206,loss is 0.039324038\n",
      "epoch 207,loss is 0.037399583\n",
      "epoch 208,loss is 0.036412977\n",
      "epoch 209,loss is 0.037291776\n",
      "epoch 210,loss is 0.037384562\n",
      "epoch 211,loss is 0.03754763\n",
      "epoch 212,loss is 0.037166875\n",
      "epoch 213,loss is 0.035722136\n",
      "epoch 214,loss is 0.03479794\n",
      "epoch 215,loss is 0.03420742\n",
      "epoch 216,loss is 0.03387399\n",
      "epoch 217,loss is 0.034032326\n",
      "epoch 218,loss is 0.034410335\n",
      "epoch 219,loss is 0.034518775\n",
      "epoch 220,loss is 0.034749657\n",
      "epoch 221,loss is 0.03549679\n",
      "epoch 222,loss is 0.035602346\n",
      "epoch 223,loss is 0.03646273\n",
      "epoch 224,loss is 0.03577709\n",
      "epoch 225,loss is 0.033875644\n",
      "epoch 226,loss is 0.0321726\n",
      "epoch 227,loss is 0.030714624\n",
      "epoch 228,loss is 0.030497529\n",
      "epoch 229,loss is 0.03148256\n",
      "epoch 230,loss is 0.032278143\n",
      "epoch 231,loss is 0.032715015\n",
      "epoch 232,loss is 0.03280097\n",
      "epoch 233,loss is 0.03177706\n",
      "epoch 234,loss is 0.030395985\n",
      "epoch 235,loss is 0.029096724\n",
      "epoch 236,loss is 0.028418679\n",
      "epoch 237,loss is 0.028901923\n",
      "epoch 238,loss is 0.030305356\n",
      "epoch 239,loss is 0.03213144\n",
      "epoch 240,loss is 0.03261686\n",
      "epoch 241,loss is 0.031945158\n",
      "epoch 242,loss is 0.02965081\n",
      "epoch 243,loss is 0.02740517\n",
      "epoch 244,loss is 0.02671689\n",
      "epoch 245,loss is 0.027516007\n",
      "epoch 246,loss is 0.029366843\n",
      "epoch 247,loss is 0.030846586\n",
      "epoch 248,loss is 0.03109031\n",
      "epoch 249,loss is 0.028460747\n",
      "epoch 250,loss is 0.025818145\n",
      "epoch 251,loss is 0.025351789\n",
      "epoch 252,loss is 0.02691625\n",
      "epoch 253,loss is 0.028336136\n",
      "epoch 254,loss is 0.028331757\n",
      "epoch 255,loss is 0.026441175\n",
      "epoch 256,loss is 0.024513133\n",
      "epoch 257,loss is 0.02429669\n",
      "epoch 258,loss is 0.025267176\n",
      "epoch 259,loss is 0.026192289\n",
      "epoch 260,loss is 0.026176684\n",
      "epoch 261,loss is 0.025010156\n",
      "epoch 262,loss is 0.023687884\n",
      "epoch 263,loss is 0.023135224\n",
      "epoch 264,loss is 0.023443373\n",
      "epoch 265,loss is 0.02412112\n",
      "epoch 266,loss is 0.024556633\n",
      "epoch 267,loss is 0.024538977\n",
      "epoch 268,loss is 0.023982953\n",
      "epoch 269,loss is 0.023199068\n",
      "epoch 270,loss is 0.022411488\n",
      "epoch 271,loss is 0.022026094\n",
      "epoch 272,loss is 0.022050422\n",
      "epoch 273,loss is 0.02232424\n",
      "epoch 274,loss is 0.022718301\n",
      "epoch 275,loss is 0.023225833\n",
      "epoch 276,loss is 0.02374038\n",
      "epoch 277,loss is 0.024122907\n",
      "epoch 278,loss is 0.024351953\n",
      "epoch 279,loss is 0.023708576\n",
      "epoch 280,loss is 0.022618553\n",
      "epoch 281,loss is 0.021466414\n",
      "epoch 282,loss is 0.020961687\n",
      "epoch 283,loss is 0.0207908\n",
      "epoch 284,loss is 0.020823566\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "epoch 285,loss is 0.0211401\n",
      "epoch 286,loss is 0.021632137\n",
      "epoch 287,loss is 0.022039756\n",
      "epoch 288,loss is 0.022276754\n",
      "epoch 289,loss is 0.022714086\n",
      "epoch 290,loss is 0.022688972\n",
      "epoch 291,loss is 0.02246482\n",
      "epoch 292,loss is 0.021896308\n",
      "epoch 293,loss is 0.021242162\n",
      "epoch 294,loss is 0.020389304\n",
      "epoch 295,loss is 0.01969182\n",
      "epoch 296,loss is 0.019499475\n",
      "epoch 297,loss is 0.019485364\n",
      "epoch 298,loss is 0.019547962\n",
      "epoch 299,loss is 0.019946923\n",
      "epoch 300,loss is 0.020678982\n",
      "epoch 301,loss is 0.02148961\n",
      "epoch 302,loss is 0.022773964\n",
      "epoch 303,loss is 0.023745747\n",
      "epoch 304,loss is 0.02423609\n",
      "epoch 305,loss is 0.023445012\n",
      "epoch 306,loss is 0.021664826\n",
      "epoch 307,loss is 0.019512374\n",
      "epoch 308,loss is 0.018518083\n",
      "epoch 309,loss is 0.01905327\n",
      "epoch 310,loss is 0.020399788\n",
      "epoch 311,loss is 0.021456229\n",
      "epoch 312,loss is 0.021015432\n",
      "epoch 313,loss is 0.019600516\n",
      "epoch 314,loss is 0.018344637\n",
      "epoch 315,loss is 0.017953712\n",
      "epoch 316,loss is 0.018472124\n",
      "epoch 317,loss is 0.019375062\n",
      "epoch 318,loss is 0.019735126\n",
      "epoch 319,loss is 0.01936473\n",
      "epoch 320,loss is 0.01862946\n",
      "epoch 321,loss is 0.017853666\n",
      "epoch 322,loss is 0.0174615\n",
      "epoch 323,loss is 0.017362304\n",
      "epoch 324,loss is 0.017590776\n",
      "epoch 325,loss is 0.017997678\n",
      "epoch 326,loss is 0.01823308\n",
      "epoch 327,loss is 0.01842875\n",
      "epoch 328,loss is 0.018474644\n",
      "epoch 329,loss is 0.018392136\n",
      "epoch 330,loss is 0.018111363\n",
      "epoch 331,loss is 0.017634068\n",
      "epoch 332,loss is 0.017247597\n",
      "epoch 333,loss is 0.01694808\n",
      "epoch 334,loss is 0.01666996\n",
      "epoch 335,loss is 0.016552782\n",
      "epoch 336,loss is 0.016452823\n",
      "epoch 337,loss is 0.016374484\n",
      "epoch 338,loss is 0.016278991\n",
      "epoch 339,loss is 0.016157845\n",
      "epoch 340,loss is 0.0160759\n",
      "epoch 341,loss is 0.016054828\n",
      "epoch 342,loss is 0.01600136\n",
      "epoch 343,loss is 0.0159313\n",
      "epoch 344,loss is 0.015869182\n",
      "epoch 345,loss is 0.015818926\n",
      "epoch 346,loss is 0.015879454\n",
      "epoch 347,loss is 0.01619362\n",
      "epoch 348,loss is 0.017160079\n",
      "epoch 349,loss is 0.019916695\n",
      "epoch 350,loss is 0.026706075\n",
      "epoch 351,loss is 0.04112413\n",
      "epoch 352,loss is 0.04912426\n",
      "epoch 353,loss is 0.039270964\n",
      "epoch 354,loss is 0.017309217\n",
      "epoch 355,loss is 0.03255889\n",
      "epoch 356,loss is 0.035908885\n",
      "epoch 357,loss is 0.018303433\n",
      "epoch 358,loss is 0.032686926\n",
      "epoch 359,loss is 0.026076265\n",
      "epoch 360,loss is 0.02057233\n",
      "epoch 361,loss is 0.028692443\n",
      "epoch 362,loss is 0.018228432\n",
      "epoch 363,loss is 0.024146799\n",
      "epoch 364,loss is 0.019999739\n",
      "epoch 365,loss is 0.020048162\n",
      "epoch 366,loss is 0.021139242\n",
      "epoch 367,loss is 0.01808049\n",
      "epoch 368,loss is 0.020247487\n",
      "epoch 369,loss is 0.017954603\n",
      "epoch 370,loss is 0.018209238\n",
      "epoch 371,loss is 0.018430026\n",
      "epoch 372,loss is 0.017005863\n",
      "epoch 373,loss is 0.017732246\n",
      "epoch 374,loss is 0.016897429\n",
      "epoch 375,loss is 0.01658819\n",
      "epoch 376,loss is 0.017249981\n",
      "epoch 377,loss is 0.015661199\n",
      "epoch 378,loss is 0.016703084\n",
      "epoch 379,loss is 0.015699916\n",
      "epoch 380,loss is 0.01560042\n",
      "epoch 381,loss is 0.01598303\n",
      "epoch 382,loss is 0.014947403\n",
      "epoch 383,loss is 0.01560782\n",
      "epoch 384,loss is 0.01511418\n",
      "epoch 385,loss is 0.014902427\n",
      "epoch 386,loss is 0.015329833\n",
      "epoch 387,loss is 0.014434215\n",
      "epoch 388,loss is 0.014883308\n",
      "epoch 389,loss is 0.014501633\n",
      "epoch 390,loss is 0.014442262\n",
      "epoch 391,loss is 0.014496649\n",
      "epoch 392,loss is 0.014259274\n",
      "epoch 393,loss is 0.014250086\n",
      "epoch 394,loss is 0.014175016\n",
      "epoch 395,loss is 0.014021158\n",
      "epoch 396,loss is 0.01398529\n",
      "epoch 397,loss is 0.013896805\n",
      "epoch 398,loss is 0.013784953\n",
      "epoch 399,loss is 0.013799072\n",
      "epoch 400,loss is 0.013608084\n",
      "epoch 401,loss is 0.013671436\n",
      "epoch 402,loss is 0.0135307675\n",
      "epoch 403,loss is 0.013482955\n",
      "epoch 404,loss is 0.013480676\n",
      "epoch 405,loss is 0.013365155\n",
      "epoch 406,loss is 0.0133193135\n",
      "epoch 407,loss is 0.013263485\n",
      "epoch 408,loss is 0.013246121\n",
      "epoch 409,loss is 0.013138761\n",
      "epoch 410,loss is 0.013115068\n",
      "epoch 411,loss is 0.01308405\n",
      "epoch 412,loss is 0.013000995\n",
      "epoch 413,loss is 0.012968702\n",
      "epoch 414,loss is 0.012928753\n",
      "epoch 415,loss is 0.01290337\n",
      "epoch 416,loss is 0.012835014\n",
      "epoch 417,loss is 0.012774746\n",
      "epoch 418,loss is 0.012758935\n",
      "epoch 419,loss is 0.012713317\n",
      "epoch 420,loss is 0.0126728965\n",
      "epoch 421,loss is 0.012627652\n",
      "epoch 422,loss is 0.012584068\n",
      "epoch 423,loss is 0.012556947\n",
      "epoch 424,loss is 0.0125270095\n",
      "epoch 425,loss is 0.012470808\n",
      "epoch 426,loss is 0.012449104\n",
      "epoch 427,loss is 0.012408849\n",
      "epoch 428,loss is 0.0123725\n",
      "epoch 429,loss is 0.012339217\n",
      "epoch 430,loss is 0.012308968\n",
      "epoch 431,loss is 0.012269142\n",
      "epoch 432,loss is 0.012235911\n",
      "epoch 433,loss is 0.012202081\n",
      "epoch 434,loss is 0.012168977\n",
      "epoch 435,loss is 0.012131752\n",
      "epoch 436,loss is 0.0121006295\n",
      "epoch 437,loss is 0.012057498\n",
      "epoch 438,loss is 0.012037963\n",
      "epoch 439,loss is 0.011990518\n",
      "epoch 440,loss is 0.0119727\n",
      "epoch 441,loss is 0.011954739\n",
      "epoch 442,loss is 0.01194273\n",
      "epoch 443,loss is 0.011922099\n",
      "epoch 444,loss is 0.0119137755\n",
      "epoch 445,loss is 0.011864784\n",
      "epoch 446,loss is 0.011823846\n",
      "epoch 447,loss is 0.011770834\n",
      "epoch 448,loss is 0.011734831\n",
      "epoch 449,loss is 0.011708655\n",
      "epoch 450,loss is 0.01169224\n",
      "epoch 451,loss is 0.011682723\n",
      "epoch 452,loss is 0.01165432\n",
      "epoch 453,loss is 0.011625514\n",
      "epoch 454,loss is 0.011597826\n",
      "epoch 455,loss is 0.0115616415\n",
      "epoch 456,loss is 0.01155634\n",
      "epoch 457,loss is 0.011635662\n",
      "epoch 458,loss is 0.011818066\n",
      "epoch 459,loss is 0.01205318\n",
      "epoch 460,loss is 0.012528652\n",
      "epoch 461,loss is 0.012769507\n",
      "epoch 462,loss is 0.013264714\n",
      "epoch 463,loss is 0.0138216205\n",
      "epoch 464,loss is 0.014588307\n",
      "epoch 465,loss is 0.014907422\n",
      "epoch 466,loss is 0.0142427\n",
      "epoch 467,loss is 0.012855086\n",
      "epoch 468,loss is 0.012059051\n",
      "epoch 469,loss is 0.011821934\n",
      "epoch 470,loss is 0.01190266\n",
      "epoch 471,loss is 0.011791608\n",
      "epoch 472,loss is 0.0120744\n",
      "epoch 473,loss is 0.012727117\n",
      "epoch 474,loss is 0.012299147\n",
      "epoch 475,loss is 0.011593896\n",
      "epoch 476,loss is 0.011209777\n",
      "epoch 477,loss is 0.011360015\n",
      "epoch 478,loss is 0.011491681\n",
      "epoch 479,loss is 0.011314264\n",
      "epoch 480,loss is 0.011362257\n",
      "epoch 481,loss is 0.011731675\n",
      "epoch 482,loss is 0.011830827\n",
      "epoch 483,loss is 0.011500283\n",
      "epoch 484,loss is 0.011190282\n",
      "epoch 485,loss is 0.011224118\n",
      "epoch 486,loss is 0.0114449505\n",
      "epoch 487,loss is 0.011100578\n",
      "epoch 488,loss is 0.010793027\n",
      "epoch 489,loss is 0.01078626\n",
      "epoch 490,loss is 0.010926986\n",
      "epoch 491,loss is 0.010858975\n",
      "epoch 492,loss is 0.010653073\n",
      "epoch 493,loss is 0.010544858\n",
      "epoch 494,loss is 0.010627867\n",
      "epoch 495,loss is 0.01067881\n",
      "epoch 496,loss is 0.010652458\n",
      "epoch 497,loss is 0.010521867\n",
      "epoch 498,loss is 0.010499619\n",
      "epoch 499,loss is 0.0106496485\n"
     ]
    }
   ],
   "source": [
    "\"\"\"loss and optimizer\"\"\"\n",
    "loss = tf.losses.mean_squared_error(label, out)\n",
    "#optimizer = tf.train.AdamOptimizer(learning_rate=dfm_params['learning_rate'], beta1=0.9, beta2=0.999,epsilon=1e-8).minimize(loss)\n",
    "\n",
    "optimizer = tf.train.AdamOptimizer(learning_rate=dfm_params['learning_rate']).minimize(loss)\n",
    "\n",
    "\"\"\"train\"\"\"\n",
    "sess  =tf.Session()\n",
    "sess.run(tf.global_variables_initializer())\n",
    "\n",
    "loss_polt = []\n",
    "for i in tqdm(range(dfm_params['epoch'])):\n",
    "    epoch_loss,_ = sess.run([loss,optimizer],feed_dict={feat_index:train_feature_index,\n",
    "                            feat_value:train_feature_value,\n",
    "                             label:train_y})\n",
    "    print(\"epoch %s,loss is %s\" % (str(i),str(epoch_loss)))\n",
    "    loss_polt.append(epoch_loss)\n",
    "    \n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 28,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAlMAAAJDCAYAAAAvl5FzAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/YYfK9AAAACXBIWXMAAAsTAAALEwEAmpwYAAAu70lEQVR4nO3deZxeZX03/s+VBBKILMpu2CVKUxaBsFhEwYVFENxQBMUqylOLPvws/tzqo5W2j9pSFS0qUBfEBdFWBUSRIgrYogQFWZVNVtllDSEJnOePa9IMIctkzj1zZuZ+v1+vec19n/tk7u/Mgcwn3+s611WapgkAAMMzqesCAADGM2EKAKAFYQoAoAVhCgCgBWEKAKAFYQoAoIUVhqlSypdLKXeXUq5cxuullPLZUsr1pZTfllJ27H2ZAABj01A6U19Nsu9yXt8vycyBjyOTfKF9WQAA48MKw1TTNBckuX85pxyU5GtNdXGStUspG/WqQACAsawXc6ZmJLl10PPbBo4BAEx4U0bzzUopR6YOBWb69Ok7bb311iP6fg8/nNxxR/K8543o2wAAE9yll156b9M06y3ttV6EqduTbDLo+cYDx56maZqTkpyUJLNnz27mzJnTg7dftgsvTD70ofoZAGC4Sik3L+u1XgzznZHk8IG7+nZL8mDTNH/swddtbdKk5Mknu64CAJjIVtiZKqV8K8meSdYtpdyW5KNJVkmSpmm+mOTsJK9Icn2SuUneOlLFrixhCgAYaSsMU03TvHEFrzdJjupZRT0kTAEAI21Cr4AuTAEAI02YAgBoQZgCAGhBmAIAaEGYAgBoYUKHqVKEKQBgZE3oMDVpUtI0XVcBAExkEz5M6UwBACNJmAIAaEGYAgBoQZgCAGhBmAIAaEGYAgBoQZgCAGhBmAIAaEGYAgBoYUKHKdvJAAAjbUKHKdvJAAAjbcKHKZ0pAGAkCVMAAC0IUwAALQhTAAAtTPgw9cQTXVcBAExkEzpMrbJKsmBB11UAABPZhA9TCxdaHgEAGDkTOkyVkkyeXAMVAMBImNBhKjHUBwCMLGEKAKAFYQoAoAVhCgCghQkfpqZMEaYAgJEz4cPUouURAABGQl+EKZ0pAGCkCFMAAC0IUwAALQhTAAAtCFMAAC0IUwAALQhTAAAtCFMAAC0IUwAALQhTAAAtCFMAAC1M+DA1ZYq9+QCAkTPhw5TOFAAwkoQpAIAWhCkAgBaEKQCAFoQpAIAWhCkAgBaEKQCAFoQpAIAWhCkAgBaEKQCAFoQpAIAWJnyYmjJFmAIARs6ED1OrrGKjYwBg5PRFmNKZAgBGijAFANCCMAUA0IIwBQDQgjAFANCCMAUA0IIwBQDQgjAFANCCMAUA0IIwBQDQwoQPU/bmAwBG0oQPUzpTAMBI6oswZaNjAGCk9EWY0pkCAEaKMAUA0IIwBQDQgjAFANCCMAUA0MKED1PTptUw5Y4+AGAkTPgwNXly8qxnJffe23UlAMBENOHDVJKsv35y111dVwEATER9EaY22CC5++6uqwAAJqK+CFM6UwDASOmbMKUzBQCMhL4IU4b5AICR0hdhyjAfADBS+iJM6UwBACOlL8KUzhQAMFL6JkzpTAEAI6GvwlTTdF0JADDR9EWYWn31uuHxQw91XQkAMNH0RZhKDPUBACOjb8LUBhuYhA4A9F7fhCmdKQBgJPRNmLLWFAAwEvomTFlrCgAYCX0VpnSmAIBe65swZQI6ADAS+iZM6UwBACOhb8KUCegAwEjomzBlAjoAMBL6Jkw985nJgw8mTzzRdSUAwETSN2Fq0qRk6tTk8ce7rgQAmEiGFKZKKfuWUn5XSrm+lPKBpby+aSnl/FLKb0opvy2lvKL3pbYnTAEAvbbCMFVKmZzkhCT7JZmV5I2llFlLnPbhJKc3TbNDkkOSfL7XhfaCMAUA9NpQOlO7JLm+aZobm6aZn+S0JActcU6TZM2Bx2sluaN3JfbOtGnJ
vHldVwEATCRThnDOjCS3Dnp+W5Jdlzjn75L8pJTy7iTTk7ysJ9X1mM4UANBrvZqA/sYkX22aZuMkr0hyainlaV+7lHJkKWVOKWXOPffc06O3Hrpp04QpAKC3hhKmbk+yyaDnGw8cG+yIJKcnSdM0/51kWpJ1l/xCTdOc1DTN7KZpZq+33nrDq7iFqVMN8wEAvTWUMHVJkpmllC1KKaumTjA/Y4lzbkny0iQppfxZapga/dbTChjmAwB6bYVhqmmahUneleScJNek3rV3VSnl2FLKgQOnHZPkHaWUy5N8K8lfNk3TjFTRw2UCOgDQa0OZgJ6mac5OcvYSxz4y6PHVSXbvbWm9pzMFAPRa36yAnpiADgD0Xl+FKRPQAYBe67swpTMFAPRSX4Upw3wAQK/1VZgyzAcA9FpfhSmdKQCg1/oqTOlMAQC91ndhSmcKAOilvgpThvkAgF7rqzBlmA8A6LW+ClM6UwBAr/VVmDJnCgDotb4KU9OmGeYDAHqrr8KUzhQA0GvCFABAC30VpgzzAQC91ldhSmcKAOi1vgpTOlMAQK/1VZjSmQIAek2YAgBooa/ClGE+AKDX+ipM6UwBAL3WV2HK3nwAQK/1VZiaOtUwHwDQW30VpiZPTkpJFi7suhIAYKLoqzCV1O7U/PldVwEATBR9F6ZWXVWYAgB6R5gCAGhBmAIAaEGYAgBoQZgCAGihL8OUhTsBgF7pyzClMwUA9IowBQDQgjAFANCCMAUA0ELfhSnbyQAAvdR3YUpnCgDoJWEKAKAFYQoAoAVhCgCgBWEKAKAFYQoAoAVhCgCghb4MUzY6BgB6pS/DlM4UANArwhQAQAvCFABAC8IUAEALwhQAQAt9F6amThWmAIDe6bswpTMFAPSSMAUA0IIwBQDQgjAFANCCMAUA0EJfhil78wEAvdKXYUpnCgDoFWEKAKAFYQoAoAVhCgCgBWEKAKAFYQoAoAVhCgCgBWEKAKAFYQoAoIW+C1OTJyelJE880XUlAMBE0HdhKtGdAgB6R5gCAGihb8OUzY4BgF7o2zClMwUA9IIwBQDQgjAFANCCMAUA0IIwBQDQgjAFANCCMAUA0IIwBQDQgjAFANCCMAUA0EJfhqmpU4UpAKA3+jJM2ZsPAOiVvg1TOlMAQC8IUwAALQhTAAAtCFMAAC0IUwAALQhTAAAtCFMAAC0IUwAALQhTAAAtCFMAAC0IUwAALQhTAAAt9G2YstExANALfRmmpk7VmQIAeqMvw5RhPgCgV4QpAIAWhCkAgBaEKQCAFoYUpkop+5ZSfldKub6U8oFlnPP6UsrVpZSrSinf7G2ZvSVMAQC9MmVFJ5RSJic5IcnLk9yW5JJSyhlN01w96JyZST6YZPemaf5USll/pAruBWEKAOiVoXSmdklyfdM0NzZNMz/JaUkOWuKcdyQ5oWmaPyVJ0zR397bM3hKmAIBeGUqYmpHk1kHPbxs4Nthzkzy3lPKLUsrFpZR9e1XgSBCmAIBeWeEw30p8nZlJ9kyycZILSinbNk3zwOCTSilHJjkySTbddNMevfXKE6YAgF4ZSmfq9iSbDHq+8cCxwW5LckbTNAuaprkpye9Tw9VTNE1zUtM0s5ummb3eeusNt+bWhCkAoFeGEqYuSTKzlLJFKWXVJIckOWOJc76f2pVKKWXd1GG/G3tXZm8JUwBAr6wwTDVNszDJu5Kck+SaJKc3TXNVKeXYUsqBA6edk+S+UsrVSc5P8v83TXPfSBXdlo2OAYBeKU3TdPLGs2fPbubMmdPJezdNMnlysmBB/QwAsDyllEubppm9tNf6cgX0Ugz1AQC90ZdhKkmmTjXUBwC017dhSmcKAOiFvg1TU6cm8+Z1XQUAMN71dZgyzAcAtNW3YWraNGEKAGivb8OUzhQA0AvCFABAC8IUAEALwhQAQAvCFABAC8IUAEALwhQAQAvCFABAC8IUAEALwhQAQAvCFABAC8IUAEALwhQAQAt9Habmzeu6CgBgvOvr
MKUzBQC01bdhato0YQoAaK9vw5TOFADQC8IUAEALwhQAQAvCFABAC8IUAEALwhQAQAvCFABAC8IUAEALwhQAQAvCFABAC8IUAEALwhQAQAvCFABAC30fppqm60oAgPGsb8PUpEnJlCnJ/PldVwIAjGd9G6aSZNo0Q30AQDt9HabMmwIA2hKmhCkAoAVhSpgCAFoQpoQpAKAFYUqYAgBaEKaEKQCgBWFKmAIAWhCmhCkAoAVhSpgCAFoQpoQpAKAFYUqYAgBaEKaEKQCgBWFKmAIAWuj7MDVvXtdVAADjWd+HKZ0pAKCNvg5T06YJUwBAO30dpnSmAIC2hClhCgBoQZgSpgCAFoQpYQoAaEGYEqYAgBaEKWEKAGhBmBKmAIAWhClhCgBoQZgSpgCAFoQpYQoAaEGYEqYAgBaEKWEKAGhBmBKmAIAW+j5MzZvXdRUAwHjW92FKZwoAaKOvw9S0acIUANBOX4cpnSkAoC1h6vGkabquBAAYr/o6TE2enEyalCxc2HUlAMB41ddhKjHUBwC0I0wJUwBAC8KUMAUAtCBMCVMAQAvClDAFALQgTAlTAEALwpQwBQC0IEwJUwBAC8KUMAUAtCBMCVMAQAvC1NRk3ryuqwAAxithSmcKAGhBmBKmAIAW+j5MTZsmTAEAw9f3YUpnCgBoQ5gSpgCAFoQpYQoAaEGYEqYAgBaEKWEKAGhBmBKmAIAWhClhCgBoQZgSpgCAFoQpYQoAaEGYEqYAgBaEKWEKAGhBmBKmAIAWhhSmSin7llJ+V0q5vpTygeWc99pSSlNKmd27EkfW1KnJvHldVwEAjFcrDFOllMlJTkiyX5JZSd5YSpm1lPPWSHJ0kl/2usiRpDMFALQxlM7ULkmub5rmxqZp5ic5LclBSznv75N8Msm46vMIUwBAG0MJUzOS3Dro+W0Dx/5HKWXHJJs0TfPDHtY2KqZNE6YAgOFrPQG9lDIpyaeSHDOEc48spcwppcy555572r51T+hMAQBtDCVM3Z5kk0HPNx44tsgaSbZJ8rNSyh+S7JbkjKVNQm+a5qSmaWY3TTN7vfXWG37VPSRMAQBtDCVMXZJkZilli1LKqkkOSXLGohebpnmwaZp1m6bZvGmazZNcnOTApmnmjEjFPSZMAQBtrDBMNU2zMMm7kpyT5JokpzdNc1Up5dhSyoEjXeBIE6YAgDamDOWkpmnOTnL2Esc+soxz92xf1ugRpgCANvp+BfQpU5Inn0yeeKLrSgCA8ajvw1QpulMAwPD1fZhKhCkAYPiEqQhTAMDwCVNJVlsteeyxrqsAAMYjYSrJ6qsLUwDA8AhTqZ2puXO7rgIAGI+EqdTOlDAFAAyHMBXDfADA8AlTMcwHAAyfMBWdKQBg+ISp6EwBAMMnTMUEdABg+ISpGOYDAIZPmIphPgBg+ISpGOYDAIZPmIphPgBg+ISpGOYDAIZPmIphPgBg+ISp1M6UYT4AYDiEqehMAQDDJ0xFmAIAhk+YimE+AGD4hKnoTAEAwydMxTpTAMDwCVOxzhQAMHzCVAzzAQDDJ0xl8QT0pum6EgBgvBGmkkyenKyySvL4411XAgCMN8LUAEN9AMBwCFMDVl89eeihrqsAAMYbYWrAfvslX/xi11UAAOONMDXgYx9LTj45ueeerisBAMYTYWrAjBnJTjslv/xl15UAAOOJMDXIDjskv/lN11UAAOOJMDXIjjsKUwDAyhGmBtGZAgBWljA1yFZbJffdl1x/fdeVAADjhTA1yKRJyd//fbLrrsmtt3ZdDQAwHghTSzj66GS33ZLLLuu6EgBgPBCmlmKrrQz1AQBDI0wtxVZbJddd13UVAMB4IEwtxcyZOlMAwNAIU0thmA8AGCphaik23zy5445k/vyuKwEAxjphaimmTEk22SS56aauKwEAxjphahk22ii5666uqwAAxjphahnWXTe5996uqwAAxjphahmEKQBgKISpZVhnHWEKAFgxYWoZ1l23bnoMALA8wtQy
GOYDAIZCmFoGYQoAGAphahmEKQBgKISpZTABHQAYCmFqGUxABwCGQphahrXWSh59NFmwoOtKAICxTJhahlLqUJ/uFACwPMLUcqy7bnLPPV1XAQCMZcLUcmy+eXLDDV1XAQCMZcLUcmyzTXLFFV1XAQCMZcLUcmy7rTAFACyfMLUc226bXHll11UAAGOZMLUcz3tectNNyeOPd10JADBWCVPLMXVqsuWWulMAwLIJUyvwspclP/5x11UAAGOVMLUCr3pV8v3vd10FADBWCVMrsMcedd7ULbd0XQkAMBYJUyswZUpy8MHJV77SdSUAwFg0pesCxoOjjkr22Sd5+OHk8MOT7bbruiIAYKzQmRqCbbZJ9torufHG5LWvraEKACARpobs619P/uM/klmzku98p+tqAICxQphaSfvtl/z8511XAQCMFcLUSnrxi4UpAGAxYWolbb118thjyc03d10JADAWCFMrqZTkpS9Nzj6760oAgLFAmBqGN70p+drXuq4CABgLhKlh2Hvvuir6Jz5Rt5qZN6/rigCArghTwzBlSnLyycnttycf/3jyV3/VdUUAQFesgD5Mr3xl/XjkkWSHHZIzz6zPAYD+ojPV0jOekfzrvybHHJMsWNB1NQDAaBOmemCffZItt0w++cmuKwEARpthvh750peSnXdO1l8/OeKIZPLkrisCAEaDzlSPzJiR/PjHyZe/nBx8cDJ/ftcVAQCjQZjqoe22q1vNLFiQHH1019UAAKNBmOqxqVOTr3+9rpB+xhldVwMAjDRhagSstVZy+unJ29+efO5zyaOPdl0RADBShKkRsuuuybnnJuedl8yalVx0UdcVAQAjQZgaQdtvX7eb+fznk1e/OvnFL7quCADoNWFqFOy/f/KNb9RAdeKJyWOPdV0RANArwtQo2Xvv5Jxz6qT0zTZLPv3ppGm6rgoAaEuYGkU77JD88IfJhRcm3/xmcsghJqcDwHgnTHXgec+rgWr69OTP/qzOqZo3r+uqAIDhEKY6Mm1aXS39O9+pa1JttVVy/PHmUwHAeCNMdWzXXZOzzkp+8IPk/POT5zwnOe645Lbbuq4MABgKYWqM2GmnuozCmWcmV11Vl1XYfffkM59Jbr656+oAgGURpsaYnXZKvvKV5I9/TD784eS3v0123jnZZJM6Yf0LX0juvLPrKgGARYSpMWrVVZP99qvzqu66qw4B7rdfXfhz662Tl7wkOemk5N57u64UAPqbMDUOlFInqL/lLXUT5T/+MXnXu+pWNc95TrLvvslXv5o88EDXlQJA/xGmxqHVVkte85rk299Obr89+cu/rBPYN9ssOfDAuobVww93XSUA9IchhalSyr6llN+VUq4vpXxgKa//TSnl6lLKb0sp55VSNut9qSzNM55R51J973vJLbckr3td3bpm443r4+9+N5k7t+sqAWDiWmGYKqVMTnJCkv2SzEryxlLKrCVO+02S2U3TbJfku0n+qdeFsmJrrZUcfnhdZf2mm+ocqxNPTGbMSN773uSOO7quEAAmnqF0pnZJcn3TNDc2TTM/yWlJDhp8QtM05zdNs6j/cXGSjXtbJivrWc9KjjgiOffcekfgwoXJNtskRx759KUW5s/vpkYAmAiGEqZmJLl10PPbBo4tyxFJftSmKHprk03qelW//32y/vrJjjsmRx9d7xK84opkgw3qPCsAYOX1dAJ6KeVNSWYn+edlvH5kKWVOKWXOPffc08u3ZgjWXTf5h39IrrkmmTQpmTmzrsD+7nfXcHXrrSv+GgDAU5WmaZZ/QikvSPJ3TdPsM/D8g0nSNM3HlzjvZUk+l+TFTdPcvaI3nj17djNnzpzh1k0PPPpoXXZh9dWT970veeihZJ11kr/6q9rNAgCqUsqlTdPMXtprU4bw5y9JMrOUskWS25MckuTQJd5ghyQnJtl3KEGKsWH69MWPjzkm2WKLZI016l2Bp57aXV0AMJ6scJivaZqFSd6V5Jwk1yQ5vWmaq0op
x5ZSDhw47Z+TPCPJd0opl5VSzhixihkRG2yQXHRR3RfwP/8zedGL6kKgK2hcAkDfG0pnKk3TnJ3k7CWOfWTQ45f1uC46sOOO9fNvfpP8+tfJBz9Yl1P44AfrcCAA8HRWQOdpNtwwecUr6npVxx9f51F97WtdVwUAY9OQOlP0p403Ti67LLn22uQNb6jb1bzwhcnkyV1XBgBjhzDFcm20Uf345Cfr5spJ3RNw1pJr4ANAnzLMx5C89a11JfV3vCM59FCrpgPAIsIUQ1ZKXeBz1qy6399XvtJ1RQDQPcN8rJRS6tYzV16Z7L9/nT91+OFdVwUA3RGmGJZttknOPjvZc8/kOc9Jdt+964oAoBuG+Ri2P//zurDna16T/NM/WeATgP6kM0Ur+++fXHpp/Tx/fvLhD3ddEQCMLp0pWtt44+RHP0pOOCE577yuqwGA0SVM0RPPfnbyrW8lb3xj7VQBQL8QpuiZPfdMTjopOeCA5Oqru64GAEaHOVP01KtelTz6aA1Wp5yS7Ldf1xUBwMjSmaLnDjss+d73kiOOSE4+uetqAGBk6UwxInbfPbnwwuQv/iLZeutkjz26rggARobOFCPmOc+pQ31veENy++1dVwMAI0OYYkTtu2/yznfWIT+LegIwEQlTjLgPfCC55ZY6jwoAJhphihG3yirJiScmRx2V3Hnn4uMLFiTnn5/87nfd1QYAbQlTjIo99kiOPDI5+OBk3rx67EMfSg49NHn3u7utDQDaEKYYNR/9aDJjRt0Y+bzzkq98JfnBD5Krruq6MgAYPmGKUTNpUnLqqcmmm9Yhv899Ltl55+Thh5P77++6OgAYHutMMapWWSX54hefemzWrNqdshYVAOORzhSd22abGqaefDJ56KGuqwGAlaMzRee22SaZMye57rrk0kuTn/2s64oAYOiEKTq3//51/tTjjycPPFCXTFhlla6rAoChMcxH52bOTK65Jrn88mSzzZIrr+y6IgAYOmGKMWHVVZN11kl22SX55S+7rgYAhs4wH2PKrrvWzZF/8Ytk+vSn3/kHAGONzhRjykEHJS9+cQ1V3/hGMndu1xUBwPIJU4wpG22UfOITybveleywQ3LBBV1XBADLJ0wxZr385cmPfpQsXNh1JQCwbMIUY9arX51861vJBhskn/1s19UAwNKZgM6Ytc02yd1318U8d9klef3rkw037LoqAHgqnSnGvJkzk9e+NvnqV7uuBACeTphiXDjqqOTjH6+h6oc/7LoaAFhMmGJc2GGH5IYbkgMOSA47LLn33q4rAoBKmGLcWHfd5K1vTV75yuTUU7uuBgAqYYpx58gjk499LNltt+RLX+q6GgD6nbv5GHf22KPu33fLLcnhh9c7/bbdtuuqAOhXOlOMS897Xl3U8z3vSf7hH7quBoB+Jkwxrr3znckVVySHHJL8y78kTz7ZdUUA9BthinFtjTWSiy+ud/uddlpy3HFdVwRAvxGmGPfWXDN5//uTf//32p265JKuKwKgnwhTTBibbpqccEJd2PMjH0nuvLPrigDoB8IUE8rrXpd85SvJAw/UO/yuvLLrigCY6CyNwITz0pfWj9mza5fqkkvqUCAAjASdKSasww9P9toredvbkgULuq4GgIlKmGJC+8xnksceq3f7/fznXVcDwEQkTDGhTZuWnHVWcuyxtVN12GHJI490XRUAE4kwxYRXSvKa1yRXX51MmpQcfLBhPwB6R5iib0yfXu/0mzw5ecc7kqbpuiIAJgJhir4yZUry7W8n11yTfPjDXVcDwEQgTNF3pk9PfvjD5Pvfr3v6DV7cU7cKgJUlTNGX1l23rj+1xRZ1cc/Xv77OpVp77eRv/1aoAmDohCn61uqrJx//eA1VBx2UvOpVyUUX1Y7V6ad3XR0A44UV0Ol7m29ePxY57ri6cfLBB9e7/wBgefyqgCXsu2+94++cc7quBIDxQJiCJZSSvPOdyb/9W9eVADAeCFOwFIcckvz0p8m11yZf/Wry
6193XREAY5UwBUux5prJ//k/ya671oU+9903OfvsrqsCYCwSpmAZ/uZvkgceqBskn3JKcswxycKFXVcFwFgjTMFylFI/77tvssEG5lEB8HSWRoAhKCX57GeTl72sfjz0UF3gc8stu64MgK4JUzBE222XfPCDyS67JM96Vg1Uhx+e/PM/L+5gAdB/DPPBSnjPe5J7702uuy753e/qHX/HHdd1VQB0SZiClTRpUu1EPfOZyfe+l/zTPyW//e3Tz/vVryz8CdAPhCloYbPNkuOPr3v73X57PfanPyV/93fJAQckRx6ZfOYzXVYIwEgzZwpaOvTQGqS22SbZfvvkiiuSAw9MfvnL5Iknkt12S972trp2FQATT2mappM3nj17djNnzpxO3htGwt13J1demWy1VbLppouPH3po8vznJ+97X2elAdBSKeXSpmlmL+01w3zQI+uvn7zkJU8NUknygQ/Uob5585LPfz7ZZJM6kR2AiUGYghG23XbJjjsmr31tcuyxyXe/m5x5ZnLWWYvPaZoatgAYf4QpGAXHHZe86EV1KYVdd01OPjl55zuTBx9Mbrkl2XvvZK21kr/+664rBWBlCVMwCrbeOnn/+5NZs+rzvfZKXvGK5KUvreFqr72SP/4xOffc5Mc/7rZWAFaOu/mgI8cfX9ehevazk513rsc+/enasfqv/6prV33hCzVsvf/9dX0rAMYeYQo6Mm1aXZ9qsAMOSC6/vK5ftdFGyUc/mpxySvL739dNlidP7qZWAJbN0ggwBj38cPKMZ9SV1h99tIasdddN3vve5LHHkgsvrPOtZs5MZs9OdtppxV/zV79K/vu/kxkzkle/WjBj9D32WHLZZckLXtB1JbDyLI0A48waayzePHn69OSHP6zB6cgj62bLDz+crLdeXRj0da9L9t+/7hW4LBdfXAPZ73+f/O3fJqedNjrfBwx25pnJm9/cdRXQe4b5YBxYffXk//7f+rGk+fOTz30u2X335PDDk499rIaxRS6/PHn965MTT6wdqbPOqsOHhx66OLDBaLjoouSGG5KHHrIjABOLzhSMc6uumhxzTHL11XVfwK23Tg47LDnqqLq+1V571c2YX/3qev4rXpE88khywQV1fauTT06uvbbb74H+cNFFtdN6+eVdVwK9Zc4UTDBXXZXMmZPcf3+9S3DXXZNVVnnqOV/8Yh06fP7zk29+s3YKTjihdrBgJDz0UL1z9fWvr//d/e//3XVFsHKWN2fKMB9MMH/+5/VjeQ4/PPnwh5P77kt+8YvkjjvqCu23315fO/roZOrU5C1vSfbYw3Ag7f3613Uj8F13rXP9YCIxzAd9aPXVk0svTX7+82TDDetw4M9/Xte+mjGj7jP4Z39WJ7wfcUTy5JP1zy1YUDd0tvUNK+uGG+pNFNtuWzcEh4lEZwr61GabPfX5ppvWOwJLqfOwkrqA6P771+7UQw/VuVVrrZXMnVvnZX32s8lqqy37PebPr1vo/OEPdXjnWc8asW+HMe6mm5IttqhDfXfdVY81TZ3n578LxjthCvgfU6c+9fn06cl559XNmddZp25/U0oNVkcdlTz3ucn/+l/Jy16WrL12XXrhuuuSO+9Mzj+/rim07bb1vI9/PPnP/6zdCfrPTTcl++xTl/S4++4apL7xjeTv/375y3rAeCBMAcs1eXLyhjc89diaayannlqHCk85pQarRx5Jttyy3k244YbJP/5jDVmLFgf90pfqnYU/+cniPQrnzq17EV5xRQ1jb397DXBLuvTSOrF+n32SDTYY0W+XEXLjjfW/j+nT69ZI99yTfOhDNVgtWJD86EfJgQd2XSUMjzAFDNtOOw1t9fWkzr2aNi3Zc8/kIx+p3a3PfrZ2rnbdtd42/6lP1ZD2ohfVP9M0tXNx8sn1zsS/+ZvaKdt++8Vfd/78enfinXfWSfWrr97zb5MeWDTMl9Q5eT/+cQ3GjzyS/OxndfHZ++576hppMF4IU8CoOeywZKut6iKj
06bVYb9ttln8+tln1y7YoYcmr3xlDVFXXplcckntdn3723U/w1/9qv5C/tOf6nmLfgG/973J5z+/+Os9+GANW+utN7rfJ081d269FhttVJ+vv369hs95Tr2Z4aKLanfqpz+t1/faa2uHE8YLd/MBo2rXXZOvf71u3Dw4SCV1QdHf/KYGoPe9r06K/+//rkEqqUHrTW+qAerUU+uw4c471zWzTjutDhWdeWY991vfSjbfvA4t/cu/jOq3yBL+8Id6LScN/MZZf/26Ftpmm9W7Ry+8sA7//fjHNUjtsEOycGGnJcNK0ZkCxpQNN6ydq2U59tj6y/jb307e8566LlYp9S7DU0+t62W96EV1ntXPflbvFNtzz2TKlDq36+tfTz75yTpX5+1vr9vvTJu2+OsvXFi7JdOnW1+rV37726d2mtZfPzn33LqO2b331uD7lrfUzuSOO9af/3XX1eU5YDzQmQLGlUmT6urZZ51VfwEPDjwvfGHygx8kL3hBHRrcfvtkk03q8NG//mvyjGfUCfNf/GINWzfcUH95n3xy8oUvJPvuWyfCr7de7Wh96EPJLbc89f2feKIuQPmpT9UJ05tvXjspJ55YX1uepkkef3zlv+fLL6/f6/OfX++KHG9dm3PPTV7+8sXP11+//hwWdaYee6wuwbHaanUe3eTJyX/9V/LGNw7v5wWjTZgCJpTddqsT1ddZZ/GxzTaryzbcfXddsuHFL64h6DvfST7xidrBuuSS5K1vravAz52bnHFG/bzDDjWcvepVyezZdX7WYYcl119fhxzPOy/59Kfrtjy77FKHHO+6q06svvXWuqzE+9+f7L13DRHTp9dO2POeV4//+tfL/l7uvbfOA9tnnzpR//jj6zyzQw99aqB64ok6RHbNNSP0Q22haZJzzqnf/yKL5rBtvnmy8cb18ZZb1mHcK6+s86aOO64O3X7zm/X1O+8c1bJhpdibD2A55s6tQeu+++oE6m22WfodZ01Tf/GfeGLddPqxx2onbJdd6ryuHXesHxttVF+79trkP/6jDjuuv34NaBtsUO9GvP/+GtLOPDN5zWvqsOSiAPL447UjtsUWtdt27bXJm99cX7vrrtrh+cxnFi8xcdVVdbL3hhvWOWaDhzRHwxVX1HpvvHFxF/HrX681L7qT74ADkkcfTW6+uQ7JnnBCcvDB9Xu/9NIaPM89twbGH/ygDs2uu+7ofh+wvL35hCmADi3qKp1xRvLAAzW8rbVWnah/6KFP7bAt8tBDdW7YFVfUcHXcccnb3lbDybvfXfdbfM1r6iTva6+tIeqWW2rn6s1vrhP9Z8yooe2WW+rHzTfXz5Mn11XKZ81Kdt+9hsdFK+KvrCefrMN7e+9du3CL/OQn9Xu79946hLnPPos7T489VruDz31uctttNWw9+WS9KeHtb6+Lvj7zmTWY3nNP7dw98kgNrGuuWX+ei9Y2g14SpgAmmKapK4dvscXTV67/6U/rcgOzZtU7Hxe9fuONyZe/nFxwQR3yXHvtepfdZpvVz5tuWsPIHXfUkPOLX9ThzPXXr0Nyi4YpV1+9fh78ePXVa9dr8uQ6r+3BB+sw6mOP1UA0ZdDtTjffXNcaO+WUGpQuv7wOpw7+3i64oA7HDj72ne/U4dbPf75+vSeeqF9nyy3rHYMzZ9av9Y531KD1whfWYd+bb67BMKmBa/r0+vXcYMDKEKYAGJaFC2un6A9/qJ2kuXPrkNyjjz798bx5NRw9+WQNLHvsUeeVrbLKyNW3KBTdc0/twm28cfLRjybbbZdcfHENkBtsUCe0T5pU17N69rPr97T99jVQrrZavVFhtdVqKFx99WU/Xnvt2hlbddX6scoq9WNSn8xAvu++eift2WfXn/s731k7nf1geWHK0ggALNOUKbVzteTG2GPFou7Seustnlf2ta89/bxFq6svXFiD4QYb1O7Xojss77yzdtHmzl083Dp37uJjiz7ff39dLHbBgjr0uGBB/ZgyZXG4WhS0
lvZ8KOes7PNFxyZPrj+PSZNW/Hmo5zzwQL1J4rLL6tzBq6+ud72+9a31Z3LUUbWDeMQR9Wc6d24ddv397+sw9F/8RZ0Tt+SG6E1Tr8njj9d5hOM9jOpMAUALTVND2vz5iz8Wha1ePR/KOYu6gk0ztM9DOWeNNeq8uZ13rp283Xd/6rDyAw/U5SzOOqsGzenT640XM2bUodvzz69dwc03r0O/i+YEPvxwrX/atNrZnDmzBq4pU+qxLbaoX2vKlKd2/yZNWlzbtGn1zzz8cPKXf1k7jiPJMB8A0ImHHqrdwLXXrkOlDzxQg9KGG9Yu2AMP1EVaH3+8htK5c+tejvPmLe78LVhQQ9QTTyzuns2bV89dc83kr/+6DtWOJMN8AEAn1lyzzmFbZMllLdZeu3a+xrMhjVKWUvYtpfyulHJ9KeUDS3l9ainl2wOv/7KUsnnPKwUAGINWGKZKKZOTnJBkvySzkryxlDJridOOSPKnpmm2SvLpJJ/sdaEAAGPRUDpTuyS5vmmaG5ummZ/ktCQHLXHOQUlOGXj83SQvLcUKHgDAxDeUMDUjya2Dnt82cGyp5zRNszDJg0mWsm4vAMDEMqoT0EspRyY5cuDpI6WU343wW66b5N4Rfg9WnusyNrkuY49rMja5LmPTSF+XZa62NpQwdXuSwTccbjxwbGnn3FZKmZJkrST3LfmFmqY5KclJQ3jPniilzFnWbYx0x3UZm1yXscc1GZtcl7Gpy+sylGG+S5LMLKVsUUpZNckhSc5Y4pwzkrxl4PHrkvy06WoBKwCAUbTCzlTTNAtLKe9Kck6SyUm+3DTNVaWUY5PMaZrmjCRfSnJqKeX6JPenBi4AgAlvSHOmmqY5O8nZSxz7yKDH85Ic3NvSemLUhhRZKa7L2OS6jD2uydjkuoxNnV2XzraTAQCYCMb5Ps0AAN2asGFqRVvgMHJKKV8updxdSrly0LFnlVLOLaVcN/D5mQPHSynlswPX6bellB27q3ziKqVsUko5v5RydSnlqlLK0QPHXZcOlVKmlVJ+VUq5fOC6fGzg+BYDW3NdP7BV16oDx23dNUpKKZNLKb8ppZw18Nw16Vgp5Q+llCtKKZeVUuYMHBsTf4dNyDA1xC1wGDlfTbLvEsc+kOS8pmlmJjlv4HlSr9HMgY8jk3xhlGrsNwuTHNM0zawkuyU5auD/CdelW48neUnTNNsneX6SfUspu6VuyfXpgS26/pS6ZVdi667RdHSSawY9d03Ghr2apnn+oCUQxsTfYRMyTGVoW+AwQpqmuSD1rs7BBm85dEqSVw06/rWmujjJ2qWUjUal0D7SNM0fm6b59cDjh1N/ScyI69KpgZ/vIwNPVxn4aJK8JHVrruTp18XWXSOslLJxkv2T/NvA8xLXZKwaE3+HTdQwNZQtcBhdGzRN88eBx3cm2WDgsWs1ygaGIXZI8su4Lp0bGE66LMndSc5NckOSBwa25kqe+rO3ddfo+EyS9yV5cuD5OnFNxoImyU9KKZcO7KiSjJG/w0Z1OxlI6r/GSyluI+1AKeUZSf49yf/XNM1Dg/8B7bp0o2maJ5I8v5SydpLvJdm624r6WynlgCR3N01zaSllz47L4ale2DTN7aWU9ZOcW0q5dvCLXf4dNlE7U0PZAofRddeiFuvA57sHjrtWo6SUskpqkPpG0zT/MXDYdRkjmqZ5IMn5SV6QOiSx6B+7g3/2/3NdynK27qKV3ZMcWEr5Q+oUkZckOT6uSeeaprl94PPdqf/w2CVj5O+wiRqmhrIFDqNr8JZDb0nyg0HHDx+482K3JA8OatnSIwNzOL6U5JqmaT416CXXpUOllPUGOlIppayW5OWp89nOT92aK3n6dbF11whqmuaDTdNs3DTN5qm/O37aNM1hcU06VUqZXkpZY9HjJHsnuTJj5O+wCbtoZynlFanj3ou2wPnHbivqH6WUbyXZM3UH77uSfDTJ95OcnmTTJDcneX3TNPcP/JL/19S7/+YmeWvTNHM6
KHtCK6W8MMmFSa7I4nkgH0qdN+W6dKSUsl3qpNnJqf+4Pb1pmmNLKVumdkWeleQ3Sd7UNM3jpZRpSU5NnfN2f5JDmqa5sZvqJ76BYb73Nk1zgGvSrYGf//cGnk5J8s2maf6xlLJOxsDfYRM2TAEAjIaJOswHADAqhCkAgBaEKQCAFoQpAIAWhCkAgBaEKQCAFoQpAIAWhCkAgBb+HycjL0XBeJ+jAAAAAElFTkSuQmCC\n",
      "text/plain": [
       "<Figure size 720x720 with 1 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    }
   ],
   "source": [
    "import matplotlib.pyplot as plt\n",
    "epoch = [i for i in range(dfm_params['epoch'])]\n",
    "plt.figure(figsize=(10,10))\n",
    "plt.plot(epoch, loss_polt, label='epoch', color='blue',linewidth=1)#线1\n",
    "#plt.plot(id, res2, label='loss',color='green',linewidth=1)#线2\n",
    "plt.ylim(0,1)\n",
    "\n",
    "plt.show()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 29,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "WARNING:tensorflow:From C:\\Users\\S\\AppData\\Local\\Temp/ipykernel_14420/1236238620.py:1: The name tf.train.Saver is deprecated. Please use tf.compat.v1.train.Saver instead.\n",
      "\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "'./model/my_test_model'"
      ]
     },
     "execution_count": 29,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "saver = tf.train.Saver()\n",
    "saver.save(sess, './model/my_test_model')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 30,
   "metadata": {},
   "outputs": [],
   "source": [
    "sess.close()"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python [conda env:tf1.14]",
   "language": "python",
   "name": "conda-env-tf1.14-py"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.7.11"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 4
}
