{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "导入各种包"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 21,
   "metadata": {},
   "outputs": [],
   "source": [
    "import tensorflow as tf\n",
    "from tensorflow import keras\n",
    "\n",
    "import torch\n",
    "from torchvision import models\n",
    "import torch.nn as nn\n",
    "import torch.nn.functional as F\n",
    "from torch.autograd import Variable\n",
    "import torch.optim as optim\n",
    "\n",
    "from tinymlgen import port\n",
    "import math\n",
    "import numpy as np\n",
    "import matplotlib.pyplot as plt"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "构建训练集、验证集、测试集，比例是6：2：2，总数是1000"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 22,
   "metadata": {},
   "outputs": [],
   "source": [
     "# Build the dataset: noisy samples of y = sin(x) on [0, 2*pi],\n",
     "# split 60/20/20 into train / test / validate sets.\n",
     "SAMPLES = 1000\n",
     "\n",
     "# Uniformly sample x in [0, 2*pi).\n",
     "x_values = np.random.uniform(\n",
     "    low=0, high=2*math.pi, size=SAMPLES).astype(np.float32)\n",
     "\n",
     "# Shuffle so the contiguous split below yields random subsets.\n",
     "np.random.shuffle(x_values)\n",
     "\n",
     "y_values = np.sin(x_values).astype(np.float32)\n",
     "\n",
     "# Add Gaussian noise (std 0.1) so the network has to generalize\n",
     "# rather than memorize the exact curve.\n",
     "y_values += 0.1 * np.random.randn(*y_values.shape)\n",
     "\n",
     "# Split boundaries: first 60% train, next 20% test, final 20% validate.\n",
     "TRAIN_SPLIT =  int(0.6 * SAMPLES)\n",
     "TEST_SPLIT = int(0.2 * SAMPLES + TRAIN_SPLIT)\n",
     "\n",
     "x_train, x_test, x_validate = np.split(x_values, [TRAIN_SPLIT, TEST_SPLIT])\n",
     "y_train, y_test, y_validate = np.split(y_values, [TRAIN_SPLIT, TEST_SPLIT])\n",
     "\n",
     "# Sanity check: the three splits cover every sample exactly once.\n",
     "assert (x_train.size + x_validate.size + x_test.size) ==  SAMPLES"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
     "构建pytorch神经网络的模型结构，结构为最简单的全连接层，和tensorflow lite的hello world例程中网络结构相同"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 23,
   "metadata": {},
   "outputs": [],
   "source": [
    "class PytorchSinNet(nn.Module):\n",
    "    def __init__(self):\n",
    "        super(PytorchSinNet, self).__init__()\n",
    "        self.input_layer = nn.Linear(1, 16)\n",
    "        self.hidden_layer = nn.Linear(16, 16)\n",
    "        self.output_layer = nn.Linear(16, 1)\n",
    "\n",
    "    def forward(self, x):\n",
    "        x = F.relu(self.input_layer(x))\n",
    "        x = F.relu(self.hidden_layer(x))\n",
    "        x = self.output_layer(x)\n",
    "        return x"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "构建和上述pytorch神经网络结构相同的keras模型"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 24,
   "metadata": {},
   "outputs": [],
   "source": [
    "def KerasSinNet():\n",
    "    model = keras.Sequential()\n",
    "    model.add(keras.layers.Dense(16, activation='relu', input_shape=(1,)))\n",
    "    model.add(keras.layers.Dense(16, activation='relu'))\n",
    "    model.add(keras.layers.Dense(1))\n",
    "    model.compile(optimizer='adam', loss=\"mse\", metrics=[\"mae\"])\n",
    "\n",
    "    return model"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "pytorch模型转keras模型的类，支持全连接层、卷积层、BN层，其他类型的层可参考自行实现"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 25,
   "metadata": {},
   "outputs": [],
   "source": [
     "class PytorchToKeras(object):\n",
     "    \"\"\"Copy weights from a PyTorch model into an equivalent Keras model.\n",
     "\n",
     "    Supports fully connected (Linear), convolutional (Conv2d) and batch-norm\n",
     "    (BatchNorm2d) layers; other layer types can be added by extending\n",
     "    convert(). Assumes pModel and kModel declare their weighted layers in\n",
     "    the same order.\n",
     "    \"\"\"\n",
     "    def __init__(self, pModel, kModel):\n",
     "        # NOTE(review): super(...) is evaluated but its __init__ is never\n",
     "        # called; harmless for a plain object subclass.\n",
     "        super(PytorchToKeras, self)\n",
     "        # Weighted PyTorch modules, collected in forward-pass order.\n",
     "        self.__source_layers = []\n",
     "        # Indices of Keras layers that own weights, in model order.\n",
     "        self.__target_layers = []\n",
     "        self.pModel = pModel\n",
     "        self.kModel = kModel\n",
     "        # Force inference mode globally.\n",
     "        # NOTE(review): set_learning_phase is deprecated in newer TF\n",
     "        # releases -- confirm against the installed TensorFlow version.\n",
     "        tf.keras.backend.set_learning_phase(0)\n",
     "\n",
     "    def __retrieve_k_layers(self):\n",
     "        # Record the index of every Keras layer that has weights\n",
     "        # (skips activation-only / reshape-style layers).\n",
     "        for i, layer in enumerate(self.kModel.layers):\n",
     "            if len(layer.weights) > 0:\n",
     "                self.__target_layers.append(i)\n",
     "\n",
     "    def __retrieve_p_layers(self, input_size):\n",
     "        # Run one dummy forward pass and use forward hooks to record, in\n",
     "        # execution order, every module that owns a weight tensor.\n",
     "        input = torch.randn(input_size)\n",
     "        input = Variable(input.unsqueeze(0))\n",
     "        hooks = []\n",
     "        def add_hooks(module):\n",
     "            def hook(module, input, output):\n",
     "                if hasattr(module, \"weight\"):\n",
     "                    # print(module)\n",
     "                    self.__source_layers.append(module)\n",
     "            # Hook only leaf modules: skip containers and the root model.\n",
     "            if not isinstance(module, nn.ModuleList) and not isinstance(module, nn.Sequential) and module != self.pModel:\n",
     "                hooks.append(module.register_forward_hook(hook))\n",
     "        self.pModel.apply(add_hooks)\n",
     "        self.pModel(input)\n",
     "        # Detach the hooks so later forward passes are unaffected.\n",
     "        for hook in hooks:\n",
     "            hook.remove()\n",
     "\n",
     "    def convert(self, input_size):\n",
     "        \"\"\"Transfer weights layer by layer.\n",
     "\n",
     "        input_size is the per-sample input shape (without the batch\n",
     "        dimension) used for the probing forward pass.\n",
     "        \"\"\"\n",
     "        self.__retrieve_k_layers()\n",
     "        self.__retrieve_p_layers(input_size)\n",
     "\n",
     "        for i, (source_layer, target_layer) in enumerate(zip(self.__source_layers, self.__target_layers)):\n",
     "            print(source_layer)\n",
     "            # Default transpose order: reverse all weight dimensions.\n",
     "            weight_size = len(source_layer.weight.data.size())\n",
     "            transpose_dims = []\n",
     "            for i in range(weight_size):\n",
     "                transpose_dims.append(weight_size - i - 1)\n",
     "            if isinstance(source_layer, nn.Conv2d):\n",
     "                # PyTorch conv weights are (out, in, kH, kW); Keras expects\n",
     "                # (kH, kW, in, out).\n",
     "                transpose_dims = [2,3,1,0]\n",
     "                self.kModel.layers[target_layer].set_weights([source_layer.weight.data.numpy(\n",
     "                ).transpose(transpose_dims), source_layer.bias.data.numpy()])\n",
     "            elif isinstance(source_layer, nn.BatchNorm2d):\n",
     "                # Keras BatchNormalization weight order: gamma, beta,\n",
     "                # moving mean, moving variance.\n",
     "                self.kModel.layers[target_layer].set_weights([source_layer.weight.data.numpy(), source_layer.bias.data.numpy(),\n",
     "                                                              source_layer.running_mean.data.numpy(), source_layer.running_var.data.numpy()])\n",
     "            elif isinstance(source_layer, nn.Linear):\n",
     "                # Dense weights: PyTorch stores (out, in); Keras wants (in, out).\n",
     "                self.kModel.layers[target_layer].set_weights([source_layer.weight.data.numpy().T, source_layer.bias.data.numpy().T])\n",
     "\n",
     "    def save_model(self, output_file):\n",
     "        \"\"\"Save the full Keras model (architecture + weights) to output_file.\"\"\"\n",
     "        self.kModel.save(output_file)\n",
     "\n",
     "    def save_weights(self, output_file):\n",
     "        \"\"\"Save only the Keras weights to output_file in HDF5 format.\"\"\"\n",
     "        self.kModel.save_weights(output_file, save_format='h5')"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "用于训练pytorch神经网络时，取一个batch的数据"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 26,
   "metadata": {},
   "outputs": [],
   "source": [
    "def get_batch_data(x_dataset, y_dataset, batch_size):\n",
    "    dataset_length = len(x_dataset)\n",
    "    index = np.random.randint(0, dataset_length, batch_size)\n",
    "    x_batch = torch.tensor(x_dataset[index]).resize(batch_size,1)\n",
    "    y_batch = torch.tensor(y_dataset[index]).resize(batch_size,1)\n",
    "\n",
    "    return x_batch, y_batch"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "训练pytorch神经网络"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 27,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "one epoch length: 10\n",
      "epoch:0------train loss:0.8691226243972778------validate loss:0.6217939853668213\n",
      "epoch:1------train loss:0.5954238772392273------validate loss:0.43671348690986633\n",
      "epoch:2------train loss:0.4213774800300598------validate loss:0.37963783740997314\n",
      "epoch:3------train loss:0.3601967692375183------validate loss:0.36287325620651245\n",
      "epoch:4------train loss:0.33182492852211------validate loss:0.35451048612594604\n",
      "epoch:5------train loss:0.33565980195999146------validate loss:0.34688544273376465\n",
      "epoch:6------train loss:0.3241201937198639------validate loss:0.338534951210022\n",
      "epoch:7------train loss:0.30557307600975037------validate loss:0.3297012746334076\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "d:\\Anaconda3\\lib\\site-packages\\torch\\_tensor.py:586: UserWarning: non-inplace resize is deprecated\n",
      "  warnings.warn(\"non-inplace resize is deprecated\")\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "epoch:8------train loss:0.3169418275356293------validate loss:0.3213631808757782\n",
      "epoch:9------train loss:0.32131415605545044------validate loss:0.31254711747169495\n",
      "epoch:10------train loss:0.27836865186691284------validate loss:0.304341584444046\n",
      "epoch:11------train loss:0.29836592078208923------validate loss:0.294969767332077\n",
      "epoch:12------train loss:0.2765637934207916------validate loss:0.2853837311267853\n",
      "epoch:13------train loss:0.25399094820022583------validate loss:0.27619317173957825\n",
      "epoch:14------train loss:0.27464357018470764------validate loss:0.2673548460006714\n",
      "epoch:15------train loss:0.25899842381477356------validate loss:0.25839582085609436\n",
      "epoch:16------train loss:0.24174876511096954------validate loss:0.2487240731716156\n",
      "epoch:17------train loss:0.2436123788356781------validate loss:0.24081043899059296\n",
      "epoch:18------train loss:0.23722270131111145------validate loss:0.23172029852867126\n",
      "epoch:19------train loss:0.22101232409477234------validate loss:0.22162814438343048\n",
      "epoch:20------train loss:0.2139074057340622------validate loss:0.21313060820102692\n",
      "epoch:21------train loss:0.2079557627439499------validate loss:0.20433799922466278\n",
      "epoch:22------train loss:0.19738134741783142------validate loss:0.19488559663295746\n",
      "epoch:23------train loss:0.19167007505893707------validate loss:0.18628168106079102\n",
      "epoch:24------train loss:0.17683729529380798------validate loss:0.17897634208202362\n",
      "epoch:25------train loss:0.1727714240550995------validate loss:0.17207473516464233\n",
      "epoch:26------train loss:0.169312983751297------validate loss:0.1650974154472351\n",
      "epoch:27------train loss:0.16605523228645325------validate loss:0.15898242592811584\n",
      "epoch:28------train loss:0.15790942311286926------validate loss:0.15318670868873596\n",
      "epoch:29------train loss:0.14810515940189362------validate loss:0.1513933390378952\n",
      "epoch:30------train loss:0.14121350646018982------validate loss:0.14587435126304626\n",
      "epoch:31------train loss:0.1413848102092743------validate loss:0.13839049637317657\n",
      "epoch:32------train loss:0.1319088339805603------validate loss:0.13366790115833282\n",
      "epoch:33------train loss:0.12772242724895477------validate loss:0.1296607404947281\n",
      "epoch:34------train loss:0.11704261600971222------validate loss:0.127607062458992\n",
      "epoch:35------train loss:0.12744691967964172------validate loss:0.1233726516366005\n",
      "epoch:36------train loss:0.1223485916852951------validate loss:0.12015179544687271\n",
      "epoch:37------train loss:0.11948609352111816------validate loss:0.11730030924081802\n",
      "epoch:38------train loss:0.10271580517292023------validate loss:0.11449956148862839\n",
      "epoch:39------train loss:0.10356607288122177------validate loss:0.1142888143658638\n",
      "epoch:40------train loss:0.10094790160655975------validate loss:0.11270370334386826\n",
      "epoch:41------train loss:0.11086289584636688------validate loss:0.10829871147871017\n",
      "epoch:42------train loss:0.1073312759399414------validate loss:0.10572849214076996\n",
      "epoch:43------train loss:0.108012855052948------validate loss:0.10454165190458298\n",
      "epoch:44------train loss:0.09877106547355652------validate loss:0.10312400758266449\n",
      "epoch:45------train loss:0.10697953402996063------validate loss:0.10398433357477188\n",
      "epoch:46------train loss:0.09848371893167496------validate loss:0.10124002397060394\n",
      "epoch:47------train loss:0.1030319333076477------validate loss:0.09964605420827866\n",
      "epoch:48------train loss:0.08425597101449966------validate loss:0.1000140979886055\n",
      "epoch:49------train loss:0.0991445928812027------validate loss:0.09723503142595291\n",
      "epoch:50------train loss:0.09749053418636322------validate loss:0.09566018730401993\n",
      "epoch:51------train loss:0.09030353277921677------validate loss:0.09384941309690475\n",
      "epoch:52------train loss:0.0921730026602745------validate loss:0.0950242429971695\n",
      "epoch:53------train loss:0.08663328737020493------validate loss:0.09316252917051315\n",
      "epoch:54------train loss:0.0904635637998581------validate loss:0.09182566404342651\n",
      "epoch:55------train loss:0.09601671993732452------validate loss:0.09046361595392227\n",
      "epoch:56------train loss:0.08468297868967056------validate loss:0.09084399044513702\n",
      "epoch:57------train loss:0.08569677174091339------validate loss:0.09024828672409058\n",
      "epoch:58------train loss:0.083978071808815------validate loss:0.08897322416305542\n",
      "epoch:59------train loss:0.0953725129365921------validate loss:0.0872284471988678\n",
      "epoch:60------train loss:0.08613453060388565------validate loss:0.0894886702299118\n",
      "epoch:61------train loss:0.08427272737026215------validate loss:0.08533907681703568\n",
      "epoch:62------train loss:0.08693396300077438------validate loss:0.08742912113666534\n",
      "epoch:63------train loss:0.08210881054401398------validate loss:0.08538471907377243\n",
      "epoch:64------train loss:0.09053726494312286------validate loss:0.0825766921043396\n",
      "epoch:65------train loss:0.07209880650043488------validate loss:0.08590523898601532\n",
      "epoch:66------train loss:0.07833920419216156------validate loss:0.08464124798774719\n",
      "epoch:67------train loss:0.08587516099214554------validate loss:0.08116797357797623\n",
      "epoch:68------train loss:0.07423893362283707------validate loss:0.08134110271930695\n",
      "epoch:69------train loss:0.07373195886611938------validate loss:0.08137529343366623\n",
      "epoch:70------train loss:0.0756656602025032------validate loss:0.08034566044807434\n",
      "epoch:71------train loss:0.08017388731241226------validate loss:0.079307422041893\n",
      "epoch:72------train loss:0.07143674045801163------validate loss:0.07739626616239548\n",
      "epoch:73------train loss:0.07325908541679382------validate loss:0.07895734161138535\n",
      "epoch:74------train loss:0.07422877848148346------validate loss:0.07688740640878677\n",
      "epoch:75------train loss:0.09447891265153885------validate loss:0.0760369524359703\n",
      "epoch:76------train loss:0.08455047011375427------validate loss:0.0740952417254448\n",
      "epoch:77------train loss:0.08409976959228516------validate loss:0.07335620373487473\n",
      "epoch:78------train loss:0.0745849758386612------validate loss:0.07326273620128632\n",
      "epoch:79------train loss:0.07600651681423187------validate loss:0.07345019280910492\n",
      "epoch:80------train loss:0.07447190582752228------validate loss:0.07200507074594498\n",
      "epoch:81------train loss:0.06818704307079315------validate loss:0.07083454728126526\n",
      "epoch:82------train loss:0.08083788305521011------validate loss:0.07071331888437271\n",
      "epoch:83------train loss:0.06338871270418167------validate loss:0.06912551820278168\n",
      "epoch:84------train loss:0.06687533855438232------validate loss:0.07022729516029358\n",
      "epoch:85------train loss:0.07448233664035797------validate loss:0.06825994700193405\n",
      "epoch:86------train loss:0.06377413123846054------validate loss:0.0680263340473175\n",
      "epoch:87------train loss:0.0674809068441391------validate loss:0.07030059397220612\n",
      "epoch:88------train loss:0.059007883071899414------validate loss:0.06684906780719757\n",
      "epoch:89------train loss:0.07622967660427094------validate loss:0.06517617404460907\n",
      "epoch:90------train loss:0.06447751820087433------validate loss:0.06475391238927841\n",
      "epoch:91------train loss:0.055843718349933624------validate loss:0.06677961349487305\n",
      "epoch:92------train loss:0.07355687767267227------validate loss:0.06279939413070679\n",
      "epoch:93------train loss:0.06768740713596344------validate loss:0.06177015230059624\n",
      "epoch:94------train loss:0.06542564928531647------validate loss:0.06106335669755936\n",
      "epoch:95------train loss:0.053494758903980255------validate loss:0.061138272285461426\n",
      "epoch:96------train loss:0.06054913252592087------validate loss:0.06045669689774513\n",
      "epoch:97------train loss:0.06489387899637222------validate loss:0.05867141857743263\n",
      "epoch:98------train loss:0.05846172571182251------validate loss:0.05755371227860451\n",
      "epoch:99------train loss:0.04669466242194176------validate loss:0.06327274441719055\n",
      "epoch:100------train loss:0.07131845504045486------validate loss:0.0566733218729496\n",
      "epoch:101------train loss:0.05735307186841965------validate loss:0.055286955088377\n",
      "epoch:102------train loss:0.05801864340901375------validate loss:0.0556204691529274\n",
      "epoch:103------train loss:0.058984510600566864------validate loss:0.054294537752866745\n",
      "epoch:104------train loss:0.058489035815000534------validate loss:0.053617868572473526\n",
      "epoch:105------train loss:0.05572112649679184------validate loss:0.05361977219581604\n",
      "epoch:106------train loss:0.056427113711833954------validate loss:0.05167077109217644\n",
      "epoch:107------train loss:0.05597226694226265------validate loss:0.05109322443604469\n",
      "epoch:108------train loss:0.04840073734521866------validate loss:0.05164240673184395\n",
      "epoch:109------train loss:0.04330630972981453------validate loss:0.05476957932114601\n",
      "epoch:110------train loss:0.048849016427993774------validate loss:0.0505668930709362\n",
      "epoch:111------train loss:0.047547392547130585------validate loss:0.04879828542470932\n",
      "epoch:112------train loss:0.05070404335856438------validate loss:0.04772345721721649\n",
      "epoch:113------train loss:0.05594742298126221------validate loss:0.046794258058071136\n",
      "epoch:114------train loss:0.04332905262708664------validate loss:0.049681100994348526\n",
      "epoch:115------train loss:0.05347108095884323------validate loss:0.047077227383852005\n",
      "epoch:116------train loss:0.03891506791114807------validate loss:0.045422859489917755\n",
      "epoch:117------train loss:0.04637029021978378------validate loss:0.04370963200926781\n",
      "epoch:118------train loss:0.04307207465171814------validate loss:0.04582752659916878\n",
      "epoch:119------train loss:0.047421615570783615------validate loss:0.043014172464609146\n",
      "epoch:120------train loss:0.052673012018203735------validate loss:0.04218240827322006\n",
      "epoch:121------train loss:0.04339411109685898------validate loss:0.04178232699632645\n",
      "epoch:122------train loss:0.04581909999251366------validate loss:0.04048307240009308\n",
      "epoch:123------train loss:0.04959545657038689------validate loss:0.039532795548439026\n",
      "epoch:124------train loss:0.04677705094218254------validate loss:0.03873530030250549\n",
      "epoch:125------train loss:0.045392364263534546------validate loss:0.03898265212774277\n",
      "epoch:126------train loss:0.04060392081737518------validate loss:0.03707023710012436\n",
      "epoch:127------train loss:0.03697727620601654------validate loss:0.0367891900241375\n",
      "epoch:128------train loss:0.039942435920238495------validate loss:0.03790069743990898\n",
      "epoch:129------train loss:0.03812897577881813------validate loss:0.03563191741704941\n",
      "epoch:130------train loss:0.03483200818300247------validate loss:0.03721250593662262\n",
      "epoch:131------train loss:0.04358239099383354------validate loss:0.03464293107390404\n",
      "epoch:132------train loss:0.03960622474551201------validate loss:0.03450430929660797\n",
      "epoch:133------train loss:0.03359469026327133------validate loss:0.03428396210074425\n",
      "epoch:134------train loss:0.0352078452706337------validate loss:0.03372747078537941\n",
      "epoch:135------train loss:0.036645542830228806------validate loss:0.03181738778948784\n",
      "epoch:136------train loss:0.0335281640291214------validate loss:0.033582549542188644\n",
      "epoch:137------train loss:0.03630354627966881------validate loss:0.030789783224463463\n",
      "epoch:138------train loss:0.031363364309072495------validate loss:0.030756855383515358\n",
      "epoch:139------train loss:0.03389189392328262------validate loss:0.030048441141843796\n",
      "epoch:140------train loss:0.02914288081228733------validate loss:0.02915724366903305\n",
      "epoch:141------train loss:0.029853764921426773------validate loss:0.029598932713270187\n",
      "epoch:142------train loss:0.037629492580890656------validate loss:0.02832123264670372\n",
      "epoch:143------train loss:0.03203965723514557------validate loss:0.027924953028559685\n",
      "epoch:144------train loss:0.0312904417514801------validate loss:0.02772364392876625\n",
      "epoch:145------train loss:0.03189868479967117------validate loss:0.026678521186113358\n",
      "epoch:146------train loss:0.030440110713243484------validate loss:0.02685464918613434\n",
      "epoch:147------train loss:0.03112155571579933------validate loss:0.026851443573832512\n",
      "epoch:148------train loss:0.027893060818314552------validate loss:0.02682788111269474\n",
      "epoch:149------train loss:0.030089784413576126------validate loss:0.028336696326732635\n",
      "epoch:150------train loss:0.03073597513139248------validate loss:0.02508879452943802\n",
      "epoch:151------train loss:0.029497195035219193------validate loss:0.026102660223841667\n",
      "epoch:152------train loss:0.025532376021146774------validate loss:0.023769531399011612\n",
      "epoch:153------train loss:0.025737499818205833------validate loss:0.023748483508825302\n",
      "epoch:154------train loss:0.02306489646434784------validate loss:0.024795344099402428\n",
      "epoch:155------train loss:0.02430989034473896------validate loss:0.022524554282426834\n",
      "epoch:156------train loss:0.026365239173173904------validate loss:0.024456758052110672\n",
      "epoch:157------train loss:0.023423204198479652------validate loss:0.022378265857696533\n",
      "epoch:158------train loss:0.028665265068411827------validate loss:0.022387804463505745\n",
      "epoch:159------train loss:0.020822081714868546------validate loss:0.021450556814670563\n",
      "epoch:160------train loss:0.026014547795057297------validate loss:0.021344123408198357\n",
      "epoch:161------train loss:0.02342906780540943------validate loss:0.020866865292191505\n",
      "epoch:162------train loss:0.02393008954823017------validate loss:0.02187916822731495\n",
      "epoch:163------train loss:0.024178776890039444------validate loss:0.020331844687461853\n",
      "epoch:164------train loss:0.02406458929181099------validate loss:0.01966647244989872\n",
      "epoch:165------train loss:0.022506339475512505------validate loss:0.020054146647453308\n",
      "epoch:166------train loss:0.02349831722676754------validate loss:0.019485455006361008\n",
      "epoch:167------train loss:0.023622529581189156------validate loss:0.01956828124821186\n",
      "epoch:168------train loss:0.02260877564549446------validate loss:0.01836557686328888\n",
      "epoch:169------train loss:0.02233215793967247------validate loss:0.019313717260956764\n",
      "epoch:170------train loss:0.0218182560056448------validate loss:0.01789357140660286\n",
      "epoch:171------train loss:0.024205291643738747------validate loss:0.01915380172431469\n",
      "epoch:172------train loss:0.021199973300099373------validate loss:0.018088817596435547\n",
      "epoch:173------train loss:0.024720573797822------validate loss:0.017096245661377907\n",
      "epoch:174------train loss:0.021061331033706665------validate loss:0.017442351207137108\n",
      "epoch:175------train loss:0.020985301584005356------validate loss:0.017054686322808266\n",
      "epoch:176------train loss:0.020855039358139038------validate loss:0.01729682646691799\n",
      "epoch:177------train loss:0.020573902875185013------validate loss:0.016787176951766014\n",
      "epoch:178------train loss:0.017801254987716675------validate loss:0.016741469502449036\n",
      "epoch:179------train loss:0.01753167249262333------validate loss:0.016858000308275223\n",
      "epoch:180------train loss:0.02168787457048893------validate loss:0.01559511385858059\n",
      "epoch:181------train loss:0.019373413175344467------validate loss:0.015952598303556442\n",
      "epoch:182------train loss:0.018215380609035492------validate loss:0.015393839217722416\n",
      "epoch:183------train loss:0.017030630260705948------validate loss:0.015992287546396255\n",
      "epoch:184------train loss:0.017176035791635513------validate loss:0.015303612686693668\n",
      "epoch:185------train loss:0.01838350109755993------validate loss:0.015613513067364693\n",
      "epoch:186------train loss:0.01870676502585411------validate loss:0.014829302206635475\n",
      "epoch:187------train loss:0.022389430552721024------validate loss:0.015044542960822582\n",
      "epoch:188------train loss:0.015753569081425667------validate loss:0.01570780947804451\n",
      "epoch:189------train loss:0.019258491694927216------validate loss:0.014469975605607033\n",
      "epoch:190------train loss:0.018352322280406952------validate loss:0.015537474304437637\n",
      "epoch:191------train loss:0.015763919800519943------validate loss:0.016397828236222267\n",
      "epoch:192------train loss:0.01794527843594551------validate loss:0.014774437062442303\n",
      "epoch:193------train loss:0.01830798014998436------validate loss:0.014361836016178131\n",
      "epoch:194------train loss:0.020060908049345016------validate loss:0.013955842703580856\n",
      "epoch:195------train loss:0.018084606155753136------validate loss:0.013946706429123878\n",
      "epoch:196------train loss:0.01724918559193611------validate loss:0.01388757023960352\n",
      "epoch:197------train loss:0.015910614281892776------validate loss:0.01542073767632246\n",
      "epoch:198------train loss:0.017918843775987625------validate loss:0.013709834776818752\n",
      "epoch:199------train loss:0.015276143327355385------validate loss:0.014092356897890568\n",
      "epoch:200------train loss:0.015408968552947044------validate loss:0.013377352617681026\n",
      "epoch:201------train loss:0.01742960698902607------validate loss:0.013346855528652668\n",
      "epoch:202------train loss:0.015425605699419975------validate loss:0.012922792695462704\n",
      "epoch:203------train loss:0.014616815373301506------validate loss:0.013197148218750954\n",
      "epoch:204------train loss:0.014244127087295055------validate loss:0.012818966060876846\n",
      "epoch:205------train loss:0.014025690034031868------validate loss:0.013691242784261703\n",
      "epoch:206------train loss:0.017117047682404518------validate loss:0.012503871694207191\n",
      "epoch:207------train loss:0.01574830338358879------validate loss:0.012958040460944176\n",
      "epoch:208------train loss:0.015253959223628044------validate loss:0.014046994037926197\n",
      "epoch:209------train loss:0.014891408383846283------validate loss:0.013064611703157425\n",
      "epoch:210------train loss:0.01559417974203825------validate loss:0.014110391959547997\n",
      "epoch:211------train loss:0.014012026600539684------validate loss:0.012217147275805473\n",
      "epoch:212------train loss:0.01335243321955204------validate loss:0.013830851763486862\n",
      "epoch:213------train loss:0.01533852331340313------validate loss:0.011917305178940296\n",
      "epoch:214------train loss:0.014019155874848366------validate loss:0.012416989542543888\n",
      "epoch:215------train loss:0.01542776357382536------validate loss:0.01176427025347948\n",
      "epoch:216------train loss:0.0129159614443779------validate loss:0.012871813960373402\n",
      "epoch:217------train loss:0.013597396202385426------validate loss:0.011796219274401665\n",
      "epoch:218------train loss:0.014280376955866814------validate loss:0.012355571612715721\n",
      "epoch:219------train loss:0.014907607808709145------validate loss:0.011892521753907204\n",
      "epoch:220------train loss:0.014180260710418224------validate loss:0.012005489319562912\n",
      "epoch:221------train loss:0.012762278318405151------validate loss:0.01160428300499916\n",
      "epoch:222------train loss:0.015662766993045807------validate loss:0.011953840032219887\n",
      "epoch:223------train loss:0.013905349187552929------validate loss:0.01164990197867155\n",
      "epoch:224------train loss:0.012587392702698708------validate loss:0.012038379907608032\n",
      "epoch:225------train loss:0.012154679745435715------validate loss:0.01172722689807415\n",
      "epoch:226------train loss:0.014229902997612953------validate loss:0.011391067877411842\n",
      "epoch:227------train loss:0.01371485460549593------validate loss:0.012050382792949677\n",
      "epoch:228------train loss:0.01346801221370697------validate loss:0.01111324317753315\n",
      "epoch:229------train loss:0.01377115584909916------validate loss:0.013151019811630249\n",
      "epoch:230------train loss:0.014297084882855415------validate loss:0.011101296171545982\n",
      "epoch:231------train loss:0.014577507972717285------validate loss:0.012179282493889332\n",
      "epoch:232------train loss:0.01228414848446846------validate loss:0.01116264145821333\n",
      "epoch:233------train loss:0.011996477842330933------validate loss:0.011825731955468655\n",
      "epoch:234------train loss:0.013567866757512093------validate loss:0.011363673023879528\n",
      "epoch:235------train loss:0.012254439294338226------validate loss:0.011766442097723484\n",
      "epoch:236------train loss:0.013351967558264732------validate loss:0.010843523778021336\n",
      "epoch:237------train loss:0.011659296229481697------validate loss:0.01104876771569252\n",
      "epoch:238------train loss:0.012672893702983856------validate loss:0.011219528503715992\n",
      "epoch:239------train loss:0.012361844070255756------validate loss:0.011275180615484715\n",
      "epoch:240------train loss:0.012048671953380108------validate loss:0.011006062850356102\n",
      "epoch:241------train loss:0.013196970336139202------validate loss:0.010747695341706276\n",
      "epoch:242------train loss:0.012261276133358479------validate loss:0.011107818223536015\n",
      "epoch:243------train loss:0.012220725417137146------validate loss:0.010852396488189697\n",
      "epoch:244------train loss:0.01155129261314869------validate loss:0.010754959657788277\n",
      "epoch:245------train loss:0.01119230780750513------validate loss:0.010470851324498653\n",
      "epoch:246------train loss:0.012565359473228455------validate loss:0.010993203148245811\n",
      "epoch:247------train loss:0.01178810279816389------validate loss:0.011022969149053097\n",
      "epoch:248------train loss:0.011764520779252052------validate loss:0.011295996606349945\n",
      "epoch:249------train loss:0.012686340138316154------validate loss:0.010524701327085495\n",
      "epoch:250------train loss:0.010977622121572495------validate loss:0.01103815995156765\n",
      "epoch:251------train loss:0.011792514473199844------validate loss:0.010704639367759228\n",
      "epoch:252------train loss:0.012288697995245457------validate loss:0.01037080492824316\n",
      "epoch:253------train loss:0.011793626472353935------validate loss:0.0102871498093009\n",
      "epoch:254------train loss:0.012632953003048897------validate loss:0.011285696178674698\n",
      "epoch:255------train loss:0.011881643906235695------validate loss:0.010835746303200722\n",
      "epoch:256------train loss:0.01212438102811575------validate loss:0.010310976766049862\n",
      "epoch:257------train loss:0.014043701812624931------validate loss:0.011630706489086151\n",
      "epoch:258------train loss:0.011828428134322166------validate loss:0.011492923833429813\n",
      "epoch:259------train loss:0.010771820321679115------validate loss:0.010874752886593342\n",
      "epoch:260------train loss:0.009600239805877209------validate loss:0.010769914835691452\n",
      "epoch:261------train loss:0.012528215534985065------validate loss:0.010944572277367115\n",
      "epoch:262------train loss:0.011561034247279167------validate loss:0.0110536590218544\n",
      "epoch:263------train loss:0.013424413278698921------validate loss:0.010745899751782417\n",
      "epoch:264------train loss:0.01178230345249176------validate loss:0.010765123181045055\n",
      "epoch:265------train loss:0.0116665568202734------validate loss:0.010736210271716118\n",
      "epoch:266------train loss:0.012782554142177105------validate loss:0.010772309266030788\n",
      "epoch:267------train loss:0.011602207086980343------validate loss:0.010371611453592777\n",
      "epoch:268------train loss:0.011238344013690948------validate loss:0.010322107933461666\n",
      "epoch:269------train loss:0.011903280392289162------validate loss:0.010347163304686546\n",
      "epoch:270------train loss:0.01061776652932167------validate loss:0.010317358188331127\n",
      "epoch:271------train loss:0.011163720861077309------validate loss:0.010067700408399105\n",
      "epoch:272------train loss:0.011106578633189201------validate loss:0.010385469533503056\n",
      "epoch:273------train loss:0.011666174046695232------validate loss:0.010352354496717453\n",
      "epoch:274------train loss:0.01056644693017006------validate loss:0.009918988682329655\n",
      "epoch:275------train loss:0.012484285980463028------validate loss:0.010983593761920929\n",
      "epoch:276------train loss:0.011264635249972343------validate loss:0.010096623562276363\n",
      "epoch:277------train loss:0.012047944590449333------validate loss:0.010626151226460934\n",
      "epoch:278------train loss:0.011659249663352966------validate loss:0.009866047650575638\n",
      "epoch:279------train loss:0.012866668403148651------validate loss:0.010207072831690311\n",
      "epoch:280------train loss:0.011370064690709114------validate loss:0.010100370272994041\n",
      "epoch:281------train loss:0.011667588725686073------validate loss:0.010059023275971413\n",
      "epoch:282------train loss:0.011437027715146542------validate loss:0.010377212427556515\n",
      "epoch:283------train loss:0.011497607454657555------validate loss:0.00982279796153307\n",
      "epoch:284------train loss:0.011119166389107704------validate loss:0.010718736797571182\n",
      "epoch:285------train loss:0.010946379974484444------validate loss:0.009783562272787094\n",
      "epoch:286------train loss:0.01021630223840475------validate loss:0.011037129908800125\n",
      "epoch:287------train loss:0.010685062035918236------validate loss:0.009830041788518429\n",
      "epoch:288------train loss:0.010577922686934471------validate loss:0.012322699651122093\n",
      "epoch:289------train loss:0.011324552819132805------validate loss:0.009983858093619347\n",
      "epoch:290------train loss:0.01085847057402134------validate loss:0.010952729731798172\n",
      "epoch:291------train loss:0.011195741593837738------validate loss:0.009856372140347958\n",
      "epoch:292------train loss:0.011473559774458408------validate loss:0.010167943313717842\n",
      "epoch:293------train loss:0.011494448408484459------validate loss:0.010057279840111732\n",
      "epoch:294------train loss:0.012881550006568432------validate loss:0.009987903758883476\n",
      "epoch:295------train loss:0.011346079409122467------validate loss:0.010777149349451065\n",
      "epoch:296------train loss:0.011380793526768684------validate loss:0.010265103541314602\n",
      "epoch:297------train loss:0.01090201921761036------validate loss:0.010494193993508816\n",
      "epoch:298------train loss:0.012676453217864037------validate loss:0.010496062226593494\n",
      "epoch:299------train loss:0.012326912954449654------validate loss:0.009610136970877647\n",
      "epoch:300------train loss:0.011333637870848179------validate loss:0.009952095337212086\n",
      "epoch:301------train loss:0.010678376071155071------validate loss:0.00986064225435257\n",
      "epoch:302------train loss:0.012120306491851807------validate loss:0.010547840967774391\n",
      "epoch:303------train loss:0.011865431442856789------validate loss:0.009695128537714481\n",
      "epoch:304------train loss:0.01024292130023241------validate loss:0.010629015043377876\n",
      "epoch:305------train loss:0.010671216994524002------validate loss:0.010140081867575645\n",
      "epoch:306------train loss:0.009907668456435204------validate loss:0.010126697830855846\n",
      "epoch:307------train loss:0.011390237137675285------validate loss:0.010052746161818504\n",
      "epoch:308------train loss:0.011339632794260979------validate loss:0.009743153117597103\n",
      "epoch:309------train loss:0.01158720813691616------validate loss:0.010579721070826054\n",
      "epoch:310------train loss:0.010285499505698681------validate loss:0.009972531348466873\n",
      "epoch:311------train loss:0.010484189726412296------validate loss:0.010380029678344727\n",
      "epoch:312------train loss:0.010793095454573631------validate loss:0.009783281944692135\n",
      "epoch:313------train loss:0.01194906234741211------validate loss:0.009781469590961933\n",
      "epoch:314------train loss:0.011095225811004639------validate loss:0.010638704523444176\n",
      "epoch:315------train loss:0.010808086022734642------validate loss:0.00980461947619915\n",
      "epoch:316------train loss:0.00976504571735859------validate loss:0.010497624054551125\n",
      "epoch:317------train loss:0.009982990100979805------validate loss:0.009517701342701912\n",
      "epoch:318------train loss:0.01141558401286602------validate loss:0.010141892358660698\n",
      "epoch:319------train loss:0.010302640497684479------validate loss:0.010377428494393826\n",
      "epoch:320------train loss:0.01032907236367464------validate loss:0.010296650230884552\n",
      "epoch:321------train loss:0.011574587784707546------validate loss:0.01012994721531868\n",
      "epoch:322------train loss:0.011663844808936119------validate loss:0.00958849024027586\n",
      "epoch:323------train loss:0.010208009742200375------validate loss:0.010269521735608578\n",
      "epoch:324------train loss:0.011318124830722809------validate loss:0.010510141961276531\n",
      "epoch:325------train loss:0.011439139023423195------validate loss:0.010252488777041435\n",
      "epoch:326------train loss:0.01050817035138607------validate loss:0.010304477997124195\n",
      "epoch:327------train loss:0.011115208268165588------validate loss:0.010290558449923992\n",
      "epoch:328------train loss:0.011242957785725594------validate loss:0.009757252410054207\n",
      "epoch:329------train loss:0.010127784684300423------validate loss:0.009626350365579128\n",
      "epoch:330------train loss:0.011581877246499062------validate loss:0.009659851901233196\n",
      "epoch:331------train loss:0.011065376922488213------validate loss:0.010073869489133358\n",
      "epoch:332------train loss:0.012234198860824108------validate loss:0.009774455800652504\n",
      "epoch:333------train loss:0.011494857259094715------validate loss:0.01104765199124813\n",
      "epoch:334------train loss:0.009494424797594547------validate loss:0.009691054932773113\n",
      "epoch:335------train loss:0.010006209835410118------validate loss:0.01006618607789278\n",
      "epoch:336------train loss:0.01118416991084814------validate loss:0.009687747806310654\n",
      "epoch:337------train loss:0.01264413632452488------validate loss:0.011540049687027931\n",
      "epoch:338------train loss:0.011475995182991028------validate loss:0.010245556943118572\n",
      "epoch:339------train loss:0.010859597474336624------validate loss:0.009701710194349289\n",
      "epoch:340------train loss:0.011623697355389595------validate loss:0.010954865254461765\n",
      "epoch:341------train loss:0.010829707607626915------validate loss:0.009753838181495667\n",
      "epoch:342------train loss:0.010119292885065079------validate loss:0.009458433836698532\n",
      "epoch:343------train loss:0.01029527373611927------validate loss:0.010358601808547974\n",
      "epoch:344------train loss:0.010304916650056839------validate loss:0.009983696043491364\n",
      "epoch:345------train loss:0.011895233765244484------validate loss:0.009995744563639164\n",
      "epoch:346------train loss:0.010766255669295788------validate loss:0.009471719153225422\n",
      "epoch:347------train loss:0.00952188204973936------validate loss:0.009604286402463913\n",
      "epoch:348------train loss:0.010311968624591827------validate loss:0.010000105015933514\n",
      "epoch:349------train loss:0.01088869757950306------validate loss:0.010507335886359215\n",
      "epoch:350------train loss:0.011690880171954632------validate loss:0.010065344162285328\n",
      "epoch:351------train loss:0.010767221450805664------validate loss:0.009785884991288185\n",
      "epoch:352------train loss:0.010093745775520802------validate loss:0.01105465181171894\n",
      "epoch:353------train loss:0.010196000337600708------validate loss:0.009476439096033573\n",
      "epoch:354------train loss:0.010702637955546379------validate loss:0.011290382593870163\n",
      "epoch:355------train loss:0.010943436995148659------validate loss:0.01049512904137373\n",
      "epoch:356------train loss:0.010629670694470406------validate loss:0.0094364108517766\n",
      "epoch:357------train loss:0.011310866102576256------validate loss:0.010420192033052444\n",
      "epoch:358------train loss:0.010533236898481846------validate loss:0.009851797483861446\n",
      "epoch:359------train loss:0.010078278370201588------validate loss:0.009695547632873058\n",
      "epoch:360------train loss:0.011629476211965084------validate loss:0.009896871633827686\n",
      "epoch:361------train loss:0.010513635352253914------validate loss:0.009662449359893799\n",
      "epoch:362------train loss:0.011542538180947304------validate loss:0.009932558983564377\n",
      "epoch:363------train loss:0.011918449774384499------validate loss:0.009910184890031815\n",
      "epoch:364------train loss:0.012224514968693256------validate loss:0.0109026487916708\n",
      "epoch:365------train loss:0.010123277083039284------validate loss:0.009545985609292984\n",
      "epoch:366------train loss:0.011278693564236164------validate loss:0.009881928563117981\n",
      "epoch:367------train loss:0.01011115126311779------validate loss:0.010882838629186153\n",
      "epoch:368------train loss:0.011097114533185959------validate loss:0.00972504261881113\n",
      "epoch:369------train loss:0.011022856459021568------validate loss:0.011098958551883698\n",
      "epoch:370------train loss:0.011008949019014835------validate loss:0.010125050321221352\n",
      "epoch:371------train loss:0.01111765019595623------validate loss:0.010021286085247993\n",
      "epoch:372------train loss:0.010064949281513691------validate loss:0.01091298833489418\n",
      "epoch:373------train loss:0.010916986502707005------validate loss:0.010493067093193531\n",
      "epoch:374------train loss:0.011885855346918106------validate loss:0.009703517891466618\n",
      "epoch:375------train loss:0.011926254257559776------validate loss:0.01054413802921772\n",
      "epoch:376------train loss:0.011223804205656052------validate loss:0.011657669208943844\n",
      "epoch:377------train loss:0.010050470940768719------validate loss:0.009503673762083054\n",
      "epoch:378------train loss:0.011490091681480408------validate loss:0.009724149480462074\n",
      "epoch:379------train loss:0.010350333526730537------validate loss:0.009833957999944687\n",
      "epoch:380------train loss:0.010616871528327465------validate loss:0.010131328366696835\n",
      "epoch:381------train loss:0.011409863829612732------validate loss:0.009547527879476547\n",
      "epoch:382------train loss:0.011534840799868107------validate loss:0.009963743388652802\n",
      "epoch:383------train loss:0.010353172197937965------validate loss:0.010059802792966366\n",
      "epoch:384------train loss:0.010754200629889965------validate loss:0.009640409611165524\n",
      "epoch:385------train loss:0.010354713536798954------validate loss:0.010621590539813042\n",
      "epoch:386------train loss:0.011641299352049828------validate loss:0.009477801620960236\n",
      "epoch:387------train loss:0.011947459541261196------validate loss:0.011957138776779175\n",
      "epoch:388------train loss:0.011132891289889812------validate loss:0.00972594041377306\n",
      "epoch:389------train loss:0.010274473577737808------validate loss:0.009874659590423107\n",
      "epoch:390------train loss:0.011343519203364849------validate loss:0.013006581924855709\n",
      "epoch:391------train loss:0.010393792763352394------validate loss:0.00953426118940115\n",
      "epoch:392------train loss:0.010065732523798943------validate loss:0.010211454704403877\n",
      "epoch:393------train loss:0.012160789221525192------validate loss:0.010806960053741932\n",
      "epoch:394------train loss:0.010165642015635967------validate loss:0.010027936659753323\n",
      "epoch:395------train loss:0.012543315067887306------validate loss:0.011054516769945621\n",
      "epoch:396------train loss:0.011121661402285099------validate loss:0.009974127635359764\n",
      "epoch:397------train loss:0.012082031928002834------validate loss:0.010922904126346111\n",
      "epoch:398------train loss:0.011212067678570747------validate loss:0.009450186043977737\n",
      "epoch:399------train loss:0.01153402030467987------validate loss:0.009706287644803524\n",
      "epoch:400------train loss:0.011163536459207535------validate loss:0.01050949189811945\n",
      "epoch:401------train loss:0.011230741627514362------validate loss:0.009557077661156654\n",
      "epoch:402------train loss:0.010060995817184448------validate loss:0.010222372598946095\n",
      "epoch:403------train loss:0.01000240258872509------validate loss:0.00998513400554657\n",
      "epoch:404------train loss:0.01141475047916174------validate loss:0.010180785320699215\n",
      "epoch:405------train loss:0.011466136202216148------validate loss:0.010391483083367348\n",
      "epoch:406------train loss:0.010499527677893639------validate loss:0.009869054891169071\n",
      "epoch:407------train loss:0.012267089448869228------validate loss:0.009628134779632092\n",
      "epoch:408------train loss:0.011124308221042156------validate loss:0.009642517194151878\n",
      "epoch:409------train loss:0.01137741468846798------validate loss:0.010631119832396507\n",
      "epoch:410------train loss:0.011158072389662266------validate loss:0.009482705034315586\n",
      "epoch:411------train loss:0.010465383529663086------validate loss:0.010492363013327122\n",
      "epoch:412------train loss:0.010442333295941353------validate loss:0.010111328214406967\n",
      "epoch:413------train loss:0.009883630089461803------validate loss:0.009903062134981155\n",
      "epoch:414------train loss:0.009849077090620995------validate loss:0.010003799572587013\n",
      "epoch:415------train loss:0.011118048802018166------validate loss:0.010404654778540134\n",
      "epoch:416------train loss:0.010428262874484062------validate loss:0.010190832428634167\n",
      "epoch:417------train loss:0.010480032302439213------validate loss:0.009966887533664703\n",
      "epoch:418------train loss:0.01106281392276287------validate loss:0.010001085698604584\n",
      "epoch:419------train loss:0.010072946548461914------validate loss:0.009569826535880566\n",
      "epoch:420------train loss:0.011341756209731102------validate loss:0.010202743113040924\n",
      "epoch:421------train loss:0.011533508077263832------validate loss:0.010857968591153622\n",
      "epoch:422------train loss:0.011479757726192474------validate loss:0.010163877159357071\n",
      "epoch:423------train loss:0.01159313041716814------validate loss:0.01136793103069067\n",
      "epoch:424------train loss:0.011989283375442028------validate loss:0.010153420269489288\n",
      "epoch:425------train loss:0.01058113481849432------validate loss:0.010843059979379177\n",
      "epoch:426------train loss:0.009937241673469543------validate loss:0.009968826547265053\n",
      "epoch:427------train loss:0.011006942018866539------validate loss:0.009609322994947433\n",
      "epoch:428------train loss:0.009592373855412006------validate loss:0.01030608732253313\n",
      "epoch:429------train loss:0.011119872331619263------validate loss:0.009533767588436604\n",
      "epoch:430------train loss:0.010910148732364178------validate loss:0.010615811683237553\n",
      "epoch:431------train loss:0.012448405846953392------validate loss:0.009707420133054256\n",
      "epoch:432------train loss:0.011206313967704773------validate loss:0.00984013732522726\n",
      "epoch:433------train loss:0.012684427201747894------validate loss:0.01159670576453209\n",
      "epoch:434------train loss:0.010706303641200066------validate loss:0.010783392935991287\n",
      "epoch:435------train loss:0.010130099952220917------validate loss:0.010246994905173779\n",
      "epoch:436------train loss:0.011618384160101414------validate loss:0.009754342027008533\n",
      "epoch:437------train loss:0.010567868128418922------validate loss:0.01003301702439785\n",
      "epoch:438------train loss:0.011043854989111423------validate loss:0.010000105947256088\n",
      "epoch:439------train loss:0.010336014442145824------validate loss:0.009739531204104424\n",
      "epoch:440------train loss:0.010329501703381538------validate loss:0.011066469363868237\n",
      "epoch:441------train loss:0.011689831502735615------validate loss:0.010477032512426376\n",
      "epoch:442------train loss:0.011385515332221985------validate loss:0.00979252066463232\n",
      "epoch:443------train loss:0.010992583818733692------validate loss:0.01008524652570486\n",
      "epoch:444------train loss:0.010474354028701782------validate loss:0.009923161007463932\n",
      "epoch:445------train loss:0.010657450184226036------validate loss:0.009847565554082394\n",
      "epoch:446------train loss:0.011707520112395287------validate loss:0.010359328240156174\n",
      "epoch:447------train loss:0.010100672021508217------validate loss:0.009795661084353924\n",
      "epoch:448------train loss:0.011348722502589226------validate loss:0.012289157137274742\n",
      "epoch:449------train loss:0.010931389406323433------validate loss:0.00958656333386898\n",
      "epoch:450------train loss:0.011030464433133602------validate loss:0.01045786589384079\n",
      "epoch:451------train loss:0.011285456828773022------validate loss:0.009956025518476963\n",
      "epoch:452------train loss:0.011732613667845726------validate loss:0.01011060830205679\n",
      "epoch:453------train loss:0.01086535956710577------validate loss:0.010222237557172775\n",
      "epoch:454------train loss:0.010852201841771603------validate loss:0.009680393151938915\n",
      "epoch:455------train loss:0.010200241580605507------validate loss:0.011024659499526024\n",
      "epoch:456------train loss:0.010755323804914951------validate loss:0.009913450106978416\n",
      "epoch:457------train loss:0.011772735975682735------validate loss:0.01035904511809349\n",
      "epoch:458------train loss:0.01136615127325058------validate loss:0.010557085275650024\n",
      "epoch:459------train loss:0.010337994433939457------validate loss:0.009973389096558094\n",
      "epoch:460------train loss:0.010674902237951756------validate loss:0.009599720127880573\n",
      "epoch:461------train loss:0.009279420599341393------validate loss:0.010878452099859715\n",
      "epoch:462------train loss:0.010752566158771515------validate loss:0.009855569340288639\n",
      "epoch:463------train loss:0.011403730139136314------validate loss:0.011606290005147457\n",
      "epoch:464------train loss:0.011742291040718555------validate loss:0.009832954034209251\n",
      "epoch:465------train loss:0.011440049856901169------validate loss:0.010192922316491604\n",
      "epoch:466------train loss:0.012302192859351635------validate loss:0.00946092326194048\n",
      "epoch:467------train loss:0.009981943294405937------validate loss:0.010358797386288643\n",
      "epoch:468------train loss:0.010799268260598183------validate loss:0.01062654610723257\n",
      "epoch:469------train loss:0.011240917257964611------validate loss:0.010557246394455433\n",
      "epoch:470------train loss:0.01017702929675579------validate loss:0.01062989141792059\n",
      "epoch:471------train loss:0.011174771003425121------validate loss:0.009596923366189003\n",
      "epoch:472------train loss:0.01127228606492281------validate loss:0.011310254223644733\n",
      "epoch:473------train loss:0.010059823282063007------validate loss:0.009748196229338646\n",
      "epoch:474------train loss:0.011205248534679413------validate loss:0.00969006959348917\n",
      "epoch:475------train loss:0.010272733867168427------validate loss:0.011728234589099884\n",
      "epoch:476------train loss:0.010391533374786377------validate loss:0.010274258442223072\n",
      "epoch:477------train loss:0.010253457352519035------validate loss:0.010470062494277954\n",
      "epoch:478------train loss:0.011061661876738071------validate loss:0.009617413394153118\n",
      "epoch:479------train loss:0.011232673190534115------validate loss:0.01013362780213356\n",
      "epoch:480------train loss:0.011580160818994045------validate loss:0.011209326796233654\n",
      "epoch:481------train loss:0.01006273739039898------validate loss:0.010807948186993599\n",
      "epoch:482------train loss:0.010851001366972923------validate loss:0.010143972933292389\n",
      "epoch:483------train loss:0.010339625179767609------validate loss:0.010180851444602013\n",
      "epoch:484------train loss:0.010245339944958687------validate loss:0.009717751294374466\n",
      "epoch:485------train loss:0.011096358299255371------validate loss:0.009622368961572647\n",
      "epoch:486------train loss:0.01068302895873785------validate loss:0.010082447901368141\n",
      "epoch:487------train loss:0.01110968180000782------validate loss:0.009628035128116608\n",
      "epoch:488------train loss:0.010521714575588703------validate loss:0.01014827098697424\n",
      "epoch:489------train loss:0.010942189022898674------validate loss:0.010254334658384323\n",
      "epoch:490------train loss:0.010881295427680016------validate loss:0.01002003625035286\n",
      "epoch:491------train loss:0.009835624136030674------validate loss:0.00988633744418621\n",
      "epoch:492------train loss:0.010066734626889229------validate loss:0.010581683367490768\n",
      "epoch:493------train loss:0.011154534295201302------validate loss:0.00948339980095625\n",
      "epoch:494------train loss:0.011129538528621197------validate loss:0.010028987191617489\n",
      "epoch:495------train loss:0.010005306452512741------validate loss:0.01009280327707529\n",
      "epoch:496------train loss:0.010275287553668022------validate loss:0.010535556823015213\n",
      "epoch:497------train loss:0.010700656101107597------validate loss:0.01023122575134039\n",
      "epoch:498------train loss:0.011780006811022758------validate loss:0.010352957993745804\n",
      "epoch:499------train loss:0.011339468881487846------validate loss:0.01043240912258625\n"
     ]
    }
   ],
   "source": [
    "# Hyperparameters\n",
    "batch_size = 64\n",
    "epoch_num = 500\n",
    "learning_rate = 1e-3\n",
    "\n",
    "train_set_length = len(x_train)\n",
    "validate_set_length = len(x_validate)\n",
    "test_set_length = len(x_test)\n",
    "# Batches per epoch; ceil covers a partial last batch without adding a\n",
    "# spurious extra batch when the set size divides evenly (int(n/bs)+1 did).\n",
    "one_epoch_length = math.ceil(train_set_length / batch_size)\n",
    "\n",
    "# Use the GPU when available\n",
    "use_cuda = torch.cuda.is_available()\n",
    "device = torch.device('cuda' if use_cuda else 'cpu')\n",
    "\n",
    "pytorch_model = PytorchSinNet().to(device)\n",
    "\n",
    "# Loss and optimizer\n",
    "criterion = nn.MSELoss()\n",
    "optimizer = optim.Adam(pytorch_model.parameters(), lr=learning_rate)\n",
    "\n",
    "print(\"one epoch length:\", one_epoch_length)\n",
    "\n",
    "# The validation tensors are loop-invariant -- build them once, outside the\n",
    "# epoch loop. reshape(-1, 1) replaces the deprecated Tensor.resize() call.\n",
    "x_vali = torch.tensor(x_validate).reshape(-1, 1).to(device)\n",
    "y_vali = torch.tensor(y_validate).reshape(-1, 1).to(device)\n",
    "\n",
    "# Main training loop\n",
    "for epoch in range(epoch_num):\n",
    "    epoch_loss = 0.0\n",
    "    for i in range(one_epoch_length):\n",
    "        x_batch, y_batch = get_batch_data(x_train, y_train, batch_size)\n",
    "        x_batch, y_batch = x_batch.to(device), y_batch.to(device)\n",
    "        prediction = pytorch_model(x_batch)\n",
    "        loss = criterion(prediction, y_batch)\n",
    "        optimizer.zero_grad()\n",
    "        loss.backward()\n",
    "        optimizer.step()\n",
    "        # .item() extracts a detached Python float; accumulating the loss\n",
    "        # tensor itself keeps every batch's autograd graph alive for the\n",
    "        # whole epoch (unbounded memory growth).\n",
    "        epoch_loss += loss.item()\n",
    "    train_loss = epoch_loss / one_epoch_length\n",
    "    # Evaluate without building an autograd graph.\n",
    "    with torch.no_grad():\n",
    "        prediction = pytorch_model(x_vali)\n",
    "        validate_loss = criterion(prediction, y_vali).item()\n",
    "    print(\"epoch:{}------train loss:{}------validate loss:{}\".format(epoch, train_loss, validate_loss))\n",
    "\n",
    "# Move the model back onto the CPU\n",
    "pytorch_model = pytorch_model.to(torch.device('cpu'))"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "创建keras神经网络模型，并将pytorch训练好的神经网络参数复制给keras模型"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 28,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Linear(in_features=1, out_features=16, bias=True)\n",
      "Linear(in_features=16, out_features=16, bias=True)\n",
      "Linear(in_features=16, out_features=1, bias=True)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "d:\\Anaconda3\\lib\\site-packages\\keras\\backend.py:451: UserWarning: `tf.keras.backend.set_learning_phase` is deprecated and will be removed after 2020-10-11. To update it, simply pass a True/False value to the `training` argument of the `__call__` method of your layer or model.\n",
      "  warnings.warn(\n"
     ]
    }
   ],
   "source": [
    "# Build the Keras twin of the network and copy the trained PyTorch\n",
    "# parameters into it (KerasSinNet / PytorchToKeras are defined above).\n",
    "keras_model = KerasSinNet()\n",
    "\n",
    "converter = PytorchToKeras(pytorch_model, keras_model)\n",
    "# NOTE(review): (1) is just the integer 1, not a tuple -- if convert()\n",
    "# expects an input-shape tuple this should be (1,); confirm against\n",
    "# the PytorchToKeras.convert signature.\n",
    "converter.convert((1))"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "使用测试集验证pytorch模型和keras模型推理结果的差异"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 29,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "7/7 [==============================] - 0s 2ms/step\n"
     ]
    },
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXwAAAEICAYAAABcVE8dAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/YYfK9AAAACXBIWXMAAAsTAAALEwEAmpwYAABKk0lEQVR4nO2deXxU1fXAvzeThB0iSxVZ3csOCYLjAkEsGJcq5OfPhZSlrRMtVHGpiitWhRat4s+VKWqhiWA1hdZqaiolsg0gQVsUXFBWEYmBIGuWmfv7476ZTJKZJJNMMpPM+X4+85l579333nkzb84799xzz1FaawRBEISWT1ykBRAEQRCaBlH4giAIMYIofEEQhBhBFL4gCEKMIApfEAQhRhCFLwiCECOIwm/BKKUmKaXyIi2HF6VUG6XU20qpw0qpNyNw/tlKqSzrc2+l1FGllK0ex7lfKbUw/BI2DUqpPymlHo+0HDXh/1uF+bhRf+2NiSj8OqCUukkptclSEN8qpXKVUhdHWq7a0Fpna63HRVoOP/4HOBXoorW+LpKCaK13a63ba63dNbVTSqUqpfZW2XeO1vqXjSth80IpNVUptSbScgg1Iwq/FpRSdwLzgTkYZdUbeBG4JoJi1YpSKj7SMgSgD/CF1rq8oQeK0usThOhGay2vIC+gE3AUuK6GNq0wD4R91ms+0MralgrsBe4BDgDfAtcCVwBfAAeB+/2ONRt4C3gDOAJsBob4bb8P+MrathWY4LdtKrAWeMY67uPWujXWdmVtOwAcBv4LDPS7zsVAIbALeBCI8zvuGuAp4BCwA0ir4fvoB+QDxcCnwE+t9Y8CpUCZ9Z3+IsC+tV3/TuBeS/YSIB64AFhnne8/QKpf+zOAD6xj/Qt4HsiytvUFNBBvLXcGXrN+w0PAcqAdcALwWDIfBU635MzyO89PrWsttq69XxWZ77ZkPmxdW2trW1fgH9Z+B4HV3u89wHfzLLAH+AEoAC6p8r39xfoNj1iyDPfbPsz6Lo9Y518KPB7kPGcB/waKgO+BbCDJb3sv4K+Ye6XI+k77AScBt/UdFVtt84FfVrlH14RwTVlBZNwGXOW3HG/Jmmwtvwnst77vVcAAv7Z/8l57VXmsdRo42++//RSwG/gOeBloE+pvF00vsfBrxg60BpbV0OYBjNIZCgwBRmAUppfTrGP0AB4G/ghkACnAJcDDSqkz/dpfg7lhOwOvA8uVUgnWtq+sfTphFGiWUqq7374jga+BHwFPVJFzHDAKOBdIAq7H/GEBnrOOeSYwGpgMTKty3M8xN/k84BWllKr6RVhyvg3kWTL8GshWSp2ntX4E00t6QxtXyitV96/D9QPcCFxpXcOpwDuYh1tnjGLNUUp1s9q+jlEkXYHHgClBzgnwZ6AtMMCS/Rmt9TEgDdhnydxea72vyjWfCywBZgLdgHeBt5VSiX7N/he4HPMAGoxRNAB3YQyCbta13I9ROIH4EHOPeb+XN5VSrf22/xSjyJOAv2MUMZYcy63r64z5btNr+B4UMBfzYOuHUfCzrWPZMEpuF+aB2QNYqrXeBtwCuKzvKKmG44dyTcFYgrkPvIwHvtdab7aWc4FzML/jZsxDqz78HvN/GQqcTcV/GEL77aKHSD9xovkFTAL219LmK+AKv+XxwE7rcyrGQrRZyx0wN8VIv/YFwLXW59nAer9tcZhewSVBzv0xcI31eSqwu8r2qVRY+JdiehUX4GeJADaMtdzfb10mkO93jO1+29pa13BaAHkuwVhW/sdfAsz2u76AVltdrh9jLf/cb/u9wJ+rHOM9jGLvDZQD7fy2vU4ACx/ojrHiTwkgUyqwN4Cc3uM8BPyliszfYPU0LJkz/LbPA162Pv8W+BuWRRnivXkIq/djyfO+37b+wAnr8yhMr0X5bV9HEAs/wHmuBT6yPtsxln18gHa+e81vXT41WPh1uKZgFv7ZmN5KW2s5G3g4SNsk63fuZC3/
iTpY+JgH3zHgLL9tdmBHQ3+7SL7Ewq+ZIqBrLf7i0zEWj5dd1jrfMXTFwOAJ6/07v+0ngPZ+y3u8H7TWHowVcTqAUmqyUupjpVSxUqoYGIixXqvtWxWt9b8xVt8LwHdKKadSqqO1f2KAa+jht7zf7zjHrY/+Mns5HdhjyR3sWLUR9PqrbseMCVzn/T6s7+RijAI/HTikjZXuL0sgegEHtdaHQpDTS6Xf35J5D0G+P+A4Fd/dk8B2IE8p9bVS6r5gJ1FK3aWU2mZFOBVjemT+v33Vc7S27tvTgW+0paUsgn0PKKV+pJRaqpT6Rin1A5Dld55ewC4dhjEY61y1XVNAtNbbMW6dq5VSbTG9m9etY9qUUr9TSn1lyb/T2q3W41ahG8a4KfC7t/5prYcQfrtoQhR+zbgwvslra2izD6N4vPS21tWXXt4PSqk4oCewTynVB+MOmoGJckkCPsFYIl5q7FJqrf9Pa52CcVucC/wG4/ssC3AN39RD9n1AL0vu+h4r4PX7bfe/xj0YCz/J79VOa/07TM/gFKVUuyqyBGIP0FkplRRgW23d9Eq/v+Xq6kUdrllrfURrfZfW+kzgauBOpdTYqu2UUpdgejP/i+mFJGH809XcagH4FuhRxQUX7HsA487RwGCtdUeM+9G77x6gdxADKND3dAyjNL2c5v3QwGuCCrfONcBW6yEAcJO17jLMA6Sv95S1yaeUOs1v2/cYY2yA373VSWvdHur+20UbovBrQGt9GOOze0Epda1Sqq1SKkEplaaUmmc1WwI8qJTqppTqarVvSPxwilJqovWnmolxt6zHDCBqTJcapdQ0jIVfJ5RS5yulRlr+8GNYg2xW7+MvwBNKqQ7Wg+XOel7DBuvY91jfUyrmz7A0hGMEu/5AZGGsvPGWZdfaCqPsqbXeBWwCHlVKJVphtFcHOojW+luM3/dFpdQpluyjrM3fAV2UUp2CyPAX4Eql1Fjru73LknldbReqlLpKKXW2pYx/wAx6BgoT7YBxTxUC8Uqph4GOtR3fwmXte5tSKl4pNREzzhSMDlgDr0qpHhijwMtGzAPkd0qpdtb3fZG17TugZ5Wxi4+Bidb/5mzgF2G6JjD31DjgVizr3u+4JZjeeVvMuFEw/gMMUEoNtcYOZns3WD21PwLPKKV+BKCU6qGUGm99rutvF1WIwq8FrfXTGAX4IObm3IOxspdbTR7HKJb/Alswg0QNmdjxN8yA6iHgZ8BErXWZ1nor8AfMH/g7YBAmKqeudMTcwIcwXfoiTAQCmMHVY5gB3zWYP9CroQqutS7FdK/TMBbSi8BkrfVnIRwm4PUHOd8ejDV3PxW/zW+ouK9vwgw4HwQewUSxBONnmJ7OZ5hIppnWOT7DPNS/trr2/u4ltNafY6zg56xrvhq42vouauMc4H2MgnUBL2qt8wO0ew/zQPoC89udpAb3XRX5SoGJGH/1Icx3+9cadnkUSMZY2+/4t7WMg6sxPu7dGHfb9dbmf2Oig/Yrpb631j2Dicz6DlhE5cHTel+TJcu3mO/sQkzkkZfF1vG+wUSyBTMW0Fp/gfHFvw98ibn3/bkX47ZZb7mH3gfOs7bV9beLKlRl154QSZRSszGDQBmRliUSxPr1C0JjIxa+IAhCjCAKXxAEIUYQl44gCEKMIBa+IAhCjBDVCai6du2q+/btG2kxBEEQmg0FBQXfa627BdoW1Qq/b9++bNq0KdJiCIIgNBuUUkFnUotLRxAEIUYQhS8IghAjiMIXBEGIEaLahy8IQmiUlZWxd+9eTp48GWlRhEamdevW9OzZk4SEhNobW4jCF4QWxN69e+nQoQN9+/ZFVa9RI7QQtNYUFRWxd+9ezjjjjDrvJy4dQWhBnDx5ki5duoiyb+EopejSpUvIPTlR+DGGywVz55p3oWUiyj42qM/vLC6dGMLlgrFjobQUEhNhxQqw2yMtlSAITYVY+DFEfr5R9m63ec/Pj7REQktl2bJlKKX47LPaSyHMnz+f
48eP19ouGH/605+YMWNGvfcP93GiGVH4MURqqrHsbTbznpoaaYmElsqSJUu4+OKLWbq09mJnDVX4Qt0RhR9D2O3GjfPYY+LOESoI97jO0aNHWbt2La+88kolhe92u7n77rsZNGgQgwcP5rnnnuP//u//2LdvH2PGjGHMmDEAtG/f3rfPW2+9xdSpUwF4++23GTlyJMOGDeOyyy7ju+++CyqDx+Ohb9++FBcX+9adffbZfPfdd3U6ztSpU3nrrbd8y/4yPfnkk5x//vkMHjyYRx55BIBjx45x5ZVXMmTIEAYOHMgbb7xR7ZjRgPjwYwy7XRS9UEFjjOssX76cyy+/nHPPPZfOnTuzefNmkpOTcTqd7Nixg48++oj4+HgOHjxI586defrpp1m5ciVdu3at8bgXX3wx69evRynFwoULmTdvHn/4wx8Cto2Li+Oaa65h2bJlTJs2jQ0bNtC3b19OPfXUkI5Tlby8PL788ks2btyI1pqf/vSnrFq1isLCQk4//XTeeecdAA4fPhzal9ZEiIUvCDFMY4zrLFmyhBtuuAGAG264gSVLlgDw/vvvc8sttxAfb+zMzp07h3TcvXv3Mn78eAYNGsSTTz7Jp59+WmP766+/3mdpL126lOuvv75ex/EnLy+PvLw8hg0bRnJyMp999hlffvklgwYN4v333+fee+9l9erVdOoUrOZ9ZBGFLwgxTLjHdYqKivj3v//NL3/5S/r27cuTTz7JG2+8gdYarXWdQgn92/jHmf/6179mxowZbNmyhQULFtQag26329m+fTuFhYUsX76ciRMn1vk48fHxeDwewExyKi0t9X2eNWsWH3/8MR9//DHbt2/nF7/4Beeeey4FBQUMGjSIWbNm8dvf/rb2LysCiMIXwoLE9zdPwj2u89ZbbzF58mR27drFzp072bNnD2eccQZr1qxh3LhxvPzyy5SXlwNw8OBBADp06MCRI0d8xzj11FPZtm0bHo+HZcuW+dYfPnyYHj16ALBo0aJaZVFKMWHCBO6880769etHly5d6nycvn37UlBQAMDf/vY3ysrKABg/fjyvvvoqR48eBeCbb77hwIED7Nu3j7Zt25KRkcHdd9/N5s2b6/6lNSHiwxcajMT3N2/COa6zZMkS7rvvvkrr0tPTef3113nuuef44osvGDx4MAkJCdx8883MmDEDh8NBWloa3bt3Z+XKlfzud7/jqquuolevXgwcONCnXGfPns11111Hjx49uOCCC9ixY0et8lx//fWcf/75/OlPf/Ktq8txbr75Zq655hpGjBjB2LFjadeuHQDjxo1j27Zt2K0vrH379mRlZbF9+3Z+85vfEBcXR0JCAi+99FJ9v8JGJapr2g4fPlxLAZSmweUy/tvU1ND//HPnwkMPGT+wzWasxVmzGkNKoTa2bdtGv379Ii2G0EQE+r2VUgVa6+GB2ouFLzTYQvf6gb37S3y/IEQnovBbOHWx3KtGaixeHJq17/UD17eHIAhC0yAKvwVTV8vd30K32eC116C8PDRrX+L7BSH6kSidFkCwCJm6xlj7R2r8/OcV+5SUSL4dQWhJiIXfzKnJivda7iUloBRYUWkB8Vro994L3nF8jwf8ZqbXKoe4dAQhuhELv5lTkxVvt8P8+cZN4/HAzJm1x8l//HHNy4HwPnQeesi8Syy+IEQnovCbObXNlCwqMsre46nb1Pn09JqXAyFplwUvO3fuZODAgZEWo8H4X8emTZu47bbbamw/Z86ckM8RiXTM4tJp5tQWIRNqyKTDYd5zcoyy9y7XhIRlCuGgvLzcl2cnms4xfPhwhg8PGNbuY86cOdx///0NEa1JEAu/BWC3GyWbn1/dnVKfqfMOB7z3XmVlX1PqBEm73MxppLwYX3/9NcOGDePDDz/kq6++4vLLLyclJYVLLrnEVxhl6tSp3HnnnYwZM4Z7772XjRs3cuGFFzJs2DAuvPBCPv/8cwA+/fRTRowYwdChQxk8eDBffvlltfO1
b9+eu+66i+TkZMaOHUthYSEAqamp3H///YwePZpnn32WgoICRo8eTUpKCuPHj+fbb78FoKCggCFDhmC323nhhRd8x83Pz+eqq64CTOrnadOm+VI85+TkcN9993HixAmGDh3KpEmTAMjKyvLJm5mZidvtBuC1117j3HPPZfTo0axduzas33ed8CY1isZXSkqKFmpn3Tqt27TR2mYz7+vW1dx2zpzgbda9u0CPuq2jTroPfdpvlL729tP0rbeN0CkZnfUlKZNqPb4QWbZu3RraDqHcPHVgx44desCAAfqzzz7TQ4cO1R999JHWWutLL71Uf/HFF1prrdevX6/HjBmjtdZ6ypQp+sorr9Tl5eVaa60PHz6sy8rKtNZa/+tf/9ITJ07UWms9Y8YMnZWVpbXWuqSkRB8/frzauQFfm0cffVRPnz5da6316NGj9a233qq11rq0tFTb7XZ94MABrbXWS5cu1dOmTdNaaz1o0CCdn5+vtdb67rvv1gMGDNBaa71y5Up95ZVXaq21vueee/Ttt9/uO+fBgwe11lq3a9fOt27r1q36qquu0qWlpVprrW+99Va9aNEivW/fPt2rVy994MABXVJSoi+88EKfjPUl0O8NbNJBdGpY+k9KqVeBq4ADWutqDjxl0t89C1wBHAemaq2jM7tQMySQDz2Qle10wowZpl2rVsYap9hJfkEOxSeKyT/2CZs6HcdzincPzfK2+4H9cApwVjYX9lpDfv5OseJbCnW9eUKgsLCQa665hpycHAYMGMDRo0dZt24d1113na9NSUmJ7/N1112HzWYDTGKzKVOm8OWXX6KU8iUts9vtPPHEE+zdu5eJEydyzjnnVDtvXFycLwVyRkaGLzsm4Fv/+eef88knn/CTn/wEMEVZunfvzuHDhykuLmb06NEA/OxnPyM3N7faOd5///1KRV1OOeWUam1WrFhBQUEB559/PgAnTpzgRz/6ERs2bCA1NZVu3br5ZPriiy9q/T7DSbgcZn8CngcWB9meBpxjvUYCL1nvQhiozYfuynWybEUO6/O7MWzCCo52PELnzdeSu2QUT3XM5KQNdAKQZO1QNYOtttZpWDdkF1ccyQCy6iWrhG9GGY0wANOpUyd69erF2rVrGTBgAB6Ph6SkJD4OEvLlTUwG8NBDDzFmzBiWLVvGzp07SbXkuemmmxg5ciTvvPMO48ePZ+HChVx66aU1yuGfZtl7Dq01AwYMwFXFfVVcXFyn1M26DimetdZMmTKFuXPnVlq/fPnyOp2jMQmLD19rvQo4WEOTa4DFVo9jPZCklOoejnPHKv5uV38f+gMzM5j9Rhecz2QAcO8DI7l4fSZPtc9j9VXZbOy3n609jrHm6mzWl8ym1Abaexd470Vd5eVdZ21fdTyXex8YyTl3JXDvA3V/bkv4ZhTSCAMwiYmJLF++nMWLF/P666/TsWNHzjjjDN58803AKMT//Oc/Aff1T13sn+Hy66+/5swzz+S2227jpz/9Kf/973+r7evxeHxlCV9//XUuvvjiam3OO+88CgsLfQq/rKyMTz/9lKSkJDp16sSaNWsAyM7ODijfuHHjeP75533Lhw4dAiAhIcHXGxk7dixvvfUWBw4cAEwa6F27djFy5Ejy8/MpKiqirKzM9300JU0VpdMD2OO3vNda923VhkopB+AA6N27d5MIF80EsohdLvjVJCdJPXP451+70WfkZo4kFLNTH+TjpBJIhLzD2ay6aw3ZHXaZnSwL3V+pF9oOk+iGEsDj3Q70+d5GcTs3ieWKM/eeSruupfy728FKyn9ewkZIgHls5Ju7+pL1h521XksjeA+EcNAIeTHatWvHP/7xD37yk5/Qrl07srOzufXWW3n88ccpKyvjhhtuYMiQIdX2u+eee5gyZQpPP/10JQv+jTfeICsri4SEBE477TQefvjhgOf89NNPSUlJoVOnTgHryiYmJvLWW29x2223cfjwYcrLy5k5cyYDBgzgtdde4+c//zlt
27Zl/PjxAa/rwQcfZPr06QwcOBCbzcYjjzzCxIkTcTgcDB48mOTkZLKzs3n88ccZN24cHo+HhIQEXnjhBS644AJmz56N3W6ne/fuJCcn+wZzm4qwpUdWSvUF/hHEh/8OMFdrvcZaXgHco7UuqOmYsZ4eOdgs2odvc/JUx0xKbJairoqlvDufVBxsrSuUfRUeL53EW0tH0alnDjqxmKPdt9NmaxrrPsryzba12eDmmyG+NINP2+Ryw1lpzNvzBl91LPcdV2lYe8EC7Gk1x3BK3vzGJ5bTI7dv396XOz9WiNb0yHuBXn7LPYF9TXTuqKOufmx/i/jkSZPF0m6Hk4k5lNrAE0dlq92LpazTynqT3XqXb3nSkT4cc5ewjyP8os+1OO7I4tKrYfZsB++/byZnxcXB+eebGbbe/PYmmVoWiYnwxAq44L9f8lXHjb5zaw3z3nuEj9//FcdsHqYlnM/vn9hQ7Xokq6YgRJamUvh/B2YopZZiBmsPa62ruXNigbpauS4X7N5tFPCPuzvp2jcHV246LpeDCWPTeX5tXjVXjBel4TflI/j9HzYw6pkMcnblkt4nDcfs6gOtdjvMng2rV1fINH++2Zafb2T44x8ru2GmOzaw88m+rBu0C60hwQPLk/b7jjmPjay/vRO/u/zJala/ZNUUGotYs+7rQ7jCMpcAqUBXpdRe4BEgAUBr/TLwLiYkczsmLHNaOM7bHKmLH9v/oXDh0AxcV2SzTUErdx65S+C3/+dgBZBfkEOXdt346NvN7D5WTFGRJuHoKfywZSZnPWAUreOOLGqbLBvI8vYOqA4bVj2Iw26HJ3+zk9wlTk4m5vBB+Vo2Jh2rND6w6pQfGLs2kxVQq6tHEISmISwKX2t9Yy3bNTA9HOdq7tQlCs77ULhgSAZrr8w2VryCEm3cOeDAnuaopEjnzoUHXzZuGTDx9oMGVRyvNheKv+VdtRcyf77JyeN/DNPejK87n8lg4+Hsyj0NBaVx5qEkCl8QogPJpdPE1MWPnZoKFw3NYI2fskebGNoJYwNnM0tNNe4fr8IvLzc+/0WLQh8krTp28NFHEKgmc8VYRBaPvw0vHVvK0dZujrYy2xM9kJqSXqWtuHQEIVKIwo8Atfqxi524rqii7DW8cMqkoNay3Q533gnz5pllrWH//vqFQaamQny82U9rWLjQrJ88OXgvYMqULPb/MQu3Gwb1cnJOcg4TLkvHnuYIOMM3mBzyYBCExkOSp0Uh+QU5uKso+9+WTcJxR82zW5OSjJUP5v2002pOnRwMux2mTTNFU8D0FhYsqDxZqupYBJhzxMXBlj0Olr/9Ho57jLLPcmYw6Pou2Idm1FhFSyZmxSb1SS0cjNTUVJo6lNs/zfHLL7/M4sXBEg6YtMuvv/56yOeYOnWqb1JZQxCFH4WkpqTTyg1xHhMB89uySTwwp/ZUBqmpxoK22cz75Mn1n0Q5eTK0bl2h9LWunOu+SxezLS7OKPrJk42v/8wzzXpv/v0Vf89g9VXZbD7rIGuvzubiS0cGffBIXv3YJFSFr7XG4/VdNiL1mRR1yy23MHny5KDb66vww4Uo/CjEnuZgxUULeDxhHB/YF9So7IOlWPAqeLsdZs0K3T3iPVZmZsVDxNtLcLlM9SyPx6z3hnH++tfw1Vfm4eB9EOzvaSWgsh4cay7cCMXOgOesrZiL0Di49riYu3ourj0N71Lt3LmTH//4x0yZMoXBgwfzP//zPxw/fpwVK1YwYcIEX7t//etfTJw4MWBq4aeffpqBAwcycOBA5ls3186dO+nXrx+/+tWvSE5OZs+ePcybN49BgwYxZMgQ7rvvPt+x33zzTUaMGMG5557L6tWrq8mYn5/PqFGjmDBhAv379+eWW27xPUDat2/Pww8/zMiRI3G5XCGnOZ49ezZPPfUUANu3b+eyyy5jyJAhJCcn89VXX3HfffexevVqhg4dyjPPPIPb7eY3v/kN559/PoMHD2bBggWAeajNmDGD/v37
c+WVV/rSNDSYYGk0o+HVktMjV01TvO7dBXrOY+P0uncXhHSMMGa2rbOsc+ZoHRenNZj3OXO0vuUWs+x9jRhh2i94epLmEcxrtnk/5TdKT/nliDqdSwiNUNMjr9u9Trd5vI22PWrTbR5vo9ftbnh6ZECvWbNGa631tGnT9JNPPqk9Ho8+77zzfGmJb7zxRv33v/9da105tfCmTZv0wIED9dGjR/WRI0d0//799ebNm/WOHTu0Ukq7XC6ttdbvvvuuttvt+tixY1prrYuKirTWJhXynXfeqbXW+p133tFjx46tJuPKlSt1q1at9FdffaXLy8v1ZZddpt98802ttUmx/MYbb2it65fm+JFHHtFPPvmk1lrrESNG6L/+9a9aa61PnDihjx07VinVstZaL1iwQD/22GNaa61PnjypU1JS9Ndff61zcnL0ZZddpsvLy/U333yjO3Xq5JPRn1DTI4uFHwGq+qqznncydm0mD5XlMXZtJq7cwBZwVZrKBVK1l9ClS0U0kMcTuDh6crJp77gji3vKRhDnl4jtUFvNoh4bmXrzyGq1N+rbIxHqR/7OfErdpbi1m1J3Kfk78xt8zF69enHRRRcBJk3xmjVrUErxs5/9jKysLIqLi3G5XKSlpVXbd82aNUyYMIF27drRvn17Jk6c6LPS+/TpwwUXXACYNMXTpk2jbdu2AHTu3Nl3DG9a5JSUFHbu3BlQxhEjRnDmmWdis9m48cYbfUnTbDYb6VZdT/80x0OHDmXFihV8/fXXldIcJyYm+lIv+3PkyBG++eYbX6+mdevWPln9ycvLY/HixQwdOpSRI0dSVFTEl19+yapVq7jxxhux2WycfvrptWYGrSsSpRMBqirq5Zuf5WQvk7WyVNc9dj1SpQWLiipCQOPizPLkySYFg1cWfzfm75/YwLW5Tq5ceQuH2lbk9vmg7Wb+Irl1Ikpq31QSbYmUuktJtCWS2je1wcesmgLYuzxt2jSuvvpqWrduzXXXXRew1KCuIbeXfxplXUOa4latTFywzWajvLw8JBlbt27ty82vG5DmuKbrqNruueeeq5as7d13322UVMpi4UcAf1/1RcMyWN5zK9pSgvG6Ina9NpqytKC/JV51cNgbQrlyJTzxhHn3l8XlgvyPHYz71hSE8Fr6Q/YlyyBthLH3srNi8goeG/MYKyavwN6r4TfR7t27femHlyxZ4ktTfPrpp3P66afz+OOPM3XqVF97/9TCo0aNYvny5Rw/fpxjx46xbNkyLrnkkmrnGDduHK+++irHjx8HTAriUNi4cSM7duzA4/HwxhtvBEyl3JA0xx07dqRnz54sX74cMAVfjh8/TocOHThy5Iiv3fjx43nppZd81//FF19w7NgxRo0axdKlS3G73Xz77besXLkypOsLhlj4jUyguHKvos5d4uR3Sdm44wAFygPTSvqFNDO1KXLTBMr/E2jyWCBZKu+7gevTR7Kh82ZGH08m884N5L0jxc8jjb2XPSyK3ku/fv1YtGgRmZmZnHPOOdx6662+bZMmTaKwsJD+/fv71lVNLTx16lRGjBgBwC9/+UuGDRtWzTVz+eWX8/HHHzN8+HASExO54oorQor2sdvt3HfffWzZssU3gFuV/v37NyjN8Z///GcyMzN5+OGHSUhI4M0332Tw4MHEx8czZMgQpk6dyu23387OnTtJTk5Ga023bt1Yvnw5EyZM4N///jeDBg3yDQ6HhWDO/Wh4NfdB25oGVde9u0CPu72zVg9XDGbGP0RIg7ZNxZw55hrAvM+ZE759//zcAv3LaeP0n5+LvutujoRc0zbMeGvaBmP69Ol64cKFTShRdaoOnDZnIlLTVghMsERprlwno9dlUpZk2ikP2DTMLg8+kzaSNGSsoKZ9XblOHPszKekFiw7kcfyZVbVOLhOaLykpKbRr144//OEPkRYlZhGF34gEU3bz3nvEKHvLb3/G94m0+8dzxN0SfcoeGpbHvqZ98wtyTBGXOPBomH4o
m0G5oyDJIekVmil9+/blk08+CbitoKDGekdNRmpqqq9WbqwhCr+RmTLFvPvnofkirvIAU+vSBLYXOqLah92QsYJg+6ampGNz5eGxCql4gGUrcnj+RUeNmTqFmtF1KLQtNH90PaoVisJvJKoOdHrDFF25Tr5sZyWfsX6vSz3XMmh+RZRKrCg2e5qD5z9bxfRD2XiAVh44cagbI4ePp3BHOtv2OZgxw4R/Sthm3WjdujVFRUV06dJFlH4LRmtNUVERrVu3Dmk/UfiNRDD/fX5BjilNqAAPjPz8NAZdmsXMmbEZj+64I4tBuaN8xVxmkk1pT0gclcfAP8Mnex2+vDxS9Lx2evbsyd69eyksLIy0KEIj07p1a3r27BnSPqLwG4lg/vvUlHQS1+ZRqk2++OOuR8k5VL80xi0FbzGXuY+Pp9QG7jg4Cfx43Hy+zHZI2GYIJCQkcMYZZ0RaDCFKEYXfSAQbrLSnOXB+Ba++mcP3O9PZXuhgxgOVa8rGqmJLTUnHti4PtwKt4G+nbWPhPCd7jjjEhy8IYUAUfiNitwPFTvJX5EBxui/kMmOGg7NSKkeiDBoUu4U/KianOfh5ybMsaLMVHQflCvYU5zDrweiMXhKE5oYo/EbElWuSopXaIHFtXqWC3lUjV5pixmw0UnVw2znvdhbtz/S5vIpPFDN+ZhfS+6RJjL4gNBDJpRMiVbM71sS8vNmciDc+aW9Bb6EyVQe39xwxtQAeSxzHr/UI5iVsJC/pIJmHs3E+kxFpcQWhWSMWfggEyikTzCp3PpPB8k7fmgWrAHldk6LFEoEGt+12M4g7fmYXSMI3Qe2VXcsR544g1B+x8EMglPzzr+xaZj5YodBnH0mMyrQJkaamjJ+j2lr50q35Cps6HhMrXxAagCj8EAilBF/Xsk6Vls8orVwlJBTXUEsnWNGTuA5ZjNx2mlH4yqRgmHEou84FYgRBqIwo/BAIJf98im02CR5Am0LkKbbZvm1VK16J0g9MaiqUfPgo8d5qWQrcSsZCBKG+iA8/RIJF07hcJr/9ycQcJoxNJ+1GB29PgqSeORTvTSctu8KdE2wWrlAZux1ezHbw+sJVLOiRjVtBKzekXiBjIYJQH1R9EvA0FcOHD9ebNm2KtBi14nLBryY5+fwmKwTTDSsuWhA062Mog7+CwZXrJL8gh9SUdBkLEYQaUEoVaK2HB9oW8y6dcPjS8/OhQ//5nLTVLQSzKUsTthiSHGB7z7wLglAvYtqlEy5Lu1cHJxuHbfPVpbVpUMfSazx2rE60qg/SIxKE8BDTFn6wMMtAVn+wnoAr18mft8+izFuXVsPIj/qzcrNDCnSHiVDCYQVBCE5MW/iBJv0EsiYhsIXpTZ1Q0gk8CuI8ZlDxyNbbybhPEqKFi4aUWBQEoYKYVviBMlrOnRvYmgyW277UKtEX54bRBztzcdxc0rIdMZ8QLZyEUmKxIhGbfOeCUJWwKHyl1OXAs4ANWKi1/l2V7anA34Ad1qq/aq1/G45zN5SqvvRg1mRdcts/ceXcShEk4qcPH3X5Ll0u89uUlUFCgoS7CkJVGqzwlVI24AXgJ8Be4EOl1N+11lurNF2ttb6qoedrbIJZk/7rvipw8uofcxidnM6KixY0OFxQrNLwsHixeSiDeV+8WL5PQfAnHBb+CGC71vprAKXUUuAaoKrCbzYEsia96564P4PZ8dl4ekH2/jycLGDWg+/V+1wSgSIIQlMRjiidHsAev+W91rqq2JVS/1FK5SqlBgQ7mFLKoZTapJTaFMm6nAEjdXKdPBqfTXmc8duXxMEHmxs2zV8iUMLH5MkwKiWD5IwujErJ8BWOFwTBEA4LXwVYV3X67magj9b6qFLqCmA5cE6gg2mtnYATzEzbMMgXMsGs7vyCHNwKX7reOGB0csOm+UsESu3U1eW1ZX0Gq67KNgtnZbNlPdjtUjRFELyEQ+HvBXr5LfcE9vk30Fr/4Pf5XaXUi0qprlrr78Nw/rAT0OoudrL7
hz0ktIIyzOSqR8onkTGjYTM/Q4lAiUVCcXnl7MqtlD8/Z1eu5M8XBD/C4dL5EDhHKXWGUioRuAH4u38DpdRpSillfR5hnbcoDOduFKqmQe7VwcmYdZksaLMND+A40Z8P7At4YE54rMdg6YGF0Fxe6X0q58+3lSXy8G1OyUYqCBYNtvC11uVKqRnAe5iwzFe11p8qpW6xtr8M/A9wq1KqHDgB3KCjOGub1+pevNgs/+uzZynpCigosxxYksCraQjF5eW4IwueMZWxNnc4Rl7X/eS7M3l7ksm6KQ9UIdYJSxy+1vpd4N0q6172+/w88Hw4ztWULFpkFM1FaRq6+m+J2mdViyNUl5fjjiy23z2egrg83HFwEmjffz75+aLwBSGmc+nURH4+nN3NycX28ehvk0l0g/KY1MeTx8yMtHgxRagur9al6dis4jNawYfDttGrg1TJEoSYTq1QE54jGWybko3HKrrxsHsSca0KJR97MyDtRgcfzHmW1clb0XFQrmBPcQ7IEK4Q44jCD4Ar18lsK94eBSVAXOvCBk2wEpoOux1uHn87H+7P9KW9SE2RKlmCIC6dACxeOb9SvL1Ni8JobmTMcLDiogU8ljjOVB8D5j4+XgqgCzGNWPhVcOU6ea2VXzETDzx/yqSgbhzJgxO92NMc2NMcvjTWpTZIXJvHCiTKSohNxMKvQn5Bjs+VozTcfLK/CfcLgHdS0EMPmXeJ945OvGms61J+UhBaMmLhU2Gl9+rgZPcPe7G1Aizf7+QxtwfdL9CkILHyow9vGusSbbx0Xdp1i7RIghARYt7C91rpS1524tifyR/bbEUBN5/sx4qLFtTY9a86I1fy4EQn9jQH87tMwga4Fcw4lI3zmYxIiyUITU7MK3xvvL173J2cjDfd/nIFvTv2qtXP650U9NhjktY42ik6VohbgY6DsjiYfihbBnCFmCPmXTqeIxlsnZKN2/vo0xAfQlSOVLVqHvRKSifuQB4eDSjwYHz5MngrxBIxbeF789u7rUFabxjmtJJ+oghaGHuOOLC/O4l4j6k/3Epi84UYJKYVftX89t6B2ot6z4ysYELYSU2FTVuz6LdoAaM/GIfztJrHZwShJRLTLp3UlHRarc2jBBOCOfzz0zi+/lEchQ7OShFXTUuiIgmbg9RUk0jNletscD1iQWhOxLTCt6c5WIGx9E8cSGdOjgO320TdSIhly6LqBDmZjCXEIjGt8MFvNqYLnloopQZbIoGqZlWajKVlAFeIDWLOh+/KdQbMqSIhli2XQBPkeiWlk+gGm9uM2/RKkgFcoeUTMxa+ywW5S5w81TF4N15CLFsmgapm5ec7OPvP0Ll3Dgd3p7PnlurWveRJEloaMaHwvV36kcNzKB0TuBsvf+6WS7CqWY895qD0G0dAF14oxdMFobkQEwo/Px+G98/gwHlrUR6T7tg/R7r8uVs+VXtvwR4C3sidEwfSKS11SJ4koUUREwrfcySD1Vdl+5bTCk/joase9Vn3kgQtdqjak/P/nStF7nTMY0AP+GRv4B6AIDRHWqTC9/9Tb1mfwdOe180Ga4KVO6G0ku8+kI+3pmPKw6B5UltPrmrkzil9cojb52D+fPnNhZZBi1P4/n/qi4ZlsOrKbGhtbdTmLb1PWqV9gnXvAx1TXD7Nl9p6ct40yt6yiN/vSEdrKCqKlMSCEF5anML3/1Mf/XGuWWlZ9p1PKu6Mu4mik1m4XNV9usGUuLh8Wga19eS8E/GWrchhwwfdiDsjh4QESE2t6A1KT09ozrQ4he//p27/WRqcle2z7K/ZeRO/XZaF2x2apV4Xl48Q/dTWk4OKMN3/a2N8+XGePLasXwVksXgxvPoqId8/ghAttDiFX/lPncWW9bD0q1zcG9L4U0EW2lL+oVjqdVEUQvOgLnMtFq98lpK2gDL+/FsOZTNk0ij+s9NRr/tHEKKFFqfwASh2gjsHitNx3JFF0Vx46CN8f1alQrfUZVJWLKErPlpFUxLPfwS9
w1j/9bl/BCEaaHEKP1BSrNRUh88lEx8P06bB5MmiwIXATB4zkwXrM43aV2bdsY5HsNnk/hGaNy1O4QdKijXrQYe4ZIQ64XJB/scOJu97hUU9NvqM/Ru7XUvcY3L/CM2bFqfwq4bWeWfTiktGqA1v+G1JCXg8G7g4OYMT/XNJ7ZDGpVePIr9gPBSnA5JVU2ietDiF75/jXgpbCKHgDb/1eMzyms1ZsBnKejt5sbPkzheahsYM/W1xCh8qctwLQih4w2+NhV+x/pQ+wXPnS1y+EE5cLhgzpiIEfOXK8N5XLVLhC0J98A+/LS6Gp582iv/w3nQS3cZNGK9h9w97TD2FJIfMwBbCyrx5xuAA8754cRQqfKXU5cCzgA1YqLX+XZXtytp+BXAcmKq13hyOcwtCOPEf67n2Wq/17oBiE5//x1ZbebnNNl5Zl8l9h5GMmkLYcLng7bcb9xwNrnillLIBLwBpQH/gRqVU/yrN0oBzrJcDeKmh5xWExsZuh1mzrIdAmoP9pQdx24A4KLNBgfsREhNNDWSJyxcaSn5+xVwhMPfV5MnhPUc4LPwRwHat9dcASqmlwDXAVr821wCLtdYaWK+USlJKdddafxuG8wtCk7CPI5WWv084IuG+QthITYVWrSpcOnfdFf57Khw1bXsAe/yW91rrQm0DgFLKoZTapJTaVFhYGAbxBKF+uFwwd655B/hFn2vNB12x7N8LqNpeEELBbof58yHO0srPPRf+eykcFr4KsE7Xo41ZqbUTcAIMHz48YBtBaGwCpcR23JFF4f3wXlEu47uk4bgjq8b2YvELoVJUZNw6Hk/jjAuFQ+HvBXr5LfcE9tWjTaMgYXNCfQiUEhvgsaezKC2FjYlw6dVAcc0lEeX+E0KhsTPzhkPhfwico5Q6A/gGuAG4qUqbvwMzLP/+SOBwU/jvxeoS6kugP97ixZVD5rKed/LaGRUlEQf1hi27K0oiyv0nQGgP/cbOzNtgha+1LldKzQDew4Rlvqq1/lQpdYu1/WXgXUxI5nZMWOa0hp63LkjhEqG+BPrjLV5cuc0PuvKErJ9MzOF/uzh87efOlfsv1qnPQ78x08CEJQ5fa/0uRqn7r3vZ77MGpofjXKEghUuEhlD1jzd5simAUlYGCQkw/sJ0cvZX5G2aMDYdu1/1TLn/hPoYnfc+MJK/ntzMxNbJ/P6JDWGVR2kdveOiw4cP15s2bWrQMZxOyMmB9HRwSLYFoYFU7Z67co0Pv/hEMR8f2056n+qDueLDj11CtfAz7upLdoddvuV7ykaErPSVUgVa6+EBt7VkhS8+VKEpcD6TQebhbN9yff6kQsulrg99V66Ti9ZnohW+OtxnH4nnyz+UhXS+mhR+OOLwo5ZgkRaCEE5yduWaD1bw8VPxG02uHUGg8oztmsgvyKkoumPZ4RNbJ4dVlhat8L0+VJn6LjQm6X0sx73GlES0MmoKQiikpqTTxg3KA0rDpCN9wt5TbNHZMqX4uNAUOO7I4qsHvuSp+I1oD7T2K7wjCHWlKWp5tGgfviA0FS4X5C5xcjIxx4rWkQgBoXa8g/7hVPA1+fBbtIUvCE1BRXCAg8REBxPEuBfqgCvXydi1TVtJrUX78AWhKZDgAKE+LFvhN3Evziw3NqLwBaGB1BQc4Mp1Mvfx8RK1I1SjdWk6iW6wuc3Evdaljd81FJeOIDSQYMEBkeiyC82HtBsdvD0JknrmULw3nbTsxr83ROELQhgIlP8kvyB48XNBsNvhxWwH+fmOJosiFIUvCI1Eako6iWsrcu1IqKZQlcZMlBYIUfiC0Eg0RVy1IISCxOELQhiQJGmCl0jfCxKHLwiNSF2T9DmfySBnV26ljJqRVg5CeAl0L1Ds5OV3nuXYcc21yTPJmBG5np4ofEFoIHXJee7LqJkEeYez4RkYdEGWZHNtYVS9F3KXOJnXKZOSrmb7299lwvNETOlLHL4gNJC6JOlb+lXljJrOHctlwlYLpOq9sMvzLCU2zO+u
oCwOPtgcucR6YuELQgOpS5K+ASfSWEm2L+3tfzsdY2YHJ4mJDqmI1YLwvxd6dXDyi++2mg3W757ggdHJkYvWkkFbQWgCXC64Y353Nvx4P8SZFLhnHY1naqvrieuQJT78ZkZdxl7mPj6eh8rycFu/95DCttzV/5lGd+fIoK0gRAElGx+lzTmZnAS0gu0dynmQbBa0Brs9q9b9heigtkF6bwbMLu26kViEbx7Gi1c/E/HQXFH4gtAE5OfDlj0OfrwIjk6Yzq7O5b7KRjm7cpEI/ejG36KvaZDe+UwGMw5l41bQqgjmd5lE0bHCqJmHIYO2gtAEeAfzPvvWQZ9115uVljfVVpYoydWiGK9F/9BD5r1Ll8CD9K5cJ9MPZVMWB544KLFB0bFCZj34XlQoexALXxCaBP/BvN27s9D/gKJhy/nitGPkdd1P/tpMSa4WpVS16IuKAg/S5xfk4PErQG7T0ZdOQxS+IDQR3rwpLheMXZTFyLbj+fx0M6h3Eli8cj4kOWQiVpTh7Z35R1MFyoGTmpJOq7V5lGjjOnn+lElR9wAXhS8ITYzX2s9dks46Tx5uZQZxX2u1jfWTnGzZ7ZCJWFFEbWG3Ff59Bysuiu7cSRKWKQgR5NZ7BrCgzVZ0nCmEcUn+OPJXv4dSkJkJL70UaQmFmnC54FeTnL6c9i9mOyL+kK4pLFMGbQUhgkweczut/aoeFe8xPl+t4dVXjUIRopfcJU4+vymT1WPy+PymTHKXRPfguyh8QYgkSQ7Oe30Bl+SP47zXF9Bz6Cv0+XUCo8aOxO2unG7B5YK5c+UhEC24cp2s8cyixK8u7cnEyKVNqAviwxeEJsTr7+3SxUR77N4NW3Y7cO9wMGrsSP4xZCMAuy7eyGg1ktTUDb79JNFa9OAtX1nSBTwK4jymhzZhbHRF5VRFFL4gNBFepV1SAh4PxMVBfLx5AXwx2BqvssL6/pOysSLkL7/2jJxC0+DKdTLzvTs50QmIgzg3XPZDZ2aPnwtJDubOjd4oK1H4gtBEeJW2x2OWPR6jwG++GXr3hr/tbs/+jj/42he3NjM3HXdkBQwNFJoeV66TMesyKUmyVmhI0PiUfbVc+ERXvQNR+ILQRHiVtr+Fn5gIkycbZZCa+yQXrs80jQOkXZgyxbx72wtNT35BDqVx+NJc44FpJf2wpxnL3r8XtngxLFoUXW64Bil8pVRn4A2gL7AT+F+t9aEA7XYCRwA3UB4sZEgQWjL+8dxeH76/5WdPc3DPmleYl7DRl3YhvU9aNf/9sGHRZTXGEqkp6SSuy6PEUviJHrio90yzLbVyLwyizw3XoDh8pdQ84KDW+ndKqfuAU7TW9wZotxMYrrX+PpTjSxy+EGu4XPD6wgw+bZPLDWeZUohz55o8Lm636RXYbKaHEC1WYyzgzYDpTZXw8jvPsnOX5tB/Z7K90OH7HfyTrEFkBtobMz3yNUCq9XkRkA9UU/iCINROhSWfRWIiPGH5gP0tR6WM4vd4osdqbOk8cX8Gj8RbGTDX5bHywgX8uMenZL9sfgubreJ3qJpyobbCOE1NQ+PwT9Vafwtgvf8oSDsN5CmlCpRSNc43Vko5lFKblFKbCgsLGyieIDQfgpU89LqCHnsMXngBWrWquZyiED6ynncaZR8HWBkwF698tk5lLcH8drNmRYeyhzpY+Eqp94HTAmx6IITzXKS13qeU+hHwL6XUZ1rrVYEaaq2dgBOMSyeEcwhCs8Z/UFcp4+f34m85DhoUXVZjS+aDzTl4elMxSAuArlNZy2ikVoWvtb4s2Dal1HdKqe5a62+VUt2BA0GOsc96P6CUWgaMAAIqfEGIVex2mD8fZswwVv7MmUa5V1UmgTI1Co3D6OR0/vxdnilEDsS7YfKYmUDz/B0a6tL5O2AFizEF+FvVBkqpdkqpDt7PwDjgkwaeVxBaJEVFxj/v76MHM2h46z39ufWeAVIsJQzUNU1FxgwHC09dQPru/vzs+36sunBBVGbBrCsN
jdLpAvwF6A3sBq7TWh9USp0OLNRaX6GUOhNYZu0SD7yutX6iLseXKB0h1giUQoFiJ6nrMim1rMwED3xgb96KJ5K4XMYNU1YGCQktb+C70aJ0tNZFwNgA6/cBV1ifvwaGNOQ8ghArBPINz308hzK/yT5lcWbgUBR+/Vi82DxQwbzPmwcjRlR83/4hmC3tO5aZtoIQZVT1DaempBPnysOt8Sn97LitqNtH8uKzGyIiY0vi7bfNKzERnPOcOPab3lTi2ryAZSf9Y+2bW89A0iMLQpRjT3Pw4imTsHkAKw/Pkdbw0ikb+dXtIyMqW3Nk8mQT2qqUCavUuiIU9oPNOZT6pTvOLzDpjr0+f6ezckHz5paqWhS+IDQDHHdksdq+gI6WK8Jr6f8tfpPkyQ8Rux1WroQnnoAXX6w8r2F0cjqJfgVpUlPSfeMqDz0E06ebsNmqcyWaC+LSEYQooiZ3gT3NwcB37mZdqyO+db2Ot6s0yDt/fvUcPUJ1gs9rcHBWbuW6tP5J0bypLZRqnhPfROELQpRQlyInT135FJesy8QdBzYPnF/6U9oMH0/hjnS27XMwfbpxUUienbpTLZ4+yQE2BySZxapJ0ZrzQ1UUviBECXUqcpLkYFAWdOqZgz7RjVcuz6akJ8Sl5nHRu6tY+1GW5NkJkUqJ0QLktG+us2oDIQpfEKKE2oqcuFxm9u3HOx2w08Hoi8dTYgNPHHg0uK7IZvDBUWzZ7WiW7oZI4C1V6I3KufsHKC11VHvoNsdZtYEQhS8IUUJNlqTX3XPyZMW673emY9N5eKxwTQ/wk4k5/G8XR7O3RBtKXUMn8wv8onK0KUKemOhosZXFROELQhQRzJL0unv8J8Z/ecDBbd+v4tmu2XiAVlYRbXtaU0kbndRlLOTeB0by15ObGUkPEtsYZe8tQj4hvWW4bwIhCl8QmgH+7p74eJg2zVS+mjkzi37dRtG1bw4/vy6d5WteYfL705nYOpnfPxGbk7JqGwvJuKsv2R12QQJsZxeTjvRhwCnnVZpZ29IUvRdR+ILQDAiYcsEKF9yy24HtGwennzmS7D4bIQHmsREeGBmTSr+msRDnMxlkt99lFqy6wSvZQ9aDO5te0AggCl8QmgnVUi6kVlZsrqTNZoO3APqJzfw+EoJGmGBjIa5cJ786lG2mm/rltz/tcLsISBkZROELQjOlqmJ7wZnM150qCqBfcCg5kuJFlEBjIYtXzsfdFt8DESDODdd1e6qpxYsYovAFoRlTWbFt4JuHR7Kj32bO2JbM9N/GnjunZiqVreLs7xP5RYfnuG9uy8qIWROSS0cQWgh2O8z57QYyu5cx57cbWuzAY325qPfttHKD8kArNzzy49hS9iAWviC0KFrKBKHGYM8RB+cuhs69czi4O509t8SWsgdR+IIg1IHmmAPeletk8cr5gGLymNtJTXXw2GMOSr+J3ZnIovAFoQXRGIq5LhOZog1XrpPR6zIpa2uWX1uXycoLYcUKR7N7cIUTUfiC0EJoLMVcp6RuUca89x6hLAnfOG2pzaRRmPWgI+plb0xk0FYQWgiBFHM46NLF5IGPi2se+WVcuU7e7ri/YoWGOL9iJrFcLEYsfEFoIdSWbbM+eDN0eot/zJ8ffda9vxuLYiez35uFpxO+eHul4cVTJgVNfRxLiMIXhBZCY+Rt9/YaPB5T5amoqOHHDCf+bqxBvZ1snZRJWSfQylj1Ng3PnzIJxx1ZlSpXNRfXVLgRhS8ILYhQwjL9C394k4ZVpTF6DeHE342VeP4jlNrwWfbDD7dl/vhnfNcW7dfSFIjCF4QYpGrhjxUQVOlPmWLeJ09uHIu4IZFFXiU+vH8Ga368v9K25MS+la6pJVWuqi+i8AUhBqla+CO/IKeawq8a9TN5cv3PF0ypNzSyyG4H5zwnPz+QjVb4rHubx8ysnTu38jljfWKaKHxBiEFSU9JJXJvnK/yRmpIOVFbM4QrHrEmph+Mca3c/S7lfUrQ4DRe9O4lf
bHFQXm4Gm194ARyxN7G2GqLwBSEGsac5WAGVfPh3/yqD3LLldPihA39xPkrfoQ7iLQ3h7/MOZK3X5JapSak31K/uynXyaqutJvmlZdlf8M4kVn+UBZgKYR4PzJgBgwbFtnUPovAFIWaxpzl8bpwn7s/gDz/KNht6HMN2XibtN89n5MBkep9byPgL07HbHQGtdajZLVOTUvf61RcvrrvclXohBTm4rfz2ygP2zf1Y+1EWiYlQXm4eMmDeYzEqpyqi8AVB4L2iXOiOb2aqOw7WpmxDq22gIaswj1/fm8lP9oygtHSDz1pfvBi+/hpKSowlHcgtU5fB0kWLzL6LFtXsx/c+cM7u5uRf/8xh7EXdSIyrqEl79NOZZGaa8YYtW4xl73ZDq1Y191BiBVH4giAwvksaq8n2FQYB0HGYZeu9uA28ee5GLvxpXxK/P4/iPem8+qrxk3s8Nc/ErWmwNBQ/fn6+Ufbbf5bJVhusd8P8LpPY/k0hrUvTSXvdUWmAdtCgysq9OeYFCiei8AUhRvG3dB+YkwX3w1vFy2kbZ2PTKT9QrsDjVfre2iEaXIN2EccuEt15jPznKuLaFOI+3o0f9SpkwmXG9RPoHMEUayh+/NRU+Oe6ZzlpMw+kUg1FxwqZ99R7AdtXfdA0x7xA4UQUviDEIIEs3QfmZPGAd7s1KevTQ5+T3WFXZcsf4/IpAVxXZONR4LEiZP52II8tD7xCUpskeiWl47jHUas1XReXj/fB0auDk/VDt5oQTA3xuiLCyL9dsOPE+uSrBil8pdR1wGygHzBCa70pSLvLgWcBG7BQa/27hpxXEISGUZul6z+g2+/+DF46tpSTiR5+/FVvNvffRak2nh63fy8gDtwa5sVtJK4MWu3PY3h/0wM4uDud/HxH0Kiemlw+TmeFL370xTm4U/EN0k4r6eeTsy7umliffNVQC/8TYCKwIFgDpZQNeAH4CbAX+FAp9Xet9dYGnlsQhHoSiqUb1yGL/fOycLthvQ3u7+mkzY9y6NKuGzOLsjmJyV0D+GLhPXFQok0PQCuI9+Sx7Zv5uHJnsmWPo9Jgam2DtNOnw3mnOenaN4eyo91IdFcM0k4eM9PXtq7umliefNUgha+13gaglKqp2Qhgu9b6a6vtUuAaQBS+IESIUCzdqg+HtBsdPj/9oNxRLF45n4Wtt1Hul2w9zmN6AB6rB+BWkNVlG2+tzWT4P1cxbMIKjnY8QufN15KfnxXU8s/Ph37dnWyfnMlnNkh0wwOeScS1LqyWAyjW3TV1QWmta29V20GUygfuDuTSUUr9D3C51vqX1vLPgJFa6xm1HXf48OF606aAXiJBEJoIl6siTj5YPh1TTvBZQDOsezJFxworegA2fGkP4jz4egBe0gv7sCehkPjyeOJ39eDQf2eyvdDhi/G/b84AVidvRceBzQ2PJY5j1oOBB2krpUomNl03SqkCrfXwQNtqtfCVUu8DpwXY9IDW+m91OX+AdUGfMkopB+AA6N27dx0OLwhCY1HXfDr+Pn9/TA/gWV5ttRW3MlZ/mTfnDYCGnG67Knbo+gO2YZlc+O4q8vOzSB1a8yBtNTnsEn5ZE7VWvNJaX6a1HhjgVRdlD8Zv38tvuSewr4bzObXWw7XWw7t161bHUwiC0Bg0tIqWPc3BS/M+Jf/CBTyWOI7nT5lEvGXlVzL7VMXLHWd8/706OCvPpNUVg7S1Va5qrOpfzZ2mCMv8EDhHKXUG8A1wA3BTE5xXEIQGUh+/eKDQSP8ewKDcUcx77xH2cYRzbF2rhX1ihXnuKc6pluRt8piZQa13//OKPz8wDQ3LnAA8B3QD3lFKfay1Hq+UOh0TfnmF1rpcKTUDeA8Tlvmq1vrTBksuCEKjE2oYY51CI9McLPMLpdxzWwaFQ5fhjvPw1akn0QpauSH1gvSASd4CVa6C6ueN5fDLYDQ0SmcZsCzA+n3AFX7L7wLvNuRcgiA0LsEmLYUSxhjqTNb8fFj7URbuTWCzwf2/
MiGfXmUP1ccHAlnvgc47a5Yo+qrITFtBEMI2yFlXV4r34dKlS0V7mw0KyxxMHuuo8dzBeh3iwqkdUfiCIIQtx0xd0yT4P1zmz4ePPoLXXoM//rH2jJne84SakVMQhS8IAuEd5KzNBVT14VJUBL17V+Svr+8DJ5Zn0NYVUfiCIDTYQg4lx3ywh4u4ZBofUfiCIAD1t5C9LpqSkrrVjw32cBGXTOMjCl8QhAaRn19R8aqu9WMDPVzEJdP41DrTVhAEoSZSU41l78VbP1aIPkThC4LQIOx248ZJSDCK379+rBBdiEtHEIQG43BUrx8rRB+i8AVBCAvig49+xKUjCIIQI4jCFwRBiBFE4QuCIMQIovAFQRBiBFH4giAIMYIofEEQhBhBaR20nnjEUUoVArtqbVidrsD3YRanKWnu8oNcQzTQ3OUHuYb60EdrHbAgeFQr/PqilNqktR4eaTnqS3OXH+QaooHmLj/INYQbcekIgiDECKLwBUEQYoSWqvCdkRaggTR3+UGuIRpo7vKDXENYaZE+fEEQBKE6LdXCFwRBEKogCl8QBCFGaFEKXyl1uVLqc6XUdqXUfZGWJ1SUUq8qpQ4opT6JtCz1QSnVSym1Uim1TSn1qVLq9kjLFCpKqdZKqY1Kqf9Y1/BopGWqD0opm1LqI6XUPyItS31RSu1USm1RSn2slNoUaXlCRSmVpJR6Syn1mfWfiHjy6Bbjw1dK2YAvgJ8Ae4EPgRu11lsjKlgIKKVGAUeBxVrrgZGWJ1SUUt2B7lrrzUqpDkABcG0z+w0U0E5rfVQplQCsAW7XWq+PsGghoZS6ExgOdNRaXxVpeeqDUmonMFxr3SwnXimlFgGrtdYLlVKJQFutdXEkZWpJFv4IYLvW+mutdSmwFLgmwjKFhNZ6FXAw0nLUF631t1rrzdbnI8A2oEdkpQoNbThqLSZYr2ZlFSmlegJXAgsjLUusopTqCIwCXgHQWpdGWtlDy1L4PYA9fst7aWbKpiWhlOoLDAM2RFiUkLHcIR8DB4B/aa2b2zXMB+4BPBGWo6FoIE8pVaCUckRamBA5EygEXrNcawuVUu0iLVRLUvgqwLpmZZm1FJRS7YEcYKbW+odIyxMqWmu31noo0BMYoZRqNu41pdRVwAGtdUGkZQkDF2mtk4E0YLrl8mwuxAPJwEta62HAMSDi44otSeHvBXr5LfcE9kVIlpjF8nvnANla679GWp6GYHXB84HLIytJSFwE/NTyfy8FLlVKZUVWpPqhtd5nvR8AlmHcts2FvcBev97hW5gHQERpSQr/Q+AcpdQZ1gDJDcDfIyxTTGENeL4CbNNaPx1peeqDUqqbUirJ+twGuAz4LKJChYDWepbWuqfWui/mP/BvrXVGhMUKGaVUO2vgH8sVMg5oNtFrWuv9wB6l1HnWqrFAxIMX4iMtQLjQWpcrpWYA7wE24FWt9acRFisklFJLgFSgq1JqL/CI1vqVyEoVEhcBPwO2WD5wgPu11u9GTqSQ6Q4ssqK+4oC/aK2bbWhjM+ZUYJmxIYgHXtda/zOyIoXMr4FsywD9GpgWYXlaTlimIAiCUDMtyaUjCIIg1IAofEEQhBhBFL4gCEKMIApfEAQhRhCFLwiCECOIwhcEQYgRROELgiDECP8PxbM7ar5fiGMAAAAASUVORK5CYII=",
      "text/plain": [
       "<Figure size 432x288 with 1 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    }
   ],
   "source": [
    "# Evaluate both trained models on the held-out test set and compare them\n",
    "# against the noisy ground truth in a single scatter plot.\n",
    "keras_test_pred = keras_model.predict(x_test)\n",
    "\n",
    "# Non-inplace Tensor.resize() is deprecated in PyTorch; reshape() is the\n",
    "# supported equivalent for turning the 1-D test arrays into (N, 1) columns.\n",
    "x_te = torch.tensor(x_test).reshape(test_set_length, 1)\n",
    "y_te = torch.tensor(y_test).reshape(test_set_length, 1)\n",
    "# Inference only: no_grad() skips autograd bookkeeping, so the later\n",
    "# .detach() call becomes unnecessary.\n",
    "with torch.no_grad():\n",
    "    pytorch_test_pred = pytorch_model(x_te)\n",
    "    test_loss = criterion(pytorch_test_pred, y_te)\n",
    "\n",
    "pytorch_test_pred = pytorch_test_pred.numpy()\n",
    "# Graph the predictions against the actual values\n",
    "plt.clf()\n",
    "plt.title('Comparison of predictions and actual values')\n",
    "plt.plot(x_test, y_test, 'b.', label='Actual values')\n",
    "plt.plot(x_test, keras_test_pred, 'r.', label='keras predicted')\n",
    "plt.plot(x_test, pytorch_test_pred, 'g.', label='pytorch predicted')\n",
    "plt.legend()\n",
    "plt.show()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "使用tinymlgen这个包可以直接将keras模型转换成ESP32可读的C语言头文件，头文件中包含训练好的神经网络模型参数，这样就可以不使用xxd来生成C语言头文件了"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 30,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "WARNING:absl:Optimization option OPTIMIZE_FOR_SIZE is deprecated, please use optimizations=[Optimize.DEFAULT] instead.\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "INFO:tensorflow:Assets written to: C:\\Users\\WJ\\AppData\\Local\\Temp\\tmpklf8w2o0\\assets\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "INFO:tensorflow:Assets written to: C:\\Users\\WJ\\AppData\\Local\\Temp\\tmpklf8w2o0\\assets\n",
      "WARNING:absl:Optimization option OPTIMIZE_FOR_SIZE is deprecated, please use optimizations=[Optimize.DEFAULT] instead.\n",
      "WARNING:absl:Optimization option OPTIMIZE_FOR_SIZE is deprecated, please use optimizations=[Optimize.DEFAULT] instead.\n"
     ]
    }
   ],
   "source": [
    "import os\n",
    "\n",
    "# tinymlgen's port() converts the Keras model into a C header string\n",
    "# holding the converted model, ready to include in the ESP32 sketch.\n",
    "c_code = port(keras_model, optimize=True)\n",
    "# Ensure the sketch directory exists so the write cannot fail with\n",
    "# FileNotFoundError on a fresh checkout.\n",
    "os.makedirs('nodemcu32s_sin_led', exist_ok=True)\n",
    "with open('nodemcu32s_sin_led/nn_model.h', 'w') as f:\n",
    "    f.write(c_code)"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3.9.12 ('base')",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.9.12"
  },
  "orig_nbformat": 4,
  "vscode": {
   "interpreter": {
    "hash": "b09ec625f77bf4fd762565a912b97636504ad6ec901eb2d0f4cf5a7de23e1ee5"
   }
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
