{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "7295d9e3",
   "metadata": {
    "scrolled": true
   },
   "outputs": [],
   "source": [
    "import os\n",
    "import pandas as pd\n",
    "import numpy as np\n",
    "import gc\n",
    "import matplotlib.pyplot as plt\n",
    "from matplotlib import font_manager\n",
    "import tensorflow as tf\n",
    "from tensorflow.keras import layers\n",
    "from tensorflow import keras\n",
    "from scipy import stats\n",
    "from tensorflow.python.ops import math_ops\n",
    "from tensorflow.python.keras import backend as K\n",
    "\n",
    "from tensorflow.python.keras.utils.generic_utils import get_custom_objects\n",
    "from tensorflow.python.keras.layers import Activation\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "0f614716",
   "metadata": {},
   "outputs": [],
   "source": [
    "def swish(x):\n",
    "        return (K.sigmoid(x) * x)\n",
    "\n",
    "get_custom_objects().update({'swish': Activation(swish)})\n",
    "\n",
    "# def correlation(x, y, axis=-2):\n",
    "#     \"\"\"Metric returning the Pearson correlation coefficient of two tensors over some axis, default -2.\"\"\"\n",
    "#     x = tf.convert_to_tensor(x)\n",
    "#     y = math_ops.cast(y, x.dtype)\n",
    "#     n = tf.cast(tf.shape(x)[axis], x.dtype)\n",
    "#     xsum = tf.reduce_sum(x, axis=axis)\n",
    "#     ysum = tf.reduce_sum(y, axis=axis)\n",
    "#     xmean = xsum / n\n",
    "#     ymean = ysum / n\n",
    "    \n",
    "#     xvar = tf.reduce_sum( tf.math.squared_difference(x, xmean), axis=axis)\n",
    "#     yvar = tf.reduce_sum( tf.math.squared_difference(y, ymean), axis=axis)\n",
    "\n",
    "#     cov = tf.reduce_sum( (x - xmean) * (y - ymean), axis=axis)\n",
    "#     corr = cov / tf.sqrt(xvar * yvar)\n",
    "#     return tf.constant(1.0, dtype=x.dtype) - corr\n",
    "\n",
    "def get_model():\n",
    "    features_inputs = tf.keras.Input((5000,2 ), dtype=tf.float32)\n",
    "\n",
    "    ## feature ##\n",
    "#     feature_x = layers.Dense(2000, activation='swish')(features_inputs)\n",
    "    feature_x = layers.Dropout(0.1)(features_inputs)\n",
    "    ## convolution 1 ##\n",
    "    feature_x = layers.Reshape((-1,1))(feature_x)\n",
    "    ## convolution 1 ##\n",
    "    feature_x = layers.Conv1D(filters=500, kernel_size=21, strides=5, padding='same', activation=\"relu\")(feature_x)\n",
    "    feature_x = layers.BatchNormalization()(feature_x)\n",
    "#     feature_x = layers.LeakyReLU()(feature_x)\n",
    "    ## convolution 2 ##\n",
    "    feature_x = layers.Conv1D(filters=500, kernel_size=21, strides=1, padding='same', activation=\"relu\")(feature_x)\n",
    "    feature_x = layers.BatchNormalization()(feature_x)\n",
    "#     feature_x = layers.LeakyReLU()(feature_x)\n",
    "    ## 当padding为VALID，也就是无填充时，输出张量的大小为(输入张量的大小-pool_size + 1 )/ strides，当padding为SAME时，输出张量的大小为输入张量的大小 / strides。\n",
    "    feature_x = layers.MaxPool1D(pool_size=2,strides=None,padding='valid')(feature_x)\n",
    "\n",
    "    ## convolution 3 ##\n",
    "    feature_x = layers.Conv1D(filters=1000, kernel_size=5, strides=1, padding='same', activation=\"relu\")(feature_x)\n",
    "    feature_x = layers.BatchNormalization()(feature_x)\n",
    "#     feature_x = layers.LeakyReLU()(feature_x)\n",
    "    ## convolution 4 ##\n",
    "    feature_x = layers.Conv1D(filters=1000, kernel_size=5, strides=1, padding='same', activation=\"relu\")(feature_x)\n",
    "    feature_x = layers.BatchNormalization()(feature_x)\n",
    "#     feature_x = layers.LeakyReLU()(feature_x)\n",
    "    feature_x = layers.MaxPool1D(pool_size=2,strides=None,padding='valid')(feature_x)\n",
    "    \n",
    "    ## convolution 5 ##\n",
    "    feature_x = layers.Conv1D(filters=2000, kernel_size=5, strides=1, padding='same', activation=\"relu\")(feature_x)\n",
    "    feature_x = layers.BatchNormalization()(feature_x)\n",
    "#     feature_x = layers.LeakyReLU()(feature_x)\n",
    "    ## convolution 6 ##\n",
    "    feature_x = layers.Conv1D(filters=2000, kernel_size=5, strides=1, padding='same', activation=\"relu\")(feature_x)\n",
    "    feature_x = layers.BatchNormalization()(feature_x)\n",
    "#     feature_x = layers.LeakyReLU()(feature_x)\n",
    "    feature_x = layers.GlobalAveragePooling1D()(feature_x)\n",
    "    \n",
    "    ## flatten ##\n",
    "    feature_x = layers.Flatten()(feature_x)\n",
    "    \n",
    "#     DEPTH = 32\n",
    "#     feature_x = GRU(DEPTH, dropout = 0.1, recurrent_dropout = 0.5)(feature_x)\n",
    "\n",
    "#     x = layers.Dense(16000, activation='swish')(feature_x)\n",
    "#     x = layers.Dropout(0.1)(x)\n",
    "#     x = layers.Dense(2000, activation='swish')(feature_x)\n",
    "#     x = layers.Dropout(0.1)(x)\n",
    "#     x = layers.Dense(500, activation='swish')(x)\n",
    "    x = layers.Dropout(0.1)(feature_x)\n",
    "    output = layers.Dense(1)(x)\n",
    "    rmse = keras.metrics.RootMeanSquaredError(name=\"rmse\")\n",
    "    model = tf.keras.Model(inputs=[features_inputs], outputs=[output])\n",
    "    model.compile(optimizer=tf.keras.optimizers.Adam(0.0005), loss='mse', metrics=['mse', \"mae\", \"mape\", rmse])\n",
    "    return model\n",
    "model = get_model()\n",
    "model.summary()\n",
    "keras.utils.plot_model(model, show_shapes=True)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "325634c7",
   "metadata": {},
   "outputs": [],
   "source": [
     "tbCallBack = tf.keras.callbacks.TensorBoard(log_dir=\"./logs/cnn\")\n",
     "# After calling fit, open localhost:6006 to inspect TensorBoard results.\n",
     "\n",
     "model = get_model()\n",
     "history = model.fit(  # run the training loop with model.fit()\n",
     "    x_train, y1_train, # training inputs and labels\n",
     "    batch_size = 32, # batch size 32 (commonly 32/64/128/16; drop to 1 only if hardware forces it)\n",
     "    epochs = 500, # 500 epochs (reduce while testing)\n",
     "    validation_split = 0.15, # hold out the last 15% of the TRAINING data for validation\n",
     "#      callbacks=[\n",
     "#         tf.keras.callbacks.EarlyStopping(\n",
     "#             monitor='val_mse',\n",
     "#             patience=30,\n",
     "#             restore_best_weights=True\n",
     "#         )\n",
     "#     ],\n",
     "    callbacks=[tbCallBack],\n",
     "    shuffle=True,\n",
     "    validation_freq = 20) # run validation once every 20 epochs"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "92f31dfd",
   "metadata": {},
   "outputs": [],
   "source": [
     "# Save the trained model (architecture + weights) to HDF5.\n",
     "model.save(r'model_data/cnn.h5')\n",
     "# Persist the training history dict for later plotting via pickle.dump().\n",
     "import pickle\n",
     "with open('model_data/cnn.txt', 'wb') as file_txt:\n",
     "    pickle.dump(history.history, file_txt)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "60f4e499",
   "metadata": {},
   "outputs": [],
   "source": [
    "with open('log.txt','rb') as file_txt:\n",
    "    history=pickle.load(file_txt)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "e1ff5a39",
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.13"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
