{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# 提升树算法"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "提升方法实际上是采用加法模型（即基函数的线性组合）与前向分步算法来进行的一类算法。事实证明，这类算法具有很好的拟合性能。\n",
    "算法如下\n",
    "\n",
    "1. 初始化 $f_0(x)=0$\n",
    "2. 对m=1,2,3,...M求残差（一般为平方误差 如果是分类问题就交叉熵误差）\n",
    "3. 拟合残差学习新的回归树\n",
    "4. 更新 $f_m(x)=f_{m-1}(x)+T(x;\\theta_m)$\n",
    "5. 直到误差小于我们规定的误差停止\n",
    "\n",
    "\n",
    "上面的是普通提升树的算法，而 GBDT 为梯度提升树。什么意思呢？在刚刚计算残差的那一步中，我们使用的是简单的损失函数；如果损失函数较为复杂，直接优化并不容易。于是提出了梯度提升算法：用损失函数的负梯度在当前模型处的取值，作为回归问题中残差的近似值，以此来拟合回归树。"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "import numpy as np\n",
    "import math"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "\n",
    "# Compute the information entropy (base-2 Shannon entropy) of the labels y.\n",
    "def calculate_entropy(y):\n",
    "    \"\"\"Return the entropy of label vector y, summed over distinct labels.\"\"\"\n",
    "    n_total = len(y)\n",
    "    entropy = 0\n",
    "    for label in np.unique(y):\n",
    "        proportion = np.sum(y == label) / n_total\n",
    "        entropy -= proportion * math.log2(proportion)\n",
    "    return entropy\n",
    "# Node of the decision tree: either an internal split node or a leaf.\n",
    "class DecisionNode():\n",
    "    def __init__(self, feature_i=None, threshold=None,\n",
    "                 value=None, true_branch=None, false_branch=None):\n",
    "        \"\"\"Internal nodes carry (feature_i, threshold, branches); leaves carry value.\"\"\"\n",
    "        self.feature_i = feature_i          # index of the feature tested at this node\n",
    "        self.threshold = threshold          # split threshold for feature_i\n",
    "        self.value = value                  # prediction stored at a leaf (None for internal nodes)\n",
    "        self.true_branch = true_branch      # subtree for samples that satisfy the split\n",
    "        self.false_branch = false_branch  # subtree for the remaining samples\n",
    "def divide_on_feature(X, feature_i, threshold):\n",
    "    \"\"\"Split the rows of X on column feature_i.\n",
    "\n",
    "    Numeric thresholds split with >=, any other type with ==.\n",
    "    Returns (X_1, X_2): the rows that satisfy the split and the rows that do not.\n",
    "    \"\"\"\n",
    "    if isinstance(threshold, (int, float)):\n",
    "        split_func = lambda sample: sample[feature_i] >= threshold\n",
    "    else:\n",
    "        split_func = lambda sample: sample[feature_i] == threshold\n",
    "\n",
    "    X_1 = np.array([sample for sample in X if split_func(sample)])\n",
    "    X_2 = np.array([sample for sample in X if not split_func(sample)])\n",
    "\n",
    "    # Return a tuple instead of np.array([X_1, X_2]): the two halves usually\n",
    "    # have different lengths, and wrapping ragged arrays in np.array raises a\n",
    "    # ValueError on NumPy >= 1.24. Callers already unpack two values.\n",
    "    return X_1, X_2\n",
    "# Base class shared by the concrete tree models (e.g. RegressionTree below).\n",
    "class DecisionTree(object):\n",
    "    \"\"\"Generic binary decision tree.\n",
    "\n",
    "    Subclasses must set _impurity_calculation (split criterion) and\n",
    "    _leaf_value_calculation (leaf prediction) before fit() is called.\n",
    "    \"\"\"\n",
    "    def __init__(self, min_samples_split=2, min_impurity=1e-7,\n",
    "                 max_depth=float(\"inf\"), loss=None):\n",
    "        self.root = None  # root node of the fitted tree\n",
    "        self.min_samples_split = min_samples_split\n",
    "        self.min_impurity = min_impurity\n",
    "        self.max_depth = max_depth\n",
    "        # Split criterion: e.g. information gain for classification,\n",
    "        # variance reduction for regression (supplied by the subclass).\n",
    "        self._impurity_calculation = None\n",
    "        self._leaf_value_calculation = None # computes the prediction stored at a leaf\n",
    "        self.one_dim = None\n",
    "        self.loss = loss\n",
    "\n",
    "    def fit(self, X, y, loss=None):\n",
    "        \"\"\"Grow the tree on features X and targets y.\"\"\"\n",
    "        self.one_dim = len(np.shape(y)) == 1\n",
    "        self.root = self._build_tree(X, y)\n",
    "        # NOTE(review): the `loss` argument is ignored and self.loss is cleared\n",
    "        # here; presumably loss is only needed while building -- confirm that\n",
    "        # nothing relies on self.loss after fit().\n",
    "        self.loss=None\n",
    "\n",
    "    def _build_tree(self, X, y, current_depth=0):\n",
    "        \"\"\"\n",
    "        Recursively build the tree: search every (feature, threshold) pair for\n",
    "        the best split, recurse on both halves, or emit a leaf when no split\n",
    "        improves impurity enough.\n",
    "        \"\"\"\n",
    "\n",
    "        largest_impurity = 0\n",
    "        best_criteria = None\n",
    "        best_sets = None\n",
    "        \n",
    "        # Ensure y is a column so it can be concatenated with X below.\n",
    "        if len(np.shape(y)) == 1:\n",
    "            y = np.expand_dims(y, axis=1)\n",
    "\n",
    "        Xy = np.concatenate((X, y), axis=1)\n",
    "\n",
    "        n_samples, n_features = np.shape(X)\n",
    "\n",
    "        if n_samples >= self.min_samples_split and current_depth <= self.max_depth:\n",
    "            # Evaluate the impurity gain of splitting on every feature.\n",
    "            for feature_i in range(n_features):\n",
    "                feature_values = np.expand_dims(X[:, feature_i], axis=1)\n",
    "                unique_values = np.unique(feature_values)\n",
    "\n",
    "                # Try every observed value of the feature as a threshold.\n",
    "                for threshold in unique_values:\n",
    "                    Xy1, Xy2 = divide_on_feature(Xy, feature_i, threshold)\n",
    "                    \n",
    "                    if len(Xy1) > 0 and len(Xy2) > 0:\n",
    "                        # Target columns of each half (X and y travel together in Xy).\n",
    "                        y1 = Xy1[:, n_features:]\n",
    "                        y2 = Xy2[:, n_features:]\n",
    "\n",
    "                        # Impurity gain of this candidate split.\n",
    "                        impurity = self._impurity_calculation(y, y1, y2)\n",
    "\n",
    "                        if impurity > largest_impurity:\n",
    "                            largest_impurity = impurity\n",
    "                            best_criteria = {\"feature_i\": feature_i, \"threshold\": threshold}\n",
    "                            best_sets = {\n",
    "                                \"leftX\": Xy1[:, :n_features],  \n",
    "                                \"lefty\": Xy1[:, n_features:],   \n",
    "                                \"rightX\": Xy2[:, :n_features],  \n",
    "                                \"righty\": Xy2[:, n_features:]   \n",
    "                                }\n",
    "\n",
    "        # Recurse only if the best split is a real improvement.\n",
    "        if largest_impurity > self.min_impurity:\n",
    "            true_branch = self._build_tree(best_sets[\"leftX\"], best_sets[\"lefty\"], current_depth + 1)\n",
    "            false_branch = self._build_tree(best_sets[\"rightX\"], best_sets[\"righty\"], current_depth + 1)\n",
    "            return DecisionNode(feature_i=best_criteria[\"feature_i\"], threshold=best_criteria[\n",
    "                                \"threshold\"], true_branch=true_branch, false_branch=false_branch)\n",
    "        \n",
    "        # Otherwise this node becomes a leaf holding the aggregated target value.\n",
    "        leaf_value = self._leaf_value_calculation(y)\n",
    "        \n",
    "        \n",
    "        return DecisionNode(value=leaf_value)\n",
    "\n",
    "\n",
    "    def predict_value(self, x, tree=None):\n",
    "        \"\"\"\n",
    "        Predict the value of a single sample x by walking from `tree`\n",
    "        (defaults to the root) down to a leaf.\n",
    "        \"\"\"\n",
    "\n",
    "        if tree is None:\n",
    "            tree = self.root\n",
    "\n",
    "        # A node carrying a value is a leaf.\n",
    "        if tree.value is not None:\n",
    "            return tree.value\n",
    "\n",
    "        feature_value = x[tree.feature_i]\n",
    "\n",
    "        # Numeric features compare with >=, categorical ones with ==\n",
    "        # (mirrors divide_on_feature).\n",
    "        branch = tree.false_branch\n",
    "        if isinstance(feature_value, int) or isinstance(feature_value, float):\n",
    "            if feature_value >= tree.threshold:\n",
    "                branch = tree.true_branch\n",
    "        elif feature_value == tree.threshold:\n",
    "            branch = tree.true_branch\n",
    "\n",
    "        return self.predict_value(x, branch)\n",
    "\n",
    "    def predict(self, X):\n",
    "        \"\"\"Predict a value for every row of X.\"\"\"\n",
    "        y_pred = []\n",
    "        for x in X:\n",
    "            y_pred.append(self.predict_value(x))\n",
    "        return y_pred\n",
    "def calculate_variance(X):\n",
    "    \"\"\"Return the per-column (population) variance of dataset X.\"\"\"\n",
    "    # Equivalent to (1/n) * diag((X - mean).T.dot(X - mean)) but avoids\n",
    "    # materialising a full d x d scatter matrix just to read its diagonal.\n",
    "    return np.var(X, axis=0)\n",
    "class RegressionTree(DecisionTree):\n",
    "    \"\"\"Decision tree for regression: splits by variance reduction, leaves hold means.\"\"\"\n",
    "\n",
    "    def _calculate_variance_reduction(self, y, y1, y2):\n",
    "        \"\"\"Drop in total variance achieved by partitioning y into y1 and y2.\"\"\"\n",
    "        total_var = calculate_variance(y)\n",
    "        weighted_child_var = (len(y1) / len(y)) * calculate_variance(y1) \\\n",
    "                           + (len(y2) / len(y)) * calculate_variance(y2)\n",
    "        # Summed over target columns so a scalar score comes back.\n",
    "        return sum(total_var - weighted_child_var)\n",
    "\n",
    "    def _mean_of_y(self, y):\n",
    "        \"\"\"Leaf value: column-wise mean (unwrapped to a scalar for a single column).\"\"\"\n",
    "        mean = np.mean(y, axis=0)\n",
    "        if len(mean) > 1:\n",
    "            return mean\n",
    "        return mean[0]\n",
    "\n",
    "    def fit(self, X, y):\n",
    "        \"\"\"Wire up the regression criteria, then grow the tree.\"\"\"\n",
    "        self._impurity_calculation = self._calculate_variance_reduction\n",
    "        self._leaf_value_calculation = self._mean_of_y\n",
    "        super(RegressionTree, self).fit(X, y)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "class GradientBoosting(object):\n",
    "    \"\"\"Gradient boosting with regression trees as base learners.\n",
    "\n",
    "    Each tree is fitted to the gradient of the loss at the current model's\n",
    "    predictions; the model steps against that gradient, scaled by\n",
    "    learning_rate, starting from the mean of the training targets.\n",
    "    \"\"\"\n",
    "    def __init__(self, n_estimators, learning_rate, min_samples_split,\n",
    "                 min_impurity, max_depth, regression):\n",
    "        self.n_estimators = n_estimators\n",
    "        self.learning_rate = learning_rate\n",
    "        self.min_samples_split = min_samples_split\n",
    "        self.min_impurity = min_impurity\n",
    "        self.max_depth = max_depth\n",
    "        self.regression = regression\n",
    "        self.initial_prediction = None  # mean of the training targets; set by fit()\n",
    "\n",
    "        # Square loss for regression, cross-entropy for classification.\n",
    "        self.loss = SquareLoss()\n",
    "        if not self.regression:\n",
    "            self.loss = CrossEntropy()\n",
    "\n",
    "        self.trees = []\n",
    "        for _ in range(n_estimators):\n",
    "            tree = RegressionTree(\n",
    "                    min_samples_split=self.min_samples_split,\n",
    "                    min_impurity=min_impurity,\n",
    "                    max_depth=self.max_depth)\n",
    "            self.trees.append(tree)\n",
    "\n",
    "\n",
    "    def fit(self, X, y):\n",
    "        \"\"\"Fit n_estimators trees, each on the loss gradient of the current model.\"\"\"\n",
    "        self.initial_prediction = np.mean(y, axis=0)\n",
    "        y_pred = np.full(np.shape(y), self.initial_prediction)\n",
    "        for i in range(self.n_estimators):\n",
    "            gradient = self.loss.gradient(y, y_pred)\n",
    "            self.trees[i].fit(X, gradient)\n",
    "            update = self.trees[i].predict(X)\n",
    "            # Step against the gradient direction.\n",
    "            y_pred -= np.multiply(self.learning_rate, update)\n",
    "\n",
    "\n",
    "    def predict(self, X):\n",
    "        \"\"\"Predict targets (regression) or class indices (classification) for X.\"\"\"\n",
    "        y_pred = None\n",
    "        for tree in self.trees:\n",
    "            update = np.multiply(self.learning_rate, tree.predict(X))\n",
    "            # None sentinel for the first tree: the original `not y_pred.any()`\n",
    "            # check restarted the sum whenever it happened to be all zeros.\n",
    "            y_pred = -update if y_pred is None else y_pred - update\n",
    "\n",
    "        # Add back the initial mean used by fit(); omitting it biased every\n",
    "        # prediction by mean(y).\n",
    "        y_pred = y_pred + self.initial_prediction\n",
    "\n",
    "        if not self.regression:\n",
    "            # Softmax over the raw scores, then take the most probable class.\n",
    "            y_pred = np.exp(y_pred) / np.expand_dims(np.sum(np.exp(y_pred), axis=1), axis=1)\n",
    "            y_pred = np.argmax(y_pred, axis=1)\n",
    "        return y_pred"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 32,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "class Loss(object):\n",
    "    \"\"\"Interface for loss functions used by gradient boosting.\"\"\"\n",
    "    def loss(self, y_true, y_pred):\n",
    "        # Was `return NotImplementedError()`, which handed the caller an\n",
    "        # exception object instead of signalling the missing override.\n",
    "        raise NotImplementedError()\n",
    "\n",
    "    def gradient(self, y, y_pred):\n",
    "        raise NotImplementedError()\n",
    "\n",
    "    def acc(self, y, y_pred):\n",
    "        # Default accuracy for losses that do not define one.\n",
    "        return 0\n",
    "# Loss used when the model does regression.\n",
    "class SquareLoss(Loss):\n",
    "    \"\"\"Squared error: L = 0.5 * (y - y_pred)^2.\"\"\"\n",
    "\n",
    "    def __init__(self): pass\n",
    "\n",
    "    def loss(self, y, y_pred):\n",
    "        residual = y - y_pred\n",
    "        return 0.5 * residual ** 2\n",
    "\n",
    "    def gradient(self, y, y_pred):\n",
    "        # dL/dy_pred\n",
    "        return y_pred - y\n",
    "# Loss used when the model does classification (one-hot targets).\n",
    "class CrossEntropy(Loss):\n",
    "    \"\"\"Cross-entropy loss over probabilities p for one-hot targets y.\"\"\"\n",
    "    def __init__(self): pass\n",
    "\n",
    "    def loss(self, y, p):\n",
    "        # Clip to avoid log(0) / division by zero.\n",
    "        p = np.clip(p, 1e-15, 1 - 1e-15)\n",
    "        return - y * np.log(p) - (1 - y) * np.log(1 - p)\n",
    "\n",
    "    def acc(self, y, p):\n",
    "        # Implemented with NumPy: the original called sklearn's accuracy_score,\n",
    "        # which is never imported in this notebook and raised a NameError.\n",
    "        return np.mean(np.argmax(y, axis=1) == np.argmax(p, axis=1))\n",
    "\n",
    "    def gradient(self, y, p):\n",
    "        # Clip to avoid division by zero.\n",
    "        p = np.clip(p, 1e-15, 1 - 1e-15)\n",
    "        return - (y / p) + (1 - y) / (1 - p)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 33,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "class GradientBoostingRegressor(GradientBoosting):\n",
    "    \"\"\"Convenience wrapper: GradientBoosting configured for regression.\n",
    "\n",
    "    min_var_red is the minimum variance reduction needed to keep splitting\n",
    "    (forwarded to the trees as min_impurity); `debug` is accepted but unused.\n",
    "    \"\"\"\n",
    "    def __init__(self, n_estimators=200, learning_rate=0.5, min_samples_split=2,\n",
    "                 min_var_red=1e-7, max_depth=4, debug=False):\n",
    "        super(GradientBoostingRegressor, self).__init__(n_estimators=n_estimators, \n",
    "            learning_rate=learning_rate, \n",
    "            min_samples_split=min_samples_split, \n",
    "            min_impurity=min_var_red,\n",
    "            max_depth=max_depth,\n",
    "            regression=True)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "import pandas as pd"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "data = pd.read_csv('TempLinkoping2016.txt', sep=\"\\t\")\n",
    "\n",
    "# DataFrame column .as_matrix() was removed in pandas 1.0; .to_numpy() is the\n",
    "# supported replacement and returns the same ndarray.\n",
    "time = np.atleast_2d(data[\"time\"].to_numpy()).T\n",
    "temp = np.atleast_2d(data[\"temp\"].to_numpy()).T\n",
    "\n",
    "X = time.reshape((-1, 1))               # Time. Fraction of the year [0, 1]\n",
    "X = np.insert(X, 0, values=1, axis=1)   # Insert bias term\n",
    "y = temp[:, 0]    "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# sklearn.cross_validation was removed in scikit-learn 0.20; the function\n",
    "# now lives in sklearn.model_selection.\n",
    "from sklearn.model_selection import train_test_split"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 16,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# Hold out half of the data; fix random_state so the split (and hence the\n",
    "# reported MSE below) is reproducible on Restart & Run All.\n",
    "X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.5, random_state=42)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 19,
   "metadata": {},
   "outputs": [],
   "source": [
    "# NOTE(review): this cell re-defines Loss and SquareLoss exactly as in the\n",
    "# earlier cell; the duplicates silently shadow the originals and can be\n",
    "# removed once the notebook is confirmed to run top-to-bottom without them.\n",
    "class Loss(object):\n",
    "    def loss(self, y_true, y_pred):\n",
    "        return NotImplementedError()\n",
    "\n",
    "    def gradient(self, y, y_pred):\n",
    "        raise NotImplementedError()\n",
    "\n",
    "    def acc(self, y, y_pred):\n",
    "        return 0\n",
    "class SquareLoss(Loss):\n",
    "    def __init__(self): pass\n",
    "\n",
    "    def loss(self, y, y_pred):\n",
    "        return 0.5 * np.power((y - y_pred), 2)\n",
    "\n",
    "    def gradient(self, y, y_pred):\n",
    "        return -(y - y_pred)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 20,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Train on the training half, then predict on the held-out half and on the\n",
    "# full input range (for plotting the fitted curve).\n",
    "model = GradientBoostingRegressor()\n",
    "model.fit(X_train, y_train)\n",
    "y_pred = model.predict(X_test)\n",
    "\n",
    "y_pred_line = model.predict(X)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 21,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "from sklearn.metrics import mean_squared_error"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 22,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# Mean squared error on the held-out set; end the cell with the value so it\n",
    "# is actually displayed (the original computed it but showed nothing).\n",
    "mse = mean_squared_error(y_test, y_pred)\n",
    "mse"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 24,
   "metadata": {},
   "outputs": [],
   "source": [
    "from matplotlib import pyplot as plt\n",
    "%matplotlib inline"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 31,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXwAAAD8CAYAAAB0IB+mAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJztvX+UI2d15/15Rhq1EQ7NDxtjHI9t2tAYBmPSDeMsOdkJ\nHgTIdoF/jEMf4pPdzTlu3qNhMxCOjbvX/PLbbOxdsn43PSGNCb3YcJrMMLCqZDq4mE4mkOyxwwwM\nMOBp7LYzNqxjYwJNguLuac3z/lFVUqlUJZWk0o+W7uecOt0qlaoelapuPc997v1epbVGEARB6H+2\ndLsBgiAIQmcQgy8IgjAgiMEXBEEYEMTgC4IgDAhi8AVBEAYEMfiCIAgDghh8QRCEAUEMviAIwoAg\nBl8QBGFASHa7AV7OOeccffHFF3e7GYIgCJuKY8eOPau1Prfedj1l8C+++GKOHj3a7WYIgiBsKpRS\np6JsJy4dQRCEAUEMviAIwoAgBl8QBGFAEIMvCIIwIIjBFwRBGBDE4AuCIAwIYvAFQRAGBDH4giAI\nA4IYfEHoMOayyZ59V2N+4GowzW43RxggeirTVhD6HXPZZOLPb6Kg15h/HizccRiDA2AY3W6aMABI\nD18QOoi1YlHQawAUUmBdsA6W1eVWCYOCGHxB6CCZkQxpNQRAeh0yP05BJtO+A5om7NkjriMBAKW1\n7nYbSoyPj2sRTxO6jmnave5Mpi2uFnPZxDo8R2YFjJ2T7XPnmCZMTEChAOk0LCyI66hPUUod01qP\n19tOfPiC4MVrJOfn22IkjVEDY7QDhtey7O8B9l/LEoM/4IhLRxC8BBnJzUomY/fswf7bTteRsCkQ\ngy8IXvrJSBqGPULJ5cSdIwDiwxeEatrswxeEuBEfviA0i2GIoRf6EnHpCIIHc9lkz+IezOWYwxgl\nPFLoAcTgC4KDuWwycXCCfd/cx8TBifiMvhv5s2+f/VeMvtAlxOALgoO1YlE4bUfoFE4XsFZiitDx\nRP6YFxbY8/fT8Y8gBCECLRt8pdSFSqm/UUr9QCn1faXU7zvrX6yU+ppS6hHn74tab64gtI/MSIb0\nVjtCJ701TWYkpggdJ/LHHIWJG2Ff+kS8I4gwxI0k+Iijh78B/IHW+jXAlUBOKfUa4EPAktb6lcCS\n81oQehZj1GDhhgVyb8yxcMNCfMlRTnik9c7tFLbaq2IdQQQhbiQhgJYNvtb6Ka31t5z//wV4GLgA\neCfwOWezzwHvavVYwmASZSI1rslWY9RgNjsbayasuWyyJ2kx/DajPIJQQ2QOP94+Q9xPCWRCbMQa\nh6+Uuhj4OrAdeEJr/UJnvQJ+5r72feYW4BaAbdu2jZ06dSq29gibH3citXC6QHprOrDnHWWbbuFv\n294r97L68HEynz6M8d319mnciI7OQBE1Dj+2SVul1NnAQWCv1voX3ve0/VQJfLJorT+ttR7XWo+f\ne+65cTVH6BOqJlIPz9Xfpp2ukgbxt231AZPZRWxjD+3rfUuWrRBALAZfKbUV29h/QWv9ZWf100qp\n8533zweeieNYwmBRJSf86cOVbhDT5OlvfLXiM8NnDbd+4JgmPCsmgk9DJn8CDh+GIfs7tVW+wTBg\ndlaMvVAijigdBfwZ8LDW+o88b5nA7zr//y6Qb/VYwuBhjBos/Pwqcg/BwkGnZ+z2iE0T847dfGXr\nSsVnVp9bbe2gMU54liaCC9tZ+BIYy8D6Olx1VW/1viWiZyCIo4f/ZuBm4C1KqePOkgX+EHirUuoR\nYJfzWhAaxtg5yezfpm1j6e0RWxbWBesUE+Vtk1uSrYdTxjzhaYwazL55BuNJjyjb5CTMzmKO0p7M\n3kbwPODMO3bb9XYlT6AvaVlLR2v9d4AKefuqVvcvCCV/tF/QLJMhc8e9zK+vU0hBgi3c+uZbW5+w\nzWRsLXx3wjMOl0vAd/BO6N77rXvZdc
kuJscnOz/h7DzgzFGYuGadwrOLzB880lOT30I8iFqmsLkx\nTcwjc1gjkNkVo7FsUDHTXDaxViwyI5nIbdizuId939xXsa4rUUZOD3/Pvy+wb0d5dfbSLJf8HIZP\nPc3qRefFe36FWIkapSMGXxAcmjHa7ueaCQv1fs5L7o05ZrOzDbe/1nHqfq/pacyv/CET152hkIIh\nlURrWGfDjq9Tdu7Awm/vF6Pfg3Q8LFMQNjOtCKc1GxbqTuiOnT9GQtkTESVJh5gmUf3fa/qvp4Pn\nDFZXMR4+w8JByD0EVz33ctvYQ8lhW9BrPRXyKjSOGHyhL2k087aVWP5mNXjMZZO5o3OceOYERV0k\noRLsvXKvPTk9MYF5eB97PnM95v3Tkdvix/+97vq7u4Ifao7ej7EMs3+bZvKy3ymFw7oZNGk1FJ++\nkNAVpACK0NM042bxukrmj89XuFjc/Q2fNczqc6ul/WZGMswfny+5ZfyGrVY73J56I+0McucUddEO\nKbUszAsLTNwAhVSR+ZW7WVje0ZQrxfu9kluSbJyxe+3uQ620T9+ksmEYLCzvwDo8Jz78PkIMvtCz\n1DLctQjqrRujRqCR9e43zGhHaYcxaoS2Lehh4W2jS+lBkwHriT+lkCra30FtVBrnOufMeyzv9xo+\na5h7Hrwn9KHmr/RV6zsJmxMx+ELPEma46xHWWw8yst79hhm4ZtsB4Q8LbxtTiVRlSOYoZFZvY37l\nbgpqI7KbyH+svVfuLY1i3EngHRfsaGpiWugPxOALXSGKq6aemyWMsN66d38uUfbbbDsg/GFRzw1k\n3Dxju1QaMM7+Y93993ezcWaj4kETqddumjDnaBZNTvZGJrAQCxKWKXScRsIYG/LhR4idD/PhB33W\ne2ygbjuC2hqLkmfEnADvsRIqQVEXS+9FDvU0TbjpJlhbs1+nUnDggBj9Hkfi8IWexZ9w1HTcudcQ\nQtNywOb901gH7iLzw6Itf7CwwPTZD5V6yK6hhhCj7yR/TbxwiYJeqzLsUR9agdtNT8Ndd0GxGOl7\nlR5oTzzNPf/0v0suoYoHTa0HyJ49toaQl1zOFmETepaoBl9cOkLHGX7i6VIyDzSpbunVe5+fh507\nq/VvImbITqzcTWGsyPzrYOFgAY7McdcLHyj1kAunC8wdnePIqSPVE7eOQbYyRQpOlqor4+wa2Chu\nlEBf/zJw9922sfd+Lwg12MaoYX/uDybYceEG1qsSZHbvrTT23vPmf4BkMvCZz1T28Nul5il0HInD\nFzqLabK6+JUK9aWm1C39Amdg94Ddv1GMlGky97n3UVBOqGIKrFclsEaocIckt9j9oqo4fdMsGeTM\nY7Z8M8CWInz1h4tc/eFLMT9wdaTkqSpf/2enbT/6xkZ5o0QChofrx+g758ZYhtm/KGI8tFr1nn2g\ngv3A8rbPMGD/fshm7UXcOX2FGHyhs1gWmR8WS8YxrZtUt3QSheydOOqTjRT8cKSVlxJPlFal9BYy\nu28js2uylEiVUAluffOtTI5PVidXeQyysQx7H7SN/ZkErLwYFhMr3PD8Ra4+cF3d5KlA3Xy/Sud1\n18HqailGf99YkYmVu6uTy/znxvvw874HcOJEtQS0YcChQ/Yixr6vEJeO0FkyGYz5eRYOFhx3Q5Pq\nlmEKmlENlCOtvOa5A1738jdg3DwDEBhBU7HuvofggQcwR8F6BWQeV6y+9hWcSVRq828kYfHSMxyp\nkzxVitr57DSZ/AnbLcNG5UbnnQeZTGCMPnjmF8LOjee8mbPvw0o+QeYxMJaju8CEzY1M2gqdp0El\nyna1wbxjNzcZZaOfSqQ4sPtAtGig66/HvLToZMNCuphg787b+O/fuLusQeMj0uS0d5I2lQKlbH+6\nZ8LWvH/anndQGwwlhtj+0u2ceOYEa8U1EirBbS+9jpkT55VlmH2n21w2mfjzm+wJ5nVY+MsUxp3i\nut
nMiHia0Lv0Quk9w8C48wBXFbeVVq0X16s1dIJEzCwLikWsV9jGHqCQsGURDrz7INlLs4ydP8bY\n80YY0j5RtFpMT5cnaZNJ+OAHbX+6z01l3DzDwoR9HI3m2FPHWCvak6xFXeTu//slzMN2tS5z+qGq\n4l3WikVB29sXUmDdskuM/YAgLh1hcDEMJkfhiCdOvsIoh0W0OAVSMo8VmH+D08N3PuuPyImcR2Ca\n5Z492HMDq6tVcgelpo8aWCsW64+uV723kbDdTMZyAcv8t6rgpcz7fIlkuybrnyvPMMEcrZ+TIPQm\nYvCFztIL7hwPNTNeg0odugZ4YQHDslh49TDWS1ZDjV9kPRpn1FAimawbaeQXRtNaU9RFe9L3MSCd\nJmM8j/nHnPSEoQ0yj/8ZxvL5jYm9eR585jfuZeIGRUGvheoKNVtXQOgAWuueWcbGxrTQx+TzWqfT\nWoP9N5/vThtyuWjHdtqbH0Xnrk3o/H1T7W2Xe24SCa2nIhwrn9f592d1bjar8/dNVfzv/Y75vNa5\n7IrOp24MPvf1zkkuZ38OdO4daD5aXnKHcpVNOpnX6Zm05qPo9Exa50924TceQICjOoKN7bqR9y5i\n8Pscj+HQYL/uJFNTtjFt4IGTv29Kpz+SrDRgjTw0GqHWfv3veR8QQ0Nap1K1v1fYuQ95COdP5nXu\nUK78fZ1t8pendPpjQ6EGPXcoV/OBILSHqAZfJm2FzlErPrzdeJKkgMqs1RpYL1ktJ2adLmB94eNw\n/fWVs6BxETaZ7bpUKmZePe6mtTVYX6/9vcLO/dxclduqqvrXKKUcB+OaD7Lw86vInZMNdOdkfjpM\nWtue4kaF5oT2Iz58oXPUig9vN5ZVnbUa4YFToZSphsh86dtQPGO/2YCEQ0sEzSU4E8cUCjA0ZPfb\n19fDH6RB59404fDh8jZDQ3acf5DCp+GEk05MYBQKGOk0LEzCqOcYponx3ntYCJJ0EHoCMfhCZwmJ\nOmkL3glir4FMJuHWWyO1o6KAyJEHsS4+BmfszNqoD42W8bbdNeh+Aw71H6T+c29Z5ZEBwFVXgWGQ\nWSZYDjpsEtu7P0fSwVguwrZVuDm+0yDEQBS/T6cW8eH3JxX+4I4dNMA33YLvPX8yX/ZdT6Hzl22J\nNrEaFzXa3sj5DfPN+33/gfusNemez2udzdrzCd2clB9QkElboRfoWtRGrQniJgx/1WTkbLYNjW6c\nRs5v4LbeSJ8ov03QufM+CFIp2/CLse8oUQ2+TNoKbSXIHxwVc9lkz+KeKnGwsPUV7+0YDp6kDJoA\njUCFuFnUZKU6mCZcfTWMj5eXq6OJa5Zo5PwGbWuOwsQ5R9j37KI9QRtwTivwTyybpp0hXChgjsKe\nq9btSd4eyLEQqhEfvtBWmi0PGFYLtlZB8Yr3tqZZ+NO9tjSw169dzw8dQr2ShI1imrB7d6UL3eXw\n4eiqxI2c36BtW6nXW5GQNUpJV2heLbGwbMqEbQ8iPXyhrbiGMvfGXEPl/aoM0eG54PWeHq3/vbkt\nx6vDHFsIDTVGDWazs7EYMv98qZf19UgRo6U2RT2/QdtWjVxqPDCqRlaeh2eFrpBea2gkJ3QOUcsU\nukatFPwgRUeu+Tpzp87lsLqN9Uu/RFonWRi5tSRpbC6b7D6wm/WibUmHEkPs372/2gh2Ud7BrQ/+\n9NPwne9URoq6dLqMbBQphMDavMvADTfAxkZFD7/p2r1C00hNW6GniVLc2/zA1VgnF21dmOVrmUge\noLAxxFBqnatefxOTv8hjPJnG3PvXfPyBHTz7LCRf/0VWfm2itI9m6+W6hhns2ipxGF9/ffBkEl7/\n+sptzjuvxvG6+KAKq0Ns/pfdWN/+EpkVYCiFdcsuMrsmxdh3GKlpK/Qc3p5kPd+xuWxijUBmKYWx\nvM6eRJbCxhAAa+spLvnmVRjkMbmKG/7rGBtuv+XUb5NcPcXGb32o
6UxPv399aclWKW7VxlpW2diD\n3bu/8sqI9cF9AmbWk501rEH+f3PZZCK9SOFNMP/GJAsjH2T25hm7rX+8p2cE8oQy4sMXOoI/XX/4\nrOFQ33Fp22cXmbhBYb4/S+a2N5Rd70MbZFJfB8BKZNnQ3n6L4oKn3lvl064V2ePH719fW4vuU69F\nJmMns7o0VB/c8ZebozBxzXr0qBqXIF3/Bgjy/1c8tNUG1ktWm46CEjqDGHyhI/h79KvPrYZONlZs\nq9ewdl2CMbODvf/9IbZfc4S9/+MYxoGbIZcjc9sbSPrGqe+5cbhicrVKG6aOkcxkbGPs4igOtExL\n9cGdyeaKydGoYa5tMsKBujlBUVBCzyAGX+gIQdEgpaiXZSp6n0Hbmssm9/zsLZwY/y3u+dlb7Fjv\n2VmMmR0cPAhjY3DRRTA1BTP2HG6pVz93dK6hXADDsI2xa5jjcOd4991UfXBHSiHz6ixpZQ8TIrus\nGjDCQQMB04Srf+cxdv+/95cfmvdP27o5X9wgdyzBwoWObk43BfKE+kTJzurUIpm2/U0j6fr+bRuV\n3fVmlQ7dOaRTd6Y2nUZ7WEJww1IVEesQhKlRuOvY+q+ad19rn/9bt8eaySy0BhEzbWXSVugYgdWf\nQhKh/Ns2msDldQutFdfIXprlkhddsmmqMIVVV4QGqmi5RFQpDRsIuOs4/XxYeSvp1y2R2W5A2i2l\n5evJd1IgT2gIMfhC2zGXTeaO2jGOk+O+yJIgJcgAKjJdfzqM8ccWZAg1LP4HRNVxe5wmE4LDiWCE\nw36KkgrzWRtc9fYUk+6cy/CO8IdIj5WyFByiDAPqLcBngWeAE551Lwa+Bjzi/H1Rvf2IS6f/yJ/M\n66E7h0qumNSdqWpXhMcFkD+Z19nPZ3X28yFiXg2USeyKSmdMdKsaZJg2WkMeml4oZTlg0GHxtP8F\nvN237kPAktb6lcCS81oYMKwVi7ViOfh8vbheMWlqLpvsSVqY78tgjsJNB25i8dFFFh9dZPeB3dUR\nNQ1MQLrSAdbhOcwPNKhK1mVcL0wuV+nO6cRx/WoUYYW4QpFInZ4lFoOvtf468M++1e8EPuf8/zng\nXXEcS9hcZEYyDCXKweepRKrkf/eHS84dnav5cLB3GD0KxJVn2PfsIhPPW8S8Y/emM/p+Q9tIPkE7\nqdkOidTpWdoZlnme1vop5/9/As5r47GEHsUYNdi/ez/ZS7NkL81yYPeBki/dH5sPhD4cKti5046X\nrNP1tVYsCtp+gBRSYF1QW5XMlStuVKK4UzSaT9DSsWqci7rtaPPwpFceepuRjkzaaq21UipQtEcp\ndQtwC8C2bds60Ryhw4RFlQRNrE6OT4ZP8HpDV9JpW3TGh1ecjLP/C6mXP8X66JdJr0Pmxyl4b3Bv\ns11yCnHSkpRxA9Q7F5Ha4ZWj9r5utW015LGF+rSzh/+0Uup8AOfvM0Ebaa0/rbUe11qPn3vuuW1s\njtANavXGgtL1jVGDQ+85xKH3HIoWwuk9lmOoFhfh2DE49rcv48zB/WT/78dY+Lcsxp3hqa2tyil0\nYnTQiJSx26YwNYVa79U7F5Ha0abs3lYK6sROi3IVbdtXLaLM7EZZgIupjNL5b8CHnP8/BNxdbx8S\npdM/uNE2YQlPU1Nab9um9chIAxXx6kR/+KsaBuUE1dp1KlX+zNBQ9OCSfL5cyhW0VqrB79UAbuTR\n1J88WDNypl752VpBNFHORd0IqFolJlugayUzqxoSYyRSDPuikzVtgQXgKeA08CPg94CXYEfnPAIc\nBl5cbz9i8LtEzJmR3psyKDt2aqraKKdSDRj9sGLePkMVxXDn81qPjdkPnxtvtI10o4Y67EEDWieT\ntvG/6KL4ap5HsQ+17G0UW+zWJG/6odWu0MxGa/C2izgfaDHsq6MGP65FDH4XaMON6ZdB8PfGtvuy\n8uPsBLqGamysvrHK522D7G1D
o0bZPZ5/P2FLHEY/qsFutocfG3FLLPRSfP8g9/DjWsTgd4E2DL0r\ndGw+ktDZT45VuXOa7uHHSFDPfPv26J/33qeplN2TV6q2wW9k/1GOW8s+1LK3m1Lupk1uoqaJ8yS2\nuC8x+EI02tRryt83pXPXJnR+NHi/TfnwY6bVHn6Q/fGOMMbGtN6ypfn912v7pjPYrdJLPfweQwy+\nEIn8ybztD31/zFa313pjIXh9+GHG2DXiIyOV20WxP+7+4/Thb0Ziu84G8klXHzH4Ql3yJ/OlKJpA\njZuWdt4fvbGgiWBvT32Q7M/UlO2Sanie42Repz9m6ymlp9D5y8P9d3W1lNpBH/yIUQ2+FEAZFEwT\n8wNXs2ff1aWY+Lmjc6wX7YDr9eJ6KeEpFrolBhMz/ph0FzdcumGdmS4zPQ2ve539t9HPfeITcOKE\n/Tfy500T67PTFRnP0/9uHfNI9bVmLpv1tZTixjQx79jNnsf2bTrpjWYQgz8IOBf1xPMWbV2ZP7+p\nM2npPWgNG81v8Zc7dOmVr+QmfI2Pl5ew5K+mjTYB8gpRzp+TfJXJnyDtPjQ1nHgZ3DD8ANN/XdkA\n63AELaWYMY/M2TWCd9i1goMeRP2EGPxBwLKwLlgv10LVa1grFpPjkyXtmqHEEJPj1VIF/UQzyZ/e\ncocjI7BtW2UZxW5SlVnsLIuL9vp6Rtp9HeUh6H/ARXrgOZnRxjIsHITtvzwblP3WBkXu+vp/xbx/\nutSIzKcshjbKH0+RjFbCsQWsEU+N4JT9uiE6lSEbE2Lw67HJftBAhofJ/OOWUi8rrYZKlZ/2795P\n7o059u/eX1OTJFAiYZOdm2ZVe906tI8+CqdO9Yaxh3B3E9jr/d8vyGhHfQjOzNgPuu3b4cYb4fjx\n8JFE6bIYvrmsmjmUYtvLRkmoRGm7otJMP/gJ2+hbFsb3N9h/ALI/hLEfwa61CyKeiebJ7Jos1QhO\nkmD4siuifzgu+YhO3kdRHP2dWnpu0rYfJh493yF/2Radu2Os4ckwfzr71NKUHXFxeSr43PTIJJg/\nW7Qffk4vYRPKtfIa/BOvjQZT+WUk/MepOsdTD+r8+7OlSdvkx5N6y0echDznb/ojSZ2/b6q04/yo\nPbnbKfmEqaUpnfhYovHjxRGJFtNFiUzaxkDULmEv93Q938F4+Ayz/3xlcE8+YFK3tAufYNUnvvEJ\ney7gmnXMUSrPTZtEsxrF6+5YXISbbrLX98E8cgmvu2lsrLxks/b6oO83MwPf+155lNKodL1l2WJq\nLt6RhGna8wIVt8zqDqxdl5QmbTfObPD2F4yx/RlK7p2C2sB6yaotyZnNYv3WRWU3SwcE0lafW6Wo\ni6XjzR2diya/HIfuf4eLxYjBr0WUH7RHDFwoNb5DyU1z/3TNSV2vOqKXQgqsVwBDQ+X9tnIBx/jg\nDFN87MF55JZw3U1Hj5aXQ4eif79Gg6kyGfvndkml7HXubXDiRPk993IbPmu45MpJb00zefWHmbly\niiFtrxtKDJH56bD9A01Oktn7P2urccbcwfJe36lEiqXHl6LVHIgjEq3DxWKUPRroDcbHx/XRo0e7\n3YxK6hVj3rPHNvYuuZxtUXoJ5zuYO4axXrJauoFcXfG0TrLzkQ0WX1X+SO6NOWaz5e9hLptM/+//\nzInnTpXWJYrw5f1gjGRtK+Mey6tZH/FGMO+fxtr/h2QeOYPxj0Mti9H7Nd2HfLuUGtvN49YcALsk\ngWFU3wbbtzujiNGyfn1yS5Jb33wrM2+ZwVw22X1gN+vFdVIkOfDlLRjfXS9dM+YopTDhK86/gtXn\n7OvWWKap66vud1o2sVYsHv/Z4yw+ulha778P2kIMF6NS6pjWerzedh0pgLKpMYzy7NaePdU/SiYD\n8/PlC7AXy7kZBuaoY+AfswtH7LxoZ9lNozZgyxbS62copCBdTNg9Lso3wvATT7PtxJM8cjGsJS
FZ\nhHc9DNarErD7CgzPsVhYaOgCNpdNJh65i8L4GeYvh4WDaxhzcy3dyK67w2+YoPKZND/fH+6dTuLe\nElC+LYaH7cvfvQ1mZpwHwWLZHbhxZoPV51YB201YygFhg7nXgvFdyqPC0QxHTh2hcLpQMsDzx+dZ\neHYnhn8EGcOP59ZiMJfN0nGj1ByIBe8JbTNi8KNQy0I0YeC6QVA5wfTWdLnaVGYvk199AOsX3ybz\nSBHj0bswl3/IRHrR3l4Dl0LqtB1FccXzR7jntacoqA3mn7yHheUd5bmBBi9ga8WikHB8qI6bKI6z\nGNaMIK9Tj/5sPY1/MLd3L6yuVt4G/qpmrgHNjGS491v3loz+0giYo2A8aXeavNerS+F0AWsEDO+T\nJeYOlluUx1qxSpFs/YT48KNQzy+9CRzD/ipFk+OTldWmbp7BeNGVzP7lGYxlMC8tMv3TL5VvOmeC\nbX0rXPIvCVZfP2qPDGh9Yi0zkimFxqXXIfNEslS+sB2VpKTGdjz4b4vV1erbIKiqmbt+1yW7Stut\nJWHu+otKnamgeaP01jSZXZOVfnOIPWDCGDWYzc72nbEH8eFHo0m/dEfw+/9q+ANd90xoz8U04frr\nMS8tMnGDk5CiKRl7gLROsjByK7xpR3kOYGu65dqi5rKJdXiOzAoYO23/i2na0TVuVEgqFR590vDx\nNqkPP8h/3s22tHJbuFIKbnZtKpGqKHLvdSeuPn6SzHYD4+ZyEoR5/zTWgbvI/LBojwwaaEDde6HN\nxH38qD78rsfee5eei8P30iOx5RX4Y3inplqP6Z2a0rmrVUXxEvVhSjHUU0tl5ay6Ze5aJEivvkdF\nNztCvRj4brWpldsi+/lsYFW0igMEXNP5k3md/kiyLMg2Gv3i6HaZxHYcH4nDj5l6bptuxOI7Y2pz\nFPb8+wLmA/+zfkhkvXbOzJD57dtJF+2QuUQRtHOVeCfdoP1D37AQwEGlVgx8t2jVmzk5Plk7BDPE\nnWqtWGWXYsoJHohycbhibl0shN7NQuxi8JvBbzSdsa15eB97PnN9WR+k3WQymJenmLgBW/zpbf9q\nJ0JBsHM6as7Am3awc/RtZH9ljNueGyv71zsVteBgGKVcnJrJRINCPz4Aw3z8JUImXIbPGia5xY45\nSeskmd231b84vGJup51dduCa9suSVLS9w/eU+PBrUPKz/XQY46HV8t3ld1xaFubhfSW/d1onWZg4\n2BHf4J59V7PvWU/c8EMw++T2Ulxcha/wjy3Ytw9z1I6Eybw6i/FHh6q+s983D0TzN25Wx/gmopd8\n+B3DNDEOIFt2AAAc40lEQVSPzGGN2No3UM4hSagEt/3Gbcy8JYLAkSdZwBwF653byfynmbbep/77\nae+Ve7nnwXtKbb/usus47/nntezLlzj8FvH+UPOnYeEwGPPzsHNnxRDTPDKHdRk8vrqFQuqMvVpt\nYK1YHTH4mV2TzP/5EgW9Zke4/DhVYexL3+H4PAs79sI3UkxcYytnzqslFpbNinYGDTcjuW1aDG6X\nZ0U0Ohiy3TOYozBx4giFZwvMHzxSkUNS1MUKN2NNPDkzxpNpjDfPQJvvUf/9ZC6bFW3/ysNfoaiL\n9v3ZYuBDFMSlE0LFD7XVkRBwDb0zxDQvTzHxwiX2PbvI0iu3kHKc3Z0cphmjBgu/vZ/cOVkW/i2L\ncWfZ71FlvF+yinXLriqZZC/+8M3I36MFSYVeV6cQuktYDon7N/I12oWiPP776dXnvLrkzkluSVZo\n+HTCly89/BAqEkZOQ+YxIJGAK66wx9KWhXXZ4xQcd8qa3iD7yiyXvOiSjod6uVmCLqVwtrOGK5Kr\nMiMZGIH5g0dKQ8rhs4ar9tVU4kkLGceNJkLJaGBwMJdNHv/Z46QSKdaL66UcksnxyebCGj1DpKZC\nI5u4+HZetBOwJSLuefAeNs5skFAJ3vXqd7H4yGJns3qjhP
J0aum1sMxS2OH0jVonk8GhYV0M7woi\nUMrYFzo5tTSlkx9Pxt9uvx5xAx+LGk3abxLHQjjea3nozqH46tzm8xWSzZHvgQYvvop78WNDOnv7\nRVUhqHGFNiNhma1TCjv8+Xmw4ZTi8bgr6kYYtJmgoiT+4e/qw8erfPCrz62ycSaeLNkqjhyx9Ygb\n8M00MtLusJqs0EW817KbnGWtWK2V53T8h9bJxXKd3aj3QIMXX8W9qNd4+iencBRESj36Tmf1isGP\nQo1c/G6lYbsTsn4Z1yqZgk8frjK8TfvpQ9pReui0YI2jxnM3K4vQyyULhGCali2uxdwcFApkHqNc\nAU4nS2KBtRsUcPHVuLAq2n/aruVbTEBSK/ZeubcrGb7iw49CDwqkhSVvWCsWe/91O6snjpF5DIzl\n9SqneFwCUUFRQMZnhuzsIK9Gfow081OIOubmxHudemWL3eu94evWNGFpyd73Mix8ZYsd6vnIBsaT\n98DwDnu7sIvLf/FBzQur1P7Dczz+4FdZvNSO4ttQOnpkUcyIwY9Kj8XD+VUIh88aLsf7nj3Ewo9T\ntrEP6Qb7J3qboeqh84vjGG5eRxvzOxr9KUQdc/MSq2yxL1XZKFyI8ZdufYcCfPzj8J3v2O7bsJ6B\n9+Lbs6fuhVVq/wumObJyNwW1UWp/N/R8xKXTLVr0MfjnD1afW63wF1q37Gp7+Jl3yJpgC8Pf+Idy\nxZFeyPt3EHXMzU+U+bKgOa0K/BfCe95Tfp1Kwbe/HThXF0oDF5Zx8wwLEwdL7QcCXbJtJ8rMbqeW\nXovSaRttCDXJ3zdVFpPqYMTQ1NKUTn40USli1YMhNL2ofSfER6SIuaAoMvfCyGYrVfqSSVuMsN5F\n0+SFlTuUqy0a1yBIlE4PE2Vys5ERgGlivPceFr64Qe5YgoULnQmhDsxUrj63ygaVxUvYvr3nHOWb\noGSB0AJ1BcnciZzFRTuSzMW9MCYny731RALe9S6455762YBNXlhxBk40ghj8MNppLOsNBRtNPXUe\nIMYyzP5F0db9cYu67ttn/22T0a+KCvJIOwhCOwi6Nesa0ChFjNzY4C9/2d7Gu70rYBQTXQvpjjIM\n6NTSMy6dMJdLXH6BeglKjhB8fhSdewc6//5s4+31D1GzdfbRAvmTeZ2bzdrtFJ+J0EZqeUNrJjE1\nmt3nLTwA9ut617bfPnTQj0hEl07Xjbx36RWDn39/1ja0rj/a/dEiXDB1M+ei7Cef1/nLUzo9RSlL\nr65P3n9xddDg12vW2JjW556r9chIw0m4glCBvyhOLtdAIZ6oBjio8k696jvtKEbUAGLwmyR/Ml9O\nuZ5C5y9PlS+UOj9+pImjCPvRWuvcrK8S0Gy2sd6Ct5cSpXfSBvL5siKFd+mFSk3C5qTKrv7Jg/HL\nm3gP4i61jHY+r/X27ZXb+1+3uVRbVIMvPnwf1opVTrlOYYc3GkakEKxIlWwihnJldnkqAW1sITP7\nV5U+/XpzDG71kFzO/tsFn7pllaPcvPRQxKbQYVotSu+X4Vi96P74q0d5DzI1VTu82Z1vO3GivC6d\nhle/GpLJ8uteiQWO8lTo1NIzPfywHoN3SBgwPJxamqrolXvrv1YepM7Q0nk/P32jzl2zpexa8rpn\nmhguTk3ZHY+pkGbFjfTwBS/tqMkbl4Bh0+52/4jdvcHc+zOR6MgNh7h0Aoj4qzbrh48ltta770Si\n2lomk9X++QjDxampyo900uiLD3+wca+Bs8+uvpzj8HTkT+Z19vPZptU0W0qLCfpwRLdtnPSMwQfe\nDiwDjwIfqrVtWw1+nMlOIT9oLL0N377zl20pTyBv2WJb6ia+S5CLURDaTdgoL6yH7waYjY2Vl3qd\nhFbvu5btc1B0Toc1vKMa/LZq6SilEsA+4K3Aj4BvKqVMrfUPYj9YvcIEcQmqmCY8/rgtDra2VuGf\niyRKVq+dnkIi5uUpJq
47Q0GdYX48wd6XX8fqtlUy/+d+jHPPhXPOgQ9/ONL3MIxKN6OEyQudIGge\n5+yz4Td/07513vc++MAHYHTUri30yU9WyN2UWFoKn4oKmjuruvdq3Hct1O6x8Ys79aDYYokoT4Vm\nF+DXgQc8r28Hbg/bvukefsRQx5afut59pFKN+yeixvc7r/2ROomPBUgYNOCbiduH7+2NiatGCCKo\nhz81Ve1ixBnABo0E6vW86/bwI9z7jfrw4ypcEhf0gksHuBH4jOf1zcCsb5tbgKPA0W3btjX3baOO\nyVpNhGh17BcYRBx+MXovZLdCVWl+4B3x+2YaOT31clNEu0ZwcX3427aVOxt+F2M9o18vsriWAQ7M\nq2nl+zThQgpsX4w3yaYx+N6lnT38WJ7IQckVEX+w/Mm8zn5yTGd/x4m6iTjB47Z7ammqfJE12cNv\n5Ku1kpvSBRemsMkI6uGnUvb6KD78esnqpe3C8mpaoNHgjMAHRMw3SVSD3249/B8DF3pe/6qzLl7q\n+MyqCnU0q13hPc7wsC2uFKGqhrlssvvAbtaL63ApLI0k2D+yF8Pd3nUgJpP2fr2H9OjW77hghz0/\n8H+exth6EqYMW7cmBhqd4shk4DOfqfS3ujVPRH9eqId72X7+87B1q+3Dn5yMXtBm9+6yEneFf9/n\nqw/KqzFavBj9tSjqCZ8FzjFYdOUmaXfi1TeBVyqlLlFKpYB3A+1R8aqhWhcpIarR46yuYl5YYM87\nwLywtna2tWLZxt5hTRWxfnHcTpwC2LvXVujb2LAfIiEZKcaoQWYkg/XvzsP80kxsxh6iS3u7iTNz\nc/AHfwDZLIyN2X/dm64V/Xl3/+Pj9tJsgo7Q+8zMwKlT8OijcOhQdHtnWWVjD3anw7IIFB2sElXb\nNdlyuxsVPgsUdutWkYYow4BWFiAL/BBYAaZrbduusMy4kjMq9nnflE5PO1o30+j8feGulfzJvE7d\nmSoNAYc+lrSHlu5wLmJcfTu+R8X+I+SDRU2cacY9GTQvEMV/KwwW+bx97VVdHzXCpTs6wRpw8Q+E\nD7/RpZ1x+HH/6M348UrJIe+3DXx+FJ2dQGd//9zKB0DIBRB30YTQtob4R4P89nHmlITNC8R9HGHz\nE3iNxukX9xjjhmzH1FQ5YbKDE1hi8NtMS71tRw0zNU1lr7+OvHC7e/hO0yp62cmkPXE2MmJny3qj\nKOKWSJAevhBEvfDfCuM/9WDrvWbPgyN/eao86fuRZM2RfGAMaod6KmLwO0ArowZ/jH3UHnvQMUPC\n+Ju65mv1st2lnRIJ/kxLie8fbKKE/wa6d1rBcxPk3uG7R69NhB/Af/MkamwbM2Lwe5wqv/6dETTv\ng/YTECkaJQfNNaojI1pfdFE5ujOsl+1dRJZB6BT1pOnb4mb09/Dv8CU81srzcW8+tyZuhxCD3wXc\nBJNzzrH/1nu4tyr6pHWwWF+ti9/fI/IuXqPvPhCCdFA6eB0LA07LPfxmh7teH/59Uzp3baIyf6bG\n5/Lvz9oV4DqYhSsGP4B2ztYHue+SyfaP6Brt4ddy2QT13F3j7x8JCEKnaMiH7zf2bZjErblZB+bZ\ngohq8NudeNUzxJZ8FUKQSNTGhhMfPGrWFlQrNdIsF0uOmIUSlHO2Y0e4blMmA/feWxnH7N1X0DpJ\nmhK6Sb1r0DDAwEm4IgM4G8eZAWh49ul97SOSkFs3ifJU6NTSzh5+u0Mag3r4iWQxegm2tsw+hR8q\nzIcvCJuOgJ58/mRe5+4Y0/nLtsTXw49Y01p6+D1Ao+nQjWIYcPAgfGDqZ6w8vg7DT5C46m6Ov6BA\n4ZkIT/yw9ME2dK+l1y70Fb6evHlkjonjSxQSa8xfBwtswbhub2sXvWVhXljAegVkHitgTE/b6337\njCSR3kUGxuB34ocwDLCSd7Dvm/sAWAd4Zoz01nT9B43f1+IK0wiCUBufoL119tOV+jkX
n8FYXW3p\nEOaOYSZeCIWtMP8GWDh4AmNiIlBDy6t/1WsMVBFzY9RgNjvb1h8jM5IhrYYASK/D5P/6HgsX7q2v\nu2EYcOCALUrjFaYRBKE23qLje/eSyX+PtNNvShRh+EyyqvPUaDF16yWrFLba/xdSYL2C8rxARPw6\nUV3Riori9+nUstnDMl3i1t8WBCEiThja1FvQyTscrauPDVUlKvqzyRsqozhNtBBN7+dr5LfEkbFO\nRB/+QPXwo2Ium+xZ3IO53Nyj19g5yezfpjGW6awSniAMOo4K5eoQbCTsVQW9VqGQa1mVst4bG7C4\nWBLYDKRCIXN0CmNXrqYkuh//Mb2srzc0UGiJgfHhRyWW8M1ma1rWq3crCEJtnHsvc2SOebVEQa9V\nzZ0F1XKASg9N0G1Y4Zu/ubFmhR0TIJXqXJ9Q2aOB3mB8fFwfPXq06c+byxHj3WuwZ3FPadIVIPfG\nHLPZ2abbFBlXy9utpNxA70EQhGpq2QM35eXpp+HECdsQp9N2aQq3rlHct6H3mC7nnRe98EstlFLH\ntNbj9bbrmx5+XIlV7Q7fDEXKRAlCrNSKlvGGJnsH1u28DXshHLpvfPhxVbVqtJpNbMRQAcc07SJa\nUiFKEKLjLZbXrUJUnaJvDH5gGbEmccM3WTZiMaCRJoG9oWXOOLIRAx5Q3U0QhAYJuA37qyMVJZSn\nU0urYZlxiqPFobvkqmEO3TnUcKp1o8cPqe4WZxU1QRg4WrUD9YTf4oJBDMuMM7EqyJfXCO6cwuKj\ni6wVnay/BlxNjR4/aCgqvX5BaI1W7IBpwk032SGfx47Zf2+6qbv3YV8Z/Dhp1ZfnnVNwacTV1Ojx\ng4airT60BGHQacUOBMXeuxJZ3WLgDX6Yfz3IgDaCd04hlUiRvTTb0CRwmC+xVjq4d/IJ+n8CShDa\nTVQ7MP2ph3jdtUeY/tRDpXWZjC2J5aXbEll9FYcfRM1YXE8oZ3prOvaonDjyAtyQseFh+OQnyz2G\nVAo++EFYXa2dpyW5XILQXqY/9RCf+P3tcPr5sPWXTP1/J5j5f3YAlbH3ccXcBxE1Dr//DL7Hwpmj\nlA26TrIwcivGzTOlTbuWZBURby5WMlldYMVdJ3lagtA9XnftEU785c7S6+3XHOF7f7EzdPt2ENXg\n95dLxzdLaR2eK8fmqw3m/vj77Ln6sZI7JM5Qznbg9cFvbMAWz6+1ZUv5ASD+eUHoHkb2ebD1l/aL\nrb+0X/cofZNpC1TNUmZWYP4FSQpqg9SJa1k6+gXW9POZP+L2iNuvkd+KW8cn883evXD8uP3eFVdU\npoCLf14QuoPtvnkIc/HfMLLPK7lzepG+cumY0w9h3fVtMsVFjPQSLCxgrj6EdeAuHv/G/2Dx5+8r\nbZvL2ROcVfuIwe/u3VercwS1fPDinxcEAQZRS8eEiXt2UCjuYD75H1nYexzD2IGBgTG8A7P4FEeW\nNiisJUN7xHEXOo+joHEt/Y1e0OYQBGHz0Dc+/ApvzsYQ1qpnWGUYGIcmWdifJJd9jIWdc3aVe/8+\nYtLjcen1OQJBELpLp2Ub+sbgR4k5NzCZPfI6jMX3BqaeNmqg68bFd0uITRCEnqcbmfB949KJVHOk\njvZpI4XOTRN27y7XHF9aCi5DW0ui1Y3RhfbF5wqC0Jt0QxG9bww+RPBp+8NeAoYBUSvOW1bZ2EM5\nZbqR4lZRHhiCIPQnEcxR7PSNSycSreoleMhk7GxXl0ZTpsMeGIIgbG6i+uVjNEeR6auwzE7TikvG\n38MfGpIeviBsdrpVqXTgwjJbosmA9lbCIg0DDhwQH74gbBaimIler1QqBt95JJsXFrCe+FMyq7dV\n6O20E4mjF4TNgbfnPj8f3nPvhl++EQbLhx+EZWFeWGDiBtg3VmRi5e7apQgFQRg4otaW6IZfvhHE\n4GcyWK9KUHAmYAtqo+WEK0EQeoc4kpsaqS3hr0vR
S7Rk8JVSu5VS31dKnVFKjfveu10p9ahSalkp\n9bbWmtkgDf7CmV+5gnQxAUhGrCBsVoJu+7iSm3q95x6VVn34J4DrgTnvSqXUa4B3A68FXg4cVkq9\nSmtdbPF49YnqbPNsaxQKLFyeYu4/XAEvPa/tTRQEIV7Cbvs4J1H7Yc6tpR6+1vphrfVywFvvBL6o\ntV7TWj8OPAq8qZVjRcb/C8/NRdt2bZ0jq99h8dFFJg5OiB9fEDYRYT52KfNZSbt8+BcAT3pe/8hZ\n1378hSQPHw4fx3muButVCQrKrigSh3CaIAidI8ywt8sV02nRs7ioa/CVUoeVUicClnfG0QCl1C1K\nqaNKqaM/+clPWt+hYcBVV5Vfr69HmlLP7L4tdmVLV1zt0kvhpS+1/4YJrQmC0Dx+ww5lYUOIdxK1\nG6JnsaG1bnkBjgDjnte3A7d7Xj8A/Hq9/YyNjelYyOe1Tqe1BvtvPh/tYyfzOncop/Mno21frwmp\nlN0E/zI0FLlJgiA0iP/ei/t+y+Uq7+dcLr59NwtwVEew1e1y6ZjAu5VSQ0qpS4BXAv/QpmNV0+Q4\nzhg1mM3OxiJj7NfK8SK6OYLQPtqtU7WZ5wVaDcu8Tin1I+DXgUNKqQcAtNbfB/YDPwC+CuR0JyJ0\nvHQ5GNYvrualUaE1QRCi06qwYT02c4imiKe1EVdcbXkZfvELeMELYHRUdHMEod0MWq2JqOJpYvAF\nQRA2OVENfn9LK2zW2ClBEIQ20L8Gf1PHTgmCIMRP/xr8qPJ2giAIA0L/GvzNHDslCILQBvq3AIob\nO9VEJStBEIR+pH8NPvSHvJ0gCEJM9K9LRxAEQahADL4gCMKAMDAGX0LyBUEYdAbC4EtIviAIwoAY\nfAnJFwRBGBCDLyH5giAI/R6W6SAh+YIgCANi8KEyJH/QpFMFQRBggAy+i2nC7t3lijhLD5xm/23f\nwpjZ0d2GCYIgtJmB8OF7qSp/VtyKdde3JXRHEIS+Z+AMflX5M54jU1yU0B1BEPqegTP4hgEHDkB2\n7J/Ibvkr9nMTRnpJQncEQeh7Bs6HD+4E7svAPA3WNsgsYGIwdzU8/TT8/OewsQHveQ/MzHS7tYIg\nCPEwkAa/hBO6Y5pw002wtlb59ic+Yf8Voy8IQj8wcC6dICyr2ti7yFyuIAj9ghh8bPf90FDwexKj\nLwhCvzDYLh0Hw4D9++1kLPHhC4LQrwyswTeXTawVi+FTN7P6/R1kMnDoULdbJQiC0D4G0uBP//U0\nd//93Wz84B1wcDuchvl5W29HXDiCIPQr/evDD6l4Yi6b3PV3d7FxZgNW3gqnnw+IbLIgCP1Pfxr8\nGhVPrBWLoi7aL0a+Blt/CYhssiAI/U9/GvwaFU8yIxnSW21x/MRli9z44S+Ty4k7RxCE/qc/ffiZ\njO2ULxSquu7GqMHCDQtYKxaZkQzGqFh5QRAGA6W17nYbSoyPj+ujR4/GszPTlIongiAMBEqpY1rr\n8Xrb9WcPHyorngiCIAh96sMXBEEQquh7gx8SnSkIgjBw9LXBrxGdKQiCMHD0tcGvEZ0pCIIwcPS1\nwc9k7KhMkMQqQRCE/o3SwQ7SWViQ6ExBEARo0eArpf4bcC2wDqwA/1Fr/XPnvduB3wOKwH/WWj/Q\nYlubQqIzBUEQbFp16XwN2K61vhz4IXA7gFLqNcC7gdcCbwf+RCmVaPFYgiAIQgu0ZPC11pbWesN5\n+SDwq87/7wS+qLVe01o/DjwKvKmVYwmCIAitEeek7X8C/sr5/wLgSc97P3LWCYIgCF2irg9fKXUY\neFnAW9Na67yzzTSwAXyh0QYopW4BbgHYtm1box8XBEEQIlLX4Gutd9V6Xyn1H4BrgKt0WYntx8CF\nns1+1VkXtP9PA58GWzytfpMFQRCEZmjJpaOUejtwK2BorQuet0zg3UqpIaXUJcArgX9o5ViCIAhC\na7Qahz8LDAFf
U0oBPKi1fq/W+vtKqf3AD7BdPTmt3TJTgiAIQjdoyeBrrS+t8d4MMNPK/gVBEIT4\n6KkCKEqpnwCnmvz4OcCzMTanXUg742MztBE2Rzs3QxtB2hnGRVrrc+tt1FMGvxWUUkejVHzpNtLO\n+NgMbYTN0c7N0EaQdrZKX4unCYIgCGXE4AuCIAwI/WTwP93tBkRE2hkfm6GNsDnauRnaCNLOlugb\nH74gCIJQm37q4QuCIAg16AuDr5R6u1JqWSn1qFLqQ91uj4tS6h+VUt9TSh1XSh111r1YKfU1pdQj\nzt8XdaFdn1VKPaOUOuFZF9oupdTtzrldVkq9rcvt/KhS6sfOOT2ulMp2s51KqQuVUn+jlPqBUur7\nSqnfd9b31Pms0c6eOZ9KqbOUUv+glPqO08aPOet77VyGtbNnzmUoWutNvQAJ7OIrrwBSwHeA13S7\nXU7b/hE4x7fubuBDzv8fAu7qQrt+E/g14ES9dgGvcc7pEHCJc64TXWznR4EPBmzblXYC5wO/5vz/\nK9h1IV7Ta+ezRjt75nwCCjjb+X8r8BBwZQ+ey7B29sy5DFv6oYf/JuBRrfVjWut14IvYevy9yjuB\nzzn/fw54V6cboLX+OvDPvtVh7epabYOQdobRlXZqrZ/SWn/L+f9fgIexpcB76nzWaGcYHW+ntvlX\n5+VWZ9H03rkMa2cYPVMfpB8Mfi9r72vgsFLqmCMDDXCe1vop5/9/As7rTtOqCGtXL57f9ymlvuu4\nfNzhfdfbqZS6GHgDdo+vZ8+nr53QQ+dTKZVQSh0HngG+prXuyXMZ0k7ooXMZRD8Y/F7mN7TWVwDv\nAHJKqd/0vqnt8V7PhUn1arscPoXtvrsCeAr4ZHebY6OUOhs4COzVWv/C+14vnc+AdvbU+dRaF517\n5leBNymltvve74lzGdLOnjqXQfSDwY+svd9ptNY/dv4+A3wFexj3tFLqfADn7zPda2EFYe3qqfOr\ntX7audnOAPdSHhp3rZ1Kqa3YRvQLWusvO6t77nwGtbMXz6fTrp8Df4NdE7vnzmVQO3v1XHrpB4P/\nTeCVSqlLlFIp7OLpZpfbhFLq+UqpX3H/BzLACey2/a6z2e8C+e60sIqwdvVUbQP3xne4DvucQpfa\nqZRSwJ8BD2ut/8jzVk+dz7B29tL5VEqdq5R6ofP/84C3AifpvXMZ2M5eOpehdGOmOO4FyGJHHaxg\nl17shTa9Antm/jvA9912AS8BloBHgMPAi7vQtgXsIedpbH/i79VqFzDtnNtl4B1dbuf9wPeA72Lf\nSOd3s53Ab2C7GL4LHHeWbK+dzxrt7JnzCVwOfNtpywngw876XjuXYe3smXMZtkimrSAIwoDQDy4d\nQRAEIQJi8AVBEAYEMfiCIAgDghh8QRCEAUEMviAIwoAgBl8QBGFAEIMvCIIwIIjBFwRBGBD+f5KG\nQdp+aHAwAAAAAElFTkSuQmCC\n",
      "text/plain": [
       "<matplotlib.figure.Figure at 0x1197209b0>"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    }
   ],
   "source": [
    "# Scatter plot of regression results.\n",
    "# Red: training targets, green: held-out test targets, blue: model predictions.\n",
    "# NOTE(review): the factor 366 presumably rescales a normalized date feature\n",
    "# back to day-of-year -- TODO confirm against the data-loading cell above.\n",
    "m1 = plt.scatter(366 * X_train[:, 1],y_train, c='r',s=10)\n",
    "m2 = plt.scatter(366 * X_test[:, 1], y_test,c='g',s=10)\n",
    "m3 = plt.scatter(366 * X_test[:, 1], y_pred,c='b',s=10)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### 分类模型"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 34,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "class GradientBoostingClassifier(GradientBoosting):\n",
    "    \"\"\"Classification variant of the GradientBoosting base class.\n",
    "\n",
    "    Configures the base booster with regression=False and one-hot encodes\n",
    "    the integer class labels before fitting, so the base class fits the\n",
    "    ensemble against per-class residuals.\n",
    "\n",
    "    NOTE(review): the `debug` argument is accepted but never forwarded to\n",
    "    the base class -- it is currently ignored.\n",
    "    \"\"\"\n",
    "    def __init__(self, n_estimators=200, learning_rate=.5, min_samples_split=2,\n",
    "                 min_info_gain=1e-7, max_depth=2, debug=False):\n",
    "        # `min_info_gain` maps onto the base class's `min_impurity` threshold.\n",
    "        super(GradientBoostingClassifier, self).__init__(n_estimators=n_estimators, \n",
    "            learning_rate=learning_rate, \n",
    "            min_samples_split=min_samples_split, \n",
    "            min_impurity=min_info_gain,\n",
    "            max_depth=max_depth,\n",
    "            regression=False)\n",
    "\n",
    "    def fit(self, X, y):\n",
    "        # One-hot encode labels so the booster computes residuals per class.\n",
    "        y = to_categorical(y)\n",
    "        super(GradientBoostingClassifier, self).fit(X, y)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 35,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "def to_categorical(x, n_col=None):\n",
    "    \"\"\"One-hot encode a 1-D array of integer class labels.\n",
    "\n",
    "    Parameters\n",
    "    ----------\n",
    "    x : array-like of shape (n_samples,)\n",
    "        Class labels in the range [0, n_classes).\n",
    "    n_col : int, optional\n",
    "        Number of output columns (classes). Defaults to max(x) + 1.\n",
    "\n",
    "    Returns\n",
    "    -------\n",
    "    ndarray of shape (n_samples, n_col)\n",
    "        Row i has a 1.0 in column x[i] and 0.0 elsewhere.\n",
    "    \"\"\"\n",
    "    # Fancy indexing below requires integer labels; casting also accepts\n",
    "    # float-typed label arrays (e.g. labels loaded as float64).\n",
    "    x = np.asarray(x, dtype=int)\n",
    "    if not n_col:\n",
    "        n_col = np.amax(x) + 1\n",
    "    one_hot = np.zeros((x.shape[0], n_col))\n",
    "    one_hot[np.arange(x.shape[0]), x] = 1\n",
    "    return one_hot"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 38,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/Users/haxu/anaconda3/lib/python3.6/site-packages/ipykernel_launcher.py:42: RuntimeWarning: overflow encountered in exp\n",
      "/Users/haxu/anaconda3/lib/python3.6/site-packages/ipykernel_launcher.py:42: RuntimeWarning: invalid value encountered in true_divide\n"
     ]
    }
   ],
   "source": [
    "from sklearn import datasets\n",
    "from sklearn.metrics import accuracy_score\n",
    "data = datasets.load_iris()\n",
    "X = data.data\n",
    "y = data.target\n",
    "\n",
    "X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.4)\n",
    "\n",
    "clf = GradientBoostingClassifier()\n",
    "clf.fit(X_train, y_train)\n",
    "y_pred = clf.predict(X_test)\n",
    "accuracy = accuracy_score(y_test, y_pred)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 39,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "0.93333333333333335"
      ]
     },
     "execution_count": 39,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "accuracy"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.1"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
