{
 "cells": [
  {
   "cell_type": "code",
   "id": "initial_id",
   "metadata": {
    "collapsed": true
   },
   "source": [
    "def calculate_gini_impurity(class_counts):\n",
    "    \"\"\"\n",
    "    计算给定类别的计数所对应的Gini不纯度。\n",
    "    \n",
    "    参数:\n",
    "    class_counts (list or tuple): 每个类别的样本数量或比例，如 [正类数量, 负类数量] 或 [正类比例, 负类比例]。\n",
    "    \n",
    "    返回:\n",
    "    float: Gini不纯度值。\n",
    "    \"\"\"\n",
    "    total = sum(class_counts)\n",
    "    if total == 0:\n",
    "        return 0  # 防止除以零的情况\n",
    "    \n",
    "    probabilities = [count / total for count in class_counts]\n",
    "    gini = 1 - sum(p ** 2 for p in probabilities)\n",
    "    return gini\n",
    "\n",
     "# Example data: 20 positive samples and 80 negative samples\n",
     "positive_samples = 20\n",
     "negative_samples = 80\n",
     "class_counts = [positive_samples, negative_samples]\n",
     "\n",
     "# Compute the Gini impurity\n",
     "gini_impurity = calculate_gini_impurity(class_counts)\n",
     "print(f\"Gini Impurity: {gini_impurity:.4f}\")\n",
     "\n",
     "# Another example: a perfectly pure dataset\n",
     "pure_class_counts = [100, 0]  # all samples belong to the same class\n",
     "pure_gini_impurity = calculate_gini_impurity(pure_class_counts)\n",
     "print(f\"Pure Dataset Gini Impurity: {pure_gini_impurity:.4f}\")\n",
     "\n",
     "# Proportions work as well as absolute counts\n",
     "proportions = [0.8, 0.2]  # equivalent to the first example (Gini is symmetric in class order)\n",
     "proportion_gini_impurity = calculate_gini_impurity(proportions)\n",
     "print(f\"Gini Impurity from proportions: {proportion_gini_impurity:.4f}\")"
   ],
   "outputs": [],
   "execution_count": null
  },
  {
   "metadata": {},
   "cell_type": "code",
   "source": [
    "from sklearn.datasets import load_iris\n",
    "from sklearn.tree import DecisionTreeClassifier, plot_tree\n",
    "import matplotlib.pyplot as plt\n",
    "from sklearn.tree import export_graphviz\n",
    "import graphviz\n",
    "# 加载Iris数据集\n",
    "iris = load_iris()\n",
    "X, y = iris.data, iris.target\n",
    "print(X.shape, y.shape,iris.feature_names, iris.target_names)\n",
    "# 创建决策树分类器实例并拟合数据\n",
    "clf = DecisionTreeClassifier(random_state=1234,max_depth=3)\n",
    "model = clf.fit(X, y)\n",
    "dot_data = export_graphviz(clf, out_file=None, \n",
    "                           feature_names=iris.feature_names,  \n",
    "                           class_names=iris.target_names,  \n",
    "                           filled=True, rounded=True,  \n",
    "                           special_characters=True)  \n",
    "graphviz.Source(dot_data)\n"
   ],
   "id": "7c489ec384fc0867",
   "outputs": [],
   "execution_count": null
  },
  {
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-01-09T12:26:48.871305Z",
     "start_time": "2025-01-09T12:26:16.527433Z"
    }
   },
   "cell_type": "code",
   "source": [
    "from sklearn.base import BaseEstimator, TransformerMixin\n",
    "import pandas as pd\n",
    "import numpy as np\n",
    "from sklearn.feature_extraction.text import TfidfVectorizer\n",
    "from sklearn.preprocessing import MultiLabelBinarizer\n",
    "from sklearn.model_selection import train_test_split\n",
    "from sklearn.pipeline import Pipeline\n",
    "from sklearn.multioutput import MultiOutputClassifier\n",
    "from sklearn.linear_model import LogisticRegression\n",
    "from sklearn.tree import DecisionTreeClassifier\n",
    "from sklearn.metrics import classification_report\n",
    "from collections import Counter\n",
    "from sklearn.neighbors import KNeighborsClassifier\n",
    "from sklearn.ensemble import RandomForestClassifier\n",
    "from sklearn.naive_bayes import MultinomialNB\n",
    "import jieba\n",
    "import re\n",
    "from sklearn.base import BaseEstimator, TransformerMixin\n",
    "import pandas as pd\n",
    "import numpy as np\n",
    "from sklearn.feature_extraction.text import TfidfVectorizer\n",
    "from sklearn.preprocessing import MultiLabelBinarizer\n",
    "from sklearn.model_selection import train_test_split, cross_val_score\n",
    "from sklearn.pipeline import Pipeline\n",
    "from sklearn.multioutput import MultiOutputClassifier\n",
    "from sklearn.linear_model import LogisticRegression\n",
    "from sklearn.tree import DecisionTreeClassifier\n",
    "from sklearn.metrics import classification_report\n",
    "from collections import Counter\n",
    "from sklearn.neighbors import KNeighborsClassifier\n",
    "from sklearn.ensemble import RandomForestClassifier\n",
    "from sklearn.ensemble import ExtraTreesClassifier\n",
    "from sklearn.naive_bayes import MultinomialNB\n",
    "import jieba\n",
    "import re\n",
    "\n",
    "# 初始化参数\n",
    "df = pd.read_excel('D:\\python\\pytorch-learn\\demo.xlsx', sheet_name='demo', usecols=[0, 1])\n",
    "meta = pd.read_excel('D:\\python\\pytorch-learn\\demo.xlsx', sheet_name='meta', usecols=[0, 1], dtype=str)\n",
    "\n",
    "# code  = pd.read_excel('D:\\python\\pytorch-learn\\code.xlsx',sheet_name='基础编码',usecols=[0,1,2,3,4])\n",
    "df['code'] = df['code'].apply(lambda x: x.split('-'))\n",
    "all_labels = [item for sublist in df['code'] for item in sublist]\n",
    "label_counts = Counter(all_labels)\n",
    "rare_labels = {label for label, count in label_counts.items() if count == 1}\n",
    "jieba.add_word(\"1.5米\")\n",
    "jieba.add_word(\"1米\")\n",
    "jieba.add_word(\"2米\")\n",
    "jieba.add_word(\"1.8米\")\n",
    "jieba.add_word(\"线\")\n",
    "jieba.add_word(\"100w\")\n",
    "jieba.add_word(\"88w\")\n",
    "jieba.add_word(\"66w\")\n",
    "jieba.add_word(\"67w\")\n",
    "jieba.add_word(\"80w\")\n",
    "jieba.add_word(\"200w\")\n",
    "jieba.add_word(\"60w\")\n",
    "jieba.add_word(\"45w\")\n",
    "jieba.add_word(\"cc\")\n",
    "jieba.add_word(\"120w\")\n",
    "jieba.add_word(\"88w\")\n",
    "jieba.add_word(\"荣max\")\n",
    "jieba.add_word(\"30w\")\n",
    "jieba.add_word(\"65w\")\n",
    "jieba.add_word(\"op\")\n",
    "jieba.add_word(\"oppo\")\n",
    "jieba.add_word(\"mi\")\n",
    "jieba.add_word(\"vo\")\n",
    "jieba.add_word(\"v\")\n",
    "jieba.add_word(\"240w\")\n",
    "jieba.add_word(\"pro\")\n",
    "#  '14' '15' '150' '150w' '1516' '16' '18w' '1米' '2' '200w' '20w' '22.5' '240w' '25w' '29w' '29wtypec' '2a' '2米' '3' '30w' '33w' '35w' '4' '40w'\n",
    "jieba.add_word(\"150w\")\n",
    "jieba.add_word(\"22.5w\")\n",
    "jieba.add_word(\"18w\")\n",
    "jieba.add_word(\"30w\")\n",
    "jieba.add_word(\"33w\")\n",
    "jieba.add_word(\"35w\")\n",
    "jieba.add_word(\"40w\")\n",
    "jieba.add_word(\"29w\")\n",
    "jieba.add_word(\"typec\")\n",
    "jieba.add_word(\"tpc\")\n",
    "jieba.add_word(\"大壳头\")\n",
    "jieba.add_word(\"头\")\n",
    "jieba.add_word(\"10a\")\n",
    "jieba.add_word(\"6a\")\n",
    "jieba.add_word(\"5a\")\n",
    "jieba.add_word(\"85w\")\n",
    "jieba.add_word(\"max\")\n",
    "jieba.add_word(\"usb\")\n",
    "jieba.add_word(\"d\")\n",
    "jieba.add_word(\"g\")\n",
    "jieba.add_word(\"plus\")\n",
    "jieba.add_word(\"18w\")\n",
    "jieba.add_word(\"12w\")\n",
    "jieba.add_word(\"55w\")\n",
    "jieba.add_word(\"90w\")\n",
    "jieba.add_word(\"55w\")\n",
    "jieba.add_word(\"44w\")\n",
    "jieba.add_word(\"10w\")\n",
    "stop_words = [\"【\", \"】\", \"+\", \"（\", \"）\", \"(\", \")\", \" \", \"*\", \",\", \"/\", \"=\", '-']\n",
    "\n",
    "\n",
    "#stop_words=[]\n",
    "def chinese_tokenizer(context):\n",
    "    # 使用正则表达式去除自定义停止词\n",
    "    for stop_word in stop_words:\n",
    "        context = context.replace(stop_word, ' ').lower()\n",
    "    return list(jieba.cut(context))\n",
    "\n",
    "\n",
    "vectorizer = TfidfVectorizer(\n",
    "    analyzer=chinese_tokenizer,\n",
    "    token_pattern=None,\n",
    "    lowercase=True,\n",
    ")\n",
    "\n",
    "md = ExtraTreesClassifier(criterion=\"entropy\", random_state=42, max_depth=None, max_features=None)\n",
    "pipeline = Pipeline([\n",
    "    ('tfidf', vectorizer),\n",
    "    ('clf', md)\n",
    "])\n",
    "\n",
    "# 创建一个新的 DataFrame 来保存稀有类别的样本\n",
    "df_rare = df[df['code'].apply(lambda x: any(item in rare_labels for item in x))]\n",
    "\n",
    "# 从原始 DataFrame 中移除这些稀有类别的样本\n",
    "df_common = df.drop(df_rare.index)\n",
    "mlb = MultiLabelBinarizer()\n",
    "meta['code'] = meta['code'].apply(lambda x: [x])\n",
    "mlb.fit(meta['code'])\n",
    "#mlb.fit(df_common['code'])\n",
    "Y_common = mlb.transform(df_common['code'])\n",
    "# 拆分常见类别的数据集\n",
    "X_train_common, X_test_common, Y_train_common, Y_test_common = train_test_split(\n",
    "    df_common['name'], Y_common, test_size=0.1, random_state=45\n",
    ")\n",
    "# 将稀有类别的样本添加到训练集中\n",
    "X_train_rare = df_rare['name']\n",
    "Y_train_rare = mlb.transform(df_rare['code'])\n",
    "# print(X_train_rare.shape, Y_train_rare.shape)\n",
    "# 合并训练集\n",
    "X_train = pd.concat([meta['name'], X_train_common, X_train_rare], ignore_index=True)\n",
    "Y_train = np.vstack([mlb.transform(meta['code']), Y_train_common, Y_train_rare])\n",
    "print(X_train.shape, Y_train.shape)\n",
    "# 测试集保持不变\n",
    "X_test = X_test_common\n",
    "Y_test = Y_test_common\n",
    "#print(mlb.classes_)\n",
    "pipeline.fit(X_train, Y_train)\n",
    "Y_pred = pipeline.predict(X_test)\n",
    "#print(pipeline.score(X_train, Y_train))\n",
    "#print(pipeline.score(X_test, Y_test))\n",
    "\n",
    "# 打印特征名称（词汇表）\n",
    "#print(\"Feature names:\", vectorizer.get_feature_names_out())\n",
    "print(classification_report(Y_test, Y_pred, target_names=mlb.classes_))\n",
    "X_pre = [\"【iPad套装】快充头+平果usb线【iPad套装】快充头+2米数据线\"]\n",
    "n_pred = pipeline.predict(X_pre)\n",
    "predicted_codes = mlb.inverse_transform(n_pred)\n",
    "# print(cross_val_score(pipeline, X_train, Y_train, cv=5))\n",
    "for name, codes in zip(X_pre, predicted_codes):\n",
    "    print(f\"Product Name: {name} -> Predicted Codes: {', '.join(codes)}\")\n",
    "\n",
    "\n"
   ],
   "id": "a8b9c34808f00d7d",
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "(2789,) (2789, 166)\n",
      "              precision    recall  f1-score   support\n",
      "\n",
      "  1010010001       1.00      0.40      0.57         5\n",
      "  1010010002       1.00      1.00      1.00         1\n",
      "  1010010003       0.00      0.00      0.00         0\n",
      "  1010020001       0.75      0.60      0.67         5\n",
      "  1010020002       0.50      0.50      0.50         2\n",
      "  1010020003       1.00      1.00      1.00         1\n",
      "  1010150001       1.00      0.67      0.80         3\n",
      "  1010150002       1.00      0.50      0.67         2\n",
      "  1010150003       0.00      0.00      0.00         0\n",
      "  1020010004       1.00      0.67      0.80         3\n",
      "  1020010005       1.00      0.67      0.80         3\n",
      "  1020010006       1.00      1.00      1.00         2\n",
      "  1020020004       0.00      0.00      0.00         0\n",
      "  1020020005       1.00      1.00      1.00         2\n",
      "  1020020006       1.00      0.33      0.50         3\n",
      "  1020150004       0.00      0.00      0.00         0\n",
      "  1020150005       1.00      0.83      0.91         6\n",
      "  1020150006       1.00      1.00      1.00         3\n",
      "  1030010007       1.00      0.92      0.96        12\n",
      "  1030010008       0.00      0.00      0.00         0\n",
      "  1030010009       1.00      1.00      1.00         1\n",
      "  1030010010       1.00      1.00      1.00         4\n",
      "  1030020007       1.00      1.00      1.00         9\n",
      "  1030020008       0.00      0.00      0.00         1\n",
      "  1030020009       0.00      0.00      0.00         0\n",
      "  1030020010       1.00      1.00      1.00         1\n",
      "  1030150007       1.00      1.00      1.00        13\n",
      "  1030150010       1.00      1.00      1.00         2\n",
      "  1040010011       0.80      0.80      0.80         5\n",
      "  1040010012       1.00      0.89      0.94        18\n",
      "  1040010013       1.00      0.80      0.89         5\n",
      "  1040020011       1.00      0.50      0.67         2\n",
      "  1040020012       0.87      0.76      0.81        17\n",
      "  1040020013       1.00      0.25      0.40         4\n",
      "  1040030012       0.00      0.00      0.00         1\n",
      "  1040150011       1.00      1.00      1.00         3\n",
      "  1040150012       1.00      0.95      0.97        20\n",
      "  1040150013       1.00      1.00      1.00         2\n",
      "  1050010014       0.00      0.00      0.00         0\n",
      "  1050010015       0.00      0.00      0.00         0\n",
      "  1050020014       0.00      0.00      0.00         0\n",
      "  1050020015       0.00      0.00      0.00         0\n",
      "  1050150014       0.00      0.00      0.00         0\n",
      "  1050150015       0.00      0.00      0.00         0\n",
      "  1070010017       1.00      1.00      1.00         2\n",
      "  1070010109       0.00      0.00      0.00         0\n",
      "  1070020017       1.00      0.67      0.80         3\n",
      "  1070020109       0.00      0.00      0.00         0\n",
      "  1070150017       0.00      0.00      0.00         0\n",
      "  1070150109       0.00      0.00      0.00         0\n",
      "  1080010018       1.00      1.00      1.00         1\n",
      "  1080010019       0.89      1.00      0.94         8\n",
      "  1080010020       1.00      0.71      0.83         7\n",
      "  1080010021       1.00      1.00      1.00         3\n",
      "  1080020018       0.00      0.00      0.00         0\n",
      "  1080020019       0.86      1.00      0.92         6\n",
      "  1080020020       1.00      0.71      0.83         7\n",
      "  1080020021       1.00      0.67      0.80         3\n",
      "  1080030019       0.00      0.00      0.00         0\n",
      "  1080150018       1.00      1.00      1.00         1\n",
      "  1080150019       1.00      1.00      1.00         8\n",
      "  1080150020       0.75      1.00      0.86         3\n",
      "  1080150021       1.00      1.00      1.00         2\n",
      "  1100010016       1.00      0.60      0.75         5\n",
      "  1100020016       1.00      0.80      0.89         5\n",
      "  1100150016       1.00      0.75      0.86         4\n",
      "  1110010026       0.67      0.50      0.57         4\n",
      "  1110010027       0.00      0.00      0.00         0\n",
      "  1110020026       0.00      0.00      0.00         2\n",
      "  1110020027       0.00      0.00      0.00         0\n",
      "  1110150026       1.00      1.00      1.00         5\n",
      "  1110150027       0.00      0.00      0.00         0\n",
      "  1130010022       0.00      0.00      0.00         0\n",
      "  1130010023       1.00      0.50      0.67         2\n",
      "  1130010024       1.00      1.00      1.00        19\n",
      "  1130010025       0.00      0.00      0.00         2\n",
      "  1130020022       1.00      1.00      1.00         1\n",
      "  1130020023       1.00      1.00      1.00         5\n",
      "  1130020024       1.00      1.00      1.00         7\n",
      "  1130020025       0.00      0.00      0.00         0\n",
      "  1130150022       0.00      0.00      0.00         0\n",
      "  1130150023       1.00      1.00      1.00         4\n",
      "  1130150024       1.00      1.00      1.00        17\n",
      "  1130150025       1.00      1.00      1.00         2\n",
      "  2010650046       0.00      0.00      0.00         0\n",
      "  2012250042       0.00      0.00      0.00         0\n",
      "  2012250043       1.00      1.00      1.00         2\n",
      "  2012250044       0.00      0.00      0.00         0\n",
      "  2012250045       0.00      0.00      0.00         0\n",
      "  2020120107       0.00      0.00      0.00         1\n",
      "  2020300105       0.00      0.00      0.00         0\n",
      "  2020650028       0.00      0.00      0.00         0\n",
      "  2020650029       0.00      0.00      0.00         0\n",
      "  2020650030       0.00      0.00      0.00         0\n",
      "  2020650031       0.00      0.00      0.00         0\n",
      "  2020650032       0.00      0.00      0.00         0\n",
      "  2020650033       0.00      0.00      0.00         0\n",
      "  2020650034       0.75      1.00      0.86         3\n",
      "  2020650035       0.00      0.00      0.00         0\n",
      "  2020650036       0.00      0.00      0.00         0\n",
      "  2020650037       1.00      1.00      1.00         2\n",
      "  2020650038       0.00      0.00      0.00         0\n",
      "  2030180098       0.00      0.00      0.00         0\n",
      "  2030400091       1.00      0.83      0.91         6\n",
      "  2030400099       0.86      1.00      0.92         6\n",
      "  2032250092       1.00      0.50      0.67         2\n",
      "  2032250093       0.67      0.86      0.75         7\n",
      "  2032250094       0.67      1.00      0.80         2\n",
      "  2032250095       0.00      0.00      0.00         0\n",
      "  2032250096       1.00      1.00      1.00         4\n",
      "  2032250097       0.00      0.00      0.00         0\n",
      "  2032250106       0.00      0.00      0.00         0\n",
      "  2040100059       0.00      0.00      0.00         0\n",
      "  2040180058       0.00      0.00      0.00         0\n",
      "  2040200041       0.00      0.00      0.00         0\n",
      "  2040400048       1.00      1.00      1.00        11\n",
      "  2040400050       0.00      0.00      0.00         1\n",
      "  2040400052       0.50      0.50      0.50         4\n",
      "  2040400054       1.00      1.00      1.00         2\n",
      "  2040400055       0.00      0.00      0.00         0\n",
      "  2040400057       0.00      0.00      0.00         0\n",
      "  2040400061       0.00      0.00      0.00         0\n",
      "  2042250039       1.00      0.80      0.89         5\n",
      "  2042250040       1.00      1.00      1.00         8\n",
      "  2042250041       0.00      0.00      0.00         0\n",
      "  2042250047       0.00      0.00      0.00         0\n",
      "  2042250049       0.00      0.00      0.00         0\n",
      "  2042250051       0.00      0.00      0.00         2\n",
      "  2042250053       0.00      0.00      0.00         1\n",
      "  2042250056       0.00      0.00      0.00         1\n",
      "  2042250060       1.00      1.00      1.00         1\n",
      "  2042250090       1.00      0.71      0.83         7\n",
      "  2070100108       1.00      0.67      0.80         3\n",
      "  2080100073       0.00      0.00      0.00         0\n",
      "  2080180068       0.00      0.00      0.00         0\n",
      "  2080180069       0.00      0.00      0.00         0\n",
      "  2080650067       1.00      0.50      0.67         2\n",
      "  2082250063       0.00      0.00      0.00         0\n",
      "  2082250064       0.64      0.70      0.67        10\n",
      "  2082250065       0.60      0.50      0.55         6\n",
      "  2082250066       0.00      0.00      0.00         0\n",
      "  2082250070       0.00      0.00      0.00         0\n",
      "  2082250071       1.00      1.00      1.00         3\n",
      "  2082250072       1.00      0.88      0.93         8\n",
      "  2100650062       1.00      0.40      0.57         5\n",
      "  2100650100       0.00      0.00      0.00         0\n",
      "  2110250076       1.00      0.20      0.33         5\n",
      "  2112250074       0.67      0.67      0.67         3\n",
      "  2112250075       0.00      0.00      0.00         0\n",
      "  2120200078       0.00      0.00      0.00         0\n",
      "  2120300077       0.86      0.67      0.75         9\n",
      "  2130100088       0.00      0.00      0.00         0\n",
      "  2130200085       0.00      0.00      0.00         0\n",
      "  2130650084       0.00      0.00      0.00         0\n",
      "  2130650086       1.00      0.33      0.50         3\n",
      "  2132250079       1.00      0.86      0.92         7\n",
      "  2132250080       1.00      0.75      0.86         4\n",
      "  2132250081       0.83      0.83      0.83         6\n",
      "  2132250082       1.00      0.67      0.80         6\n",
      "  2132250083       0.00      0.00      0.00         3\n",
      "  2132250087       0.00      0.00      0.00         3\n",
      "  2132250089       0.92      0.92      0.92        13\n",
      "  2140300101       0.00      0.00      0.00         0\n",
      "  2140450104       0.00      0.00      0.00         0\n",
      "  2140650103       0.00      0.00      0.00         0\n",
      "  2142250102       0.00      0.00      0.00         0\n",
      "\n",
      "   micro avg       0.92      0.80      0.86       476\n",
      "   macro avg       0.50      0.43      0.45       476\n",
      "weighted avg       0.91      0.80      0.84       476\n",
      " samples avg       0.84      0.81      0.82       476\n",
      "\n",
      "Product Name: 【iPad套装】快充头+平果usb线【iPad套装】快充头+2米数据线 -> Predicted Codes: 1020020004, 2020120107\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "D:\\python\\pytorch-learn\\venv\\Lib\\site-packages\\sklearn\\metrics\\_classification.py:1565: UndefinedMetricWarning: Precision is ill-defined and being set to 0.0 in labels with no predicted samples. Use `zero_division` parameter to control this behavior.\n",
      "  _warn_prf(average, modifier, f\"{metric.capitalize()} is\", len(result))\n",
      "D:\\python\\pytorch-learn\\venv\\Lib\\site-packages\\sklearn\\metrics\\_classification.py:1565: UndefinedMetricWarning: Recall is ill-defined and being set to 0.0 in labels with no true samples. Use `zero_division` parameter to control this behavior.\n",
      "  _warn_prf(average, modifier, f\"{metric.capitalize()} is\", len(result))\n",
      "D:\\python\\pytorch-learn\\venv\\Lib\\site-packages\\sklearn\\metrics\\_classification.py:1565: UndefinedMetricWarning: F-score is ill-defined and being set to 0.0 in labels with no true nor predicted samples. Use `zero_division` parameter to control this behavior.\n",
      "  _warn_prf(average, modifier, f\"{metric.capitalize()} is\", len(result))\n",
      "D:\\python\\pytorch-learn\\venv\\Lib\\site-packages\\sklearn\\metrics\\_classification.py:1565: UndefinedMetricWarning: Precision is ill-defined and being set to 0.0 in samples with no predicted labels. Use `zero_division` parameter to control this behavior.\n",
      "  _warn_prf(average, modifier, f\"{metric.capitalize()} is\", len(result))\n"
     ]
    }
   ],
   "execution_count": 4
  },
  {
   "metadata": {},
   "cell_type": "code",
   "source": "",
   "id": "7e66d0f88e9722c7",
   "outputs": [],
   "execution_count": null
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 2
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython2",
   "version": "2.7.6"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
