<!DOCTYPE html>
<html lang="zh-CN">
<head><meta name="generator" content="Hexo 3.8.0">
  <meta charset="utf-8">
  
  <meta name="renderer" content="webkit">
  <meta http-equiv="X-UA-Compatible" content="IE=edge">
  <link rel="dns-prefetch" href="http://www.guzhipin.top">
  <title>Python数据挖掘入门与实践 | quekai&#39;s blog</title>
  <meta name="viewport" content="width=device-width, initial-scale=1">
  <meta name="description" content="Python数据挖掘入门与实践第一章 开始数据挖掘之旅1.1 数据挖掘简介 数据集主要包括以下两个部分： 表示真实世界中物体的样本。 描述数据集中样本的特征。  特征抽取是数据挖掘过程的一个重要环节。   1.2 使用Python和IPython Notebook pip3 freeze命令测试pip能否正常运行。 只为当前用户安装ipython 可用命令 pip install --user i">
<meta name="keywords" content="Python笔记">
<meta property="og:type" content="article">
<meta property="og:title" content="Python数据挖掘入门与实践">
<meta property="og:url" content="http://www.guzhipin.top/2019/08/04/Python数据挖掘入门与实践/index.html">
<meta property="og:site_name" content="quekai&#39;s blog">
<meta property="og:description" content="Python数据挖掘入门与实践第一章 开始数据挖掘之旅1.1 数据挖掘简介 数据集主要包括以下两个部分： 表示真实世界中物体的样本。 描述数据集中样本的特征。  特征抽取是数据挖掘过程的一个重要环节。   1.2 使用Python和IPython Notebook pip3 freeze命令测试pip能否正常运行。 只为当前用户安装ipython 可用命令 pip install --user i">
<meta property="og:locale" content="zh_CN">
<meta property="og:updated_time" content="2019-08-22T15:18:35.103Z">
<meta name="twitter:card" content="summary">
<meta name="twitter:title" content="Python数据挖掘入门与实践">
<meta name="twitter:description" content="Python数据挖掘入门与实践第一章 开始数据挖掘之旅1.1 数据挖掘简介 数据集主要包括以下两个部分： 表示真实世界中物体的样本。 描述数据集中样本的特征。  特征抽取是数据挖掘过程的一个重要环节。   1.2 使用Python和IPython Notebook pip3 freeze命令测试pip能否正常运行。 只为当前用户安装ipython 可用命令 pip install --user i">
  
    <link rel="alternate" href="/atom.xml" title="quekai&#39;s blog" type="application/atom+xml">
  
  
    <link rel="icon" href="/favicon.png">
  
  <link rel="stylesheet" href="/./main.0cf68a.css">
  <style type="text/css">
  
    /* Background gradient shown once the container receives the "show" class
       (toggled at runtime through the q-class binding on #container below). */
    #container.show {
      background: linear-gradient(200deg,#a0cfe4,#e8c37e);
    }
  </style>
  

  

</head>
<body>
  <div id="container" q-class="show:isCtnShow">
    <canvas id="anm-canvas" class="anm-canvas"></canvas>
    <div class="left-col" q-class="show:isShow">
      
<div class="overlay" style="background: #4d4d4d"></div>
<div class="intrude-less">
	<header id="header" class="inner">
		<a href="/" class="profilepic">
			<img src alt="" class="js-avatar">
		</a>
		<hgroup>
		  <h1 class="header-author"><a href="/">quekai</a></h1>
		</hgroup>
		

		<nav class="header-menu">
			<ul>
			
				<li><a href="/">主页</a></li>
	        
				<li><a href="/tags/随笔/">随笔</a></li>
	        
			</ul>
		</nav>
		<nav class="header-smart-menu">
    		
    			
    			<a q-on="click: openSlider(e, 'innerArchive')" href="javascript:void(0)">所有文章</a>
    			
            
    			
    			<a q-on="click: openSlider(e, 'friends')" href="javascript:void(0)">友链</a>
    			
            
    			
    			<a q-on="click: openSlider(e, 'aboutme')" href="javascript:void(0)">关于我</a>
    			
            
		</nav>
		<nav class="header-nav">
			<div class="social">
				
					<a class="github" target="_blank" href="https://github.com/quekai" title="github"><i class="icon-github"></i></a>
		        
					<a class="weibo" target="_blank" href="https://weibo.com/u/5810842966/" title="weibo"><i class="icon-weibo"></i></a>
		        
					<a class="rss" target="_blank" href="#" title="rss"><i class="icon-rss"></i></a>
		        
					<a class="zhihu" target="_blank" href="https://www.zhihu.com/people/quekai/" title="zhihu"><i class="icon-zhihu"></i></a>
		        
			</div>
		</nav>
	</header>		
</div>

    </div>
    <div class="mid-col" q-class="show:isShow,hide:isShow|isFalse">
      
<nav id="mobile-nav">
  	<div class="overlay js-overlay" style="background: #4d4d4d"></div>
	<div class="btnctn js-mobile-btnctn">
  		<div class="slider-trigger list" q-on="click: openSlider(e)"><i class="icon icon-sort"></i></div>
	</div>
	<div class="intrude-less">
		<header id="header" class="inner">
			<div class="profilepic">
				<img src alt="" class="js-avatar">
			</div>
			<hgroup>
			  <h1 class="header-author js-header-author">quekai</h1>
			</hgroup>
			
			
			
				
			
				
			
			
			
			<nav class="header-nav">
				<div class="social">
					
						<a class="github" target="_blank" href="https://github.com/quekai" title="github"><i class="icon-github"></i></a>
			        
						<a class="weibo" target="_blank" href="https://weibo.com/u/5810842966/" title="weibo"><i class="icon-weibo"></i></a>
			        
						<a class="rss" target="_blank" href="#" title="rss"><i class="icon-rss"></i></a>
			        
						<a class="zhihu" target="_blank" href="https://www.zhihu.com/people/quekai/" title="zhihu"><i class="icon-zhihu"></i></a>
			        
				</div>
			</nav>

			<nav class="header-menu js-header-menu">
				<ul style="width: 50%">
				
				
					<li style="width: 50%"><a href="/">主页</a></li>
		        
					<li style="width: 50%"><a href="/tags/随笔/">随笔</a></li>
		        
				</ul>
			</nav>
		</header>				
	</div>
	<div class="mobile-mask" style="display:none" q-show="isShow"></div>
</nav>

      <div id="wrapper" class="body-wrap">
        <div class="menu-l">
          <div class="canvas-wrap">
            <canvas data-colors="#eaeaea" data-sectionHeight="100" data-contentId="js-content" id="myCanvas1" class="anm-canvas"></canvas>
          </div>
          <div id="js-content" class="content-ll">
            <article id="post-Python数据挖掘入门与实践" class="article article-type-post " itemscope itemprop="blogPost">
  <div class="article-inner">
    
      <header class="article-header">
        
  
    <h1 class="article-title" itemprop="name">
      Python数据挖掘入门与实践
    </h1>
  

        
        <a href="/2019/08/04/Python数据挖掘入门与实践/" class="archive-article-date">
  	<time datetime="2019-08-04T14:13:11.000Z" itemprop="datePublished"><i class="icon-calendar icon"></i>2019-08-04</time>
</a>
        
      </header>
    
    <div class="article-entry" itemprop="articleBody">
      
        <h1 id="Python数据挖掘入门与实践"><a href="#Python数据挖掘入门与实践" class="headerlink" title="Python数据挖掘入门与实践"></a>Python数据挖掘入门与实践</h1><h2 id="第一章-开始数据挖掘之旅"><a href="#第一章-开始数据挖掘之旅" class="headerlink" title="第一章 开始数据挖掘之旅"></a>第一章 开始数据挖掘之旅</h2><h3 id="1-1-数据挖掘简介"><a href="#1-1-数据挖掘简介" class="headerlink" title="1.1 数据挖掘简介"></a>1.1 数据挖掘简介</h3><ul>
<li><p>数据集主要包括以下两个部分：</p>
<p>表示真实世界中物体的样本。</p>
<p>描述数据集中样本的特征。</p>
</li>
<li><p>特征抽取是数据挖掘过程的一个重要环节。</p>
</li>
</ul>
<h3 id="1-2-使用Python和IPython-Notebook"><a href="#1-2-使用Python和IPython-Notebook" class="headerlink" title="1.2 使用Python和IPython Notebook"></a>1.2 使用Python和IPython Notebook</h3><ul>
<li><code>pip3 freeze</code>命令测试pip能否正常运行。</li>
<li>只为当前用户安装ipython 可用命令 <code>pip install --user ipython[all]</code></li>
<li>使用命令 <code>jupyter notebook</code>创建IPython Notebook实例，并打开web浏览器连接到实例。ctrl+C关闭。</li>
<li>pip安装scikit-learn</li>
</ul>
<h3 id="1-3-亲和性分析示例"><a href="#1-3-亲和性分析示例" class="headerlink" title="1.3 亲和性分析示例"></a>1.3 亲和性分析示例</h3><ul>
<li><p>亲和性分析：根据样本个体之间的相似度，确定它们关系的亲疏。应用场景如下</p>
<p>向网站用户提供多样化服务或投放定向广告</p>
<p>为用户推荐电影或商品，而卖给他们一些与之相关的玩意</p>
<p>根据基因寻找有亲缘关系的人</p>
</li>
<li><p>商品推荐。根据数据挖掘，我们希望得到以下规则：如果一个人买了商品X，那么他很有可能购买商品Y。</p>
</li>
<li><p>本书源码来自PacktPublishing</p>
</li>
<li><p>规则的优劣常见的衡量方法是支持度和置信度。</p>
</li>
<li><p>支持度指数据集中规则应验的次数，有时需要对支持度进行规范化（除以总数量）。</p>
</li>
<li><p>置信度衡量的是规则准确率如何，即符合前提条件的所有规则里，跟当前规则结论一致的比例有多大。</p>
</li>
<li><p>分别为规则应验和规则无效这两种情况创建字典。字典的键是由条件和结论组成的元组，如（3，4）.</p>
</li>
<li><p><code>from collections import defaultdict</code>使用defaultdict使得查找的键不存在时可以返回默认值</p>
</li>
<li><p><code>from operator import itemgetter</code>可以获取字典各元素的值，<code>itemgetter(1)</code></p>
</li>
</ul>
<h3 id="1-4-什么是分类"><a href="#1-4-什么是分类" class="headerlink" title="1.4 什么是分类"></a>1.4 什么是分类</h3><ul>
<li>分类应用的目标是，根据已知类别的数据集，经过训练得到一个分类模型，再用模型对类别未知的数据进行分类。</li>
<li>过拟合：模型在训练集上表现很好，但对于没有见过的数据表现很差。不要使用训练数据测试算法。</li>
<li><code>from sklearn.model_selection import train_test_split</code> 可将数据集划分为训练集和测试集。</li>
</ul>
<h2 id="第二章-用scikit-learn估计器分类"><a href="#第二章-用scikit-learn估计器分类" class="headerlink" title="第二章 用scikit-learn估计器分类"></a>第二章 用scikit-learn估计器分类</h2><h3 id="2-1-scikit-learn-估计器"><a href="#2-1-scikit-learn-估计器" class="headerlink" title="2.1 scikit-learn 估计器"></a>2.1 scikit-learn 估计器</h3><ul>
<li>估计器：用于分类、聚类和回归分析</li>
<li>转换器：用于数据预处理和数据转换</li>
<li>流水线：组合数据挖掘流程，便于再次使用。</li>
<li>sklearn估计器包括fit()和predict()两个函数，接收和输出格式均为numpy数组或类似格式。</li>
<li>近邻算法：查找训练集，找到和新个体最相似的那些个体，看这些个体属于哪个类别，就把新个体分到哪个类别。要计算每两个个体之间的距离，计算量大。在特征取离散值的数据集上表现很差。</li>
<li>欧式距离：即真实距离，是两个特征向量长度平方和的平方根。直观，但当某些特征取值比其他特征大得多，或很多特征值为0即稀疏矩阵时，结果不准确，此时可使用曼哈顿距离和余弦距离。</li>
<li>曼哈顿距离为两个特征在标准坐标系中的绝对轴距之和，受异常值的影响比欧氏距离小。但当某些特征取值比其他特征大得多，这些特征会掩盖其他特征间的近邻关系。</li>
<li>余弦距离更适合解决异常值和数据稀疏问题，指的是特征向量夹角的余弦值。适用于特征向量很多的情况，丢弃了向量长度所包含的在某些场景下有用的信息。</li>
<li>交叉检验算法描述如下：<ul>
<li>将整个大数据集分为几个部分</li>
<li>对于每个部分执行以下操作：<ul>
<li>将其中一部分作为当前测试集</li>
<li>用剩余部分训练算法</li>
<li>在当前测试集上测试算法</li>
</ul>
</li>
<li>记录每次得分及平均得分</li>
<li>在上述过程中，每条数据只能在测试集中出现一次，以减少运气成分。</li>
</ul>
</li>
<li>from sklearn.model_selection import cross_val_score 默认使用Stratified K Fold方法切分数据集，保证切分后的数据集中类别分别大致相同。用此函数进行交叉检验。</li>
<li><code>%matplotlib inline</code> 来告知要在notebook里作图</li>
</ul>
<h3 id="2-2-流水线在预处理中的应用"><a href="#2-2-流水线在预处理中的应用" class="headerlink" title="2.2 流水线在预处理中的应用"></a>2.2 流水线在预处理中的应用</h3><ul>
<li>规范化：特征值的大小和该特征的分类效果没有任何关系，所以要对不同的特征进行规范化，使得它们的特征落在相同的值域或几个确定的类别。</li>
<li>选取最具区分度的特征、创建新特征都属于预处理的范畴。sklearn中的预处理工具叫做转换器。</li>
<li><code>from sklearn.preprocessing import MinMaxScaler</code>进行基于特征的规范化，把每个特征值域规范到0和1之间。<ul>
<li>为使每条数据特征值和为1，使用Normalizer</li>
<li>为使各特征的均值为0，使用StandardScaler</li>
<li>为将数值型特征二值化，使用Binarizer，大于阈值为1，反之为0</li>
</ul>
</li>
</ul>
<h3 id="2-3-流水线"><a href="#2-3-流水线" class="headerlink" title="2.3 流水线"></a>2.3 流水线</h3><ul>
<li>随着实验的增加，操作复杂程度也在提高，可能导致错误操作或操作顺序不当的问题。流水线就是用来解决这个问题的。流水线将这些步骤保存到工作流中，以便之后的数据读取以及预处理等操作。</li>
<li><code>from sklearn.pipeline import Pipeline</code> 流水线的输入为一连串的数据挖掘步骤，接着是转换器，最后是估计器，每一步的结果作为下一步的输出。</li>
<li>每一步都用元组(‘名称’,’步骤’)来表示。如<code>scaling_pipline = Pipeline([(&#39;scale&#39;, MinMaxScaler()), (&#39;predict&#39;, KNeighborsClassifier())])</code></li>
</ul>
<h2 id="第三章-用决策树预测获胜球队"><a href="#第三章-用决策树预测获胜球队" class="headerlink" title="第三章 用决策树预测获胜球队"></a>第三章 用决策树预测获胜球队</h2><h3 id="3-1-加载数据集"><a href="#3-1-加载数据集" class="headerlink" title="3.1 加载数据集"></a>3.1 加载数据集</h3><ul>
<li>决策树的一大优点是人和机器都能看懂</li>
<li>pandas.read_csv函数提供了修复数据的参数，<code>pd.read_csv(data_filename, parse_dates=[&quot;Dates&quot;], skiprows=[0,])</code></li>
<li>用<code>dataset.columns=[]</code>修改头部。</li>
<li>用<code>dataset[].values</code>提取数组。</li>
<li>用<code>for index, row in dataset.iterrows()</code>遍历每一行。用for index, row in dataset.sort(“Date”).iterrows()按某一列顺序遍历。</li>
</ul>
<h3 id="3-2-决策树"><a href="#3-2-决策树" class="headerlink" title="3.2 决策树"></a>3.2 决策树</h3><ul>
<li>决策树是一种积极算法，需要进行训练，而近邻算法是惰性算法，分类时才开始干活。</li>
<li>决策树在从根节点起每层选取该层的最佳特征用于决策，到达下一个节点，选择下一个最佳特征，以此类推。当无法从增加树的层级中获得更多信息时，启动退出机制。</li>
<li>sklearn实现了分类回归树算法（CART）并将其作为生成决策树的默认算法，支持连续型和类别型特征。</li>
<li>退出准则可以防止过拟合。除了退出准则外，也可以先建立完整的树，再进行剪枝，去掉对整个过程没有提供太多信息的节点。</li>
<li>使用<ul>
<li>min_samples_split：指定创建一个新节点至少需要的个体数量</li>
<li>min_samples_leaf：指定为保留节点每个节点至少应该保留的个体数量。</li>
</ul>
</li>
<li>创建决策的标准，有：<ul>
<li>基尼不纯度：用于衡量决策节点错误预测新个体类别的比例。</li>
<li>信息增益：用信息论中的熵表示决策节点提供多少新信息。</li>
</ul>
</li>
<li><code>from sklearn.tree import DecisionTreeClassifier</code>创建决策树。</li>
</ul>
<h3 id="3-3-NBA比赛结果预测"><a href="#3-3-NBA比赛结果预测" class="headerlink" title="3.3 NBA比赛结果预测"></a>3.3 NBA比赛结果预测</h3><ul>
<li><p><code>from sklearn.preprocessing import LabelEncoder</code>转换器将字符串类型的球队名转化为整型，以满足sklearn决策树的需求。</p>
<p>encoding = LabelEncoder()</p>
<p>encoding.fit(dataset[“”].values)</p>
<p>home_teams = encoding.transform(dataset[“”].values)</p>
</li>
<li><p>np.vstack，np.hstack将向量组合起来，形成一个矩阵。</p>
</li>
<li><p>使用LabelEncoder转换得到的整型仍被认为是连续型特征，即1与2比1与3更相似。</p>
</li>
<li><p>使用OneHotEncoder可以将整数转化为二进制数字，特征有多少种类型就有多少位二进制数字，第几个类型第几个二进制位为1，其余为0。如001，010，100.</p>
</li>
</ul>
<h3 id="3-4-随机森林"><a href="#3-4-随机森林" class="headerlink" title="3.4 随机森林"></a>3.4 随机森林</h3><ul>
<li><p>决策树可能出现过拟合的情况，解决方法之一是调整决策树算法，限制它所学到的规则的数量。使用这种方法会导致决策树泛化能力强，但整体表现稍弱。</p>
</li>
<li><p>随机森林通过创建多棵决策树，用它们分别进行预测，再根据少数服从多数的原则选择最终预测结果。</p>
<ul>
<li>装袋：每次随机从数据集中选取一部分数据作为训练集。</li>
<li>选取部分决策特征作为决策依据：前几个决策节点的特征非常突出，随机选取的训练集仍具有较大相似性。</li>
</ul>
</li>
<li><p>方差是由训练集的变化引起的。决策树这种方差大的算法极易受到训练集变化的影响，从而产生过拟合问题。随机森林对大量决策树的预测结果取均值，能有效降低方差。</p>
</li>
<li><p>偏误是由算法的假设引起的，与数据集没有关系。</p>
</li>
<li><p>决策树集成做出了以下假设：预测过程具有因分类器而异的随机性，使用多个模型得到的预测结果的均值，能够消除随机误差的影响。</p>
</li>
<li><p><code>from sklearn.ensemble import RandomForestClassifier</code>提供了<code>DecisionTreeClassifier</code>的参数，如决策标准（基尼不纯度/信息增益）、max_features、min_samples_split等。也引入了新参数：</p>
<ul>
<li>n_estimators：指定决策树数量。</li>
<li>oob_score：设为真，则 测试时不会使用训练时用过的数据。</li>
<li>n_jobs：并行计算使用的内核数量。</li>
</ul>
</li>
<li><p>使用<code>GridSearchCV()</code>搜索最佳参数：</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br></pre></td><td class="code"><pre><span class="line">parameter_space = &#123;</span><br><span class="line">                   <span class="string">"max_features"</span>: [<span class="number">2</span>, <span class="number">10</span>, <span class="string">'auto'</span>],</span><br><span class="line">                   <span class="string">"n_estimators"</span>: [<span class="number">100</span>,],</span><br><span class="line">                   <span class="string">"criterion"</span>: [<span class="string">"gini"</span>, <span class="string">"entropy"</span>],</span><br><span class="line">                   <span class="string">"min_samples_leaf"</span>: [<span class="number">2</span>, <span class="number">4</span>, <span class="number">6</span>],</span><br><span class="line">                   &#125;</span><br><span class="line">clf = RandomForestClassifier(random_state=<span class="number">14</span>)</span><br><span class="line">grid = GridSearchCV(clf, parameter_space)</span><br></pre></td></tr></table></figure>
<p>可使用<code>grid.best_estimator_</code>查看使用了哪些参数。</p>
</li>
<li><p>使用<code>dataset[&quot;&quot;] = feature_creator(dataset)</code>创建新特征。</p>
</li>
</ul>
<h2 id="第四章-用亲和性分析推荐电影"><a href="#第四章-用亲和性分析推荐电影" class="headerlink" title="第四章 用亲和性分析推荐电影"></a>第四章 用亲和性分析推荐电影</h2><h3 id="4-1-亲和性分析"><a href="#4-1-亲和性分析" class="headerlink" title="4.1 亲和性分析"></a>4.1 亲和性分析</h3><ul>
<li>亲和性分析用来找出两个对象共同出现的情况。应用场景如：<ul>
<li>欺诈检测</li>
<li>顾客区分</li>
<li>软件优化</li>
<li>产品推荐</li>
</ul>
</li>
<li>Apriori算法是经典的亲和性分析算法。从数据集中频繁出现的商品中选取共同出现的商品组成频繁项集，避免复杂度呈指数增长的问题。<ul>
<li>最小支持度：要生成A,B的频繁项集（A,B），要求最小支持度为30，则A和B都必须在数据集中出现30次。更大的频繁项集如（A,B,C）的子集（A,B）也要是满足最小支持度的频繁项集。</li>
<li>生成频繁项集后，再考虑其他不够频繁的项集。</li>
</ul>
</li>
<li>Apriori算法过程为：<ul>
<li>设定最小支持度，找出频繁项集。</li>
<li>根据置信度选取关联规则。</li>
</ul>
</li>
</ul>
<h3 id="4-2-电影推荐问题"><a href="#4-2-电影推荐问题" class="headerlink" title="4.2 电影推荐问题"></a>4.2 电影推荐问题</h3><ul>
<li><code>all_ratings = pd.read_csv(ratings_filename, delimiter=&#39;\t&#39;, header=None, names=[&quot;UserID&quot;, &quot;MovieID&quot;, &quot;Rating&quot;, &quot;Datetime&quot;])</code>将识别制表符作为分隔符，没有表头，添加表头。</li>
<li><code>all_ratings[&quot;Datetime&quot;] = pd.to_datetime(all_ratings[&quot;Datetime&quot;],unit=&#39;s&#39;)</code>解析时间戳数据，设定单位为秒。</li>
<li>稀疏矩阵格式：即对不存在的数据不存储，而不是存放大量的0。</li>
</ul>
<h3 id="4-3-Apriori算法的实现"><a href="#4-3-Apriori算法的实现" class="headerlink" title="4.3 Apriori算法的实现"></a>4.3 Apriori算法的实现</h3><ul>
<li><p><code>ratings = all_ratings[all_ratings[&quot;UserID&quot;].isin(range(200))]</code> 选取一部分数据作训练集，减少搜索空间。</p>
</li>
<li><p><code>favorable_ratings = ratings[ratings[&quot;Favorable&quot;]]</code> 新建数据集，只保留某一行。</p>
</li>
<li><p><code>favorable_reviews_by_users = dict((k,frozenset(v.values)) for k, v in favorable_ratings.groupby(&quot;UserID&quot;)[&quot;MovieID&quot;])</code>用.groupby进行分组，frozenset是固定不变的集合，速度快于列表。</p>
</li>
<li><p>Apriori算法过程如下：</p>
<ul>
<li>把各项放到只包含子集的项集中，生成最初的频繁项集。只使用达到最小支持度的项。</li>
<li>查找现有频繁项集的超集，发现新的备选项集。</li>
<li>测试新生成备选项集的频繁程度，如果不够频繁则舍弃。如果没有新的频繁项集，则跳到最后一步。</li>
<li>存储新发现的频繁项集，跳到第二步。</li>
<li>返回所有频繁项集。</li>
</ul>
</li>
<li><p>第一步：</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br></pre></td><td class="code"><pre><span class="line"> frequent_itemsets[<span class="number">1</span>] = dict((frozenset((movie_id,)),row[<span class="string">"Favorable"</span>]) </span><br><span class="line"><span class="keyword">for</span> movie_id, row <span class="keyword">in</span> num_favorable_by_movie.iterrows() </span><br><span class="line"><span class="keyword">if</span> row[<span class="string">"Favorable"</span>] &gt; min_support)</span><br></pre></td></tr></table></figure>
</li>
<li><p>第二三步：</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br></pre></td><td class="code"><pre><span class="line"><span class="function"><span class="keyword">def</span> <span class="title">find_frequent_itemsets</span><span class="params">(favorable_reviews_by_users, k_1_itemsets, min_support)</span>:</span></span><br><span class="line">    counts = defaultdict(int)</span><br><span class="line">    <span class="keyword">for</span> user, reviews <span class="keyword">in</span> favorable_reviews_by_users.items():</span><br><span class="line">        <span class="keyword">for</span> itemset <span class="keyword">in</span> k_1_itemsets:</span><br><span class="line">            <span class="keyword">if</span> itemset.issubset(reviews):</span><br><span class="line">                <span class="keyword">for</span> other_reviewed_movie <span class="keyword">in</span> reviews - itemset:</span><br><span class="line">                    current_superset = itemset | frozenset((other_reviewed_movie,))</span><br><span class="line">                    counts[current_superset] += <span class="number">1</span></span><br><span class="line">    <span class="keyword">return</span> dict([(itemset, frequency)</span><br><span class="line">               <span class="keyword">for</span> itemset, frequency <span class="keyword">in</span> counts.items()</span><br><span class="line">               <span class="keyword">if</span> frequency &gt;= min_support])</span><br></pre></td></tr></table></figure>
</li>
<li><p>第四五步</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br></pre></td><td class="code"><pre><span class="line"><span class="keyword">for</span> k <span class="keyword">in</span> range(<span class="number">2</span>, <span class="number">20</span>):</span><br><span class="line">    cur_frequent_itemsets = find_frequent_itemsets(favorable_reviews_by_users, frequent_itemsets[k<span class="number">-1</span>], min_support)</span><br><span class="line">    frequent_itemsets[k] = cur_frequent_itemsets</span><br><span class="line">    <span class="keyword">if</span> len(cur_frequent_itemsets) == <span class="number">0</span>:</span><br><span class="line">        print(<span class="string">"Did not find any frequent itemsets of length &#123;&#125;"</span>.format(k))</span><br><span class="line">        sys.stdout.flush()</span><br><span class="line">        <span class="keyword">break</span></span><br><span class="line">    <span class="keyword">else</span>:</span><br><span class="line">        print(<span class="string">"I found &#123;&#125; frequent itemsets of length &#123;&#125;"</span>.format(len(cur_frequent_itemsets), k))</span><br><span class="line">        sys.stdout.flush()</span><br><span class="line"><span class="keyword">del</span> frequent_itemsets[<span class="number">1</span>]</span><br></pre></td></tr></table></figure>
</li>
</ul>
<h2 id="第五章-用转换器抽取特征"><a href="#第五章-用转换器抽取特征" class="headerlink" title="第五章 用转换器抽取特征"></a>第五章 用转换器抽取特征</h2><h3 id="5-1-特征抽取"><a href="#5-1-特征抽取" class="headerlink" title="5.1 特征抽取"></a>5.1 特征抽取</h3><ul>
<li>特征抽取是数据挖掘任务最重要的环节，对最终结果的影响高于数据挖掘算法本身。</li>
<li>特征选择降低真实世界的复杂度，模型比现实更容易操纵。</li>
<li>简化要以数据挖掘的目标为核心。</li>
<li>简化会忽略很多细节，甚至会抛弃很多对数据挖掘算法能力起到帮助作用的信息。</li>
<li>不是所有特征必须是数值型或类别型值，直接作用于文本、图像和其他数据结构的算法已经研究出来了。</li>
<li><code>adult.dropna(how=&quot;all&quot;, inplace=True)</code>删除包含无效数据的行，inplace为真表明在当前数据框中修改，而不是新建一个数据框。</li>
<li><code>adult[&quot;Hours-per-week&quot;].describe()</code>提供了常见统计量的计算。</li>
<li><code>adult[&quot;Work-Class&quot;].unique()</code>得到特征的所有不同情况。</li>
</ul>
<h3 id="5-2-特征选择"><a href="#5-2-特征选择" class="headerlink" title="5.2 特征选择"></a>5.2 特征选择</h3><ul>
<li><p>特征选择的原因如下：</p>
<ul>
<li>降低复杂度</li>
<li>降低噪音</li>
<li>增加模型可读性</li>
</ul>
</li>
<li><p><code>X = np.arange(30).reshape((10, 3))</code>创建0到29，30个数字，分为3列10行。</p>
</li>
<li><p>删除方差达不到最低标准的特征。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br></pre></td><td class="code"><pre><span class="line"><span class="keyword">from</span> sklearn.feature_selection <span class="keyword">import</span> VarianceThreshold</span><br><span class="line">vt = VarianceThreshold()</span><br><span class="line">xt = vt.fit_transform(X)</span><br><span class="line">print(vt.variances_)</span><br></pre></td></tr></table></figure>
</li>
<li><p>随着特征的增加，寻找最佳特征组合的时间复杂度是呈指数级增长的，变通的方法是寻找表现好的单个特征，一般是测量变量与目标类别之间的某种相关性。</p>
</li>
<li><p>SelectKBest返回k个最佳特征，SelectPercentile返回最佳的前r%个特征。计算单个特征与某一类别之间相关性的计算方法有卡方检验，互信息和信息熵。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br></pre></td><td class="code"><pre><span class="line"><span class="keyword">from</span> sklearn.feature_selection <span class="keyword">import</span> SelectKBest</span><br><span class="line"><span class="keyword">from</span> sklearn.feature_selection <span class="keyword">import</span> chi2</span><br><span class="line">transformer = SelectKBest(score_func=chi2, k=<span class="number">3</span>)</span><br><span class="line">xt_chi2 = transformer.fit_transform(X, y)</span><br><span class="line">print(transformer.scores_)</span><br></pre></td></tr></table></figure>
</li>
<li><p>也可使用皮尔逊相关系数计算相关性。皮尔逊相关系数为-1到1的值，绝对值越大，相关性越大。<code>from scipy.stats import pearsonr</code>scipy实现的皮尔逊相关系数接收（X，y），返回每个特征的皮尔逊相关系数和p值，X为该特征列。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br></pre></td><td class="code"><pre><span class="line"><span class="function"><span class="keyword">def</span> <span class="title">multivariate_pearsonr</span><span class="params">(X, y)</span>:</span></span><br><span class="line">    scores, pvalues = [], []</span><br><span class="line">    <span class="keyword">for</span> column <span class="keyword">in</span> range(X.shape[<span class="number">1</span>]):</span><br><span class="line">        cur_score, cur_p = pearsonr(X[:, column], y)</span><br><span class="line">        scores.append(abs(cur_score))</span><br><span class="line">        pvalues.append(cur_p)</span><br><span class="line">    <span class="keyword">return</span> (np.array(scores), np.array(pvalues))</span><br><span class="line">transformer = SelectKBest(score_func=multivariate_pearsonr, k=<span class="number">3</span>)</span><br><span class="line">xt_pearson = transformer.fit_transform(X, y)</span><br><span class="line">print(transformer.scores_)</span><br></pre></td></tr></table></figure>
</li>
<li><p>哪些特征是好的没有标准答案，取决于度量标准。</p>
</li>
</ul>
<h3 id="5-3-创建特征"><a href="#5-3-创建特征" class="headerlink" title="5.3 创建特征"></a>5.3 创建特征</h3><ul>
<li><p>数据集中原始特征可能会出现特征间相关性很强，特征冗余等情况，增加算法处理难度，因此要创建新特征。</p>
</li>
<li><p>使用converters修复数据。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br></pre></td><td class="code"><pre><span class="line"><span class="function"><span class="keyword">def</span> <span class="title">convert_number</span><span class="params">(x)</span>:</span></span><br><span class="line">    <span class="keyword">try</span>:</span><br><span class="line">        <span class="keyword">return</span> float(x)</span><br><span class="line">    <span class="keyword">except</span> ValueError:</span><br><span class="line">        <span class="keyword">return</span> np.nan</span><br><span class="line"><span class="keyword">from</span> collections <span class="keyword">import</span> defaultdict</span><br><span class="line">converters = defaultdict(convert_number)</span><br><span class="line">converters[<span class="number">1558</span>] = <span class="keyword">lambda</span> x: <span class="number">1</span> <span class="keyword">if</span> x.strip() == <span class="string">'ad.'</span> <span class="keyword">else</span> <span class="number">0</span></span><br><span class="line">ads = pd.read_csv(data_filename, header=<span class="literal">None</span>, converters=converters)</span><br></pre></td></tr></table></figure>
</li>
<li><p>主成分分析算法的目的是找到能用较少信息描述数据集的特征组合。意在发现彼此之间没有相关性、能够描述数据集、特征方差与整体方差相近的特征，即主成分。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br></pre></td><td class="code"><pre><span class="line"><span class="keyword">from</span> sklearn.decomposition <span class="keyword">import</span> PCA</span><br><span class="line">pca = PCA(n_components=<span class="number">5</span>)</span><br><span class="line">Xd = pca.fit_transform(X)</span><br><span class="line">np.set_printoptions(precision=<span class="number">3</span>, suppress=<span class="literal">True</span>)</span><br><span class="line">pca.explained_variance_ratio_</span><br></pre></td></tr></table></figure>
</li>
</ul>
<ul>
<li><p>用PCA算法不好的地方在于，主成分往往是很多特征的复杂组合，理解起来很困难。</p>
</li>
<li><p>PCA的一个优点是可以将抽象的数据集绘制成图像，如将前两个特征做成图形。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br></pre></td><td class="code"><pre><span class="line">%matplotlib inline</span><br><span class="line"><span class="keyword">from</span> matplotlib <span class="keyword">import</span> pyplot <span class="keyword">as</span> plt</span><br><span class="line">classes = set(y)</span><br><span class="line">colors = [<span class="string">'red'</span>, <span class="string">'green'</span>]</span><br><span class="line"><span class="keyword">for</span> cur_class, color <span class="keyword">in</span> zip(classes, colors):</span><br><span class="line">    mask = (y == cur_class).values</span><br><span class="line">    plt.scatter(Xd[mask,<span class="number">0</span>], Xd[mask,<span class="number">1</span>], marker=<span class="string">'o'</span>, color=color, label=int(cur_class))</span><br><span class="line">plt.legend()</span><br><span class="line">plt.show()</span><br></pre></td></tr></table></figure>
</li>
</ul>
<h3 id="5-4-创建自己的转换器"><a href="#5-4-创建自己的转换器" class="headerlink" title="5.4 创建自己的转换器"></a>5.4 创建自己的转换器</h3><ul>
<li><p>导入TransformerMinxin类，重写其中的fit、transform函数。使用as_float_array判断输入类型是否为float。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br><span class="line">13</span><br></pre></td><td class="code"><pre><span class="line"><span class="keyword">from</span> sklearn.base <span class="keyword">import</span> TransformerMixin</span><br><span class="line"><span class="keyword">from</span> sklearn.utils <span class="keyword">import</span> as_float_array</span><br><span class="line"><span class="class"><span class="keyword">class</span> <span class="title">MeanDiscrete</span><span class="params">(TransformerMixin)</span>:</span></span><br><span class="line">    <span class="function"><span class="keyword">def</span> <span class="title">fit</span><span class="params">(self, X)</span>:</span></span><br><span class="line">        X = as_float_array(X)</span><br><span class="line">        self.mean = X.mean(axis=<span class="number">0</span>)</span><br><span class="line">        <span class="keyword">return</span> self</span><br><span class="line">    <span class="function"><span class="keyword">def</span> <span class="title">transform</span><span class="params">(self, X)</span>:</span></span><br><span class="line">        X = as_float_array(X)</span><br><span class="line">        <span class="keyword">assert</span> X.shape[<span class="number">1</span>] == self.mean.shape[<span class="number">0</span>]</span><br><span class="line">        <span class="keyword">return</span> X &gt; self.mean</span><br><span class="line">mean_discrete = MeanDiscrete()</span><br><span class="line">X_mean = mean_discrete.fit_transform(X)</span><br></pre></td></tr></table></figure>
</li>
</ul>
<h2 id="第六章-使用朴素贝叶斯进行社会媒体挖掘"><a href="#第六章-使用朴素贝叶斯进行社会媒体挖掘" class="headerlink" title="第六章 使用朴素贝叶斯进行社会媒体挖掘"></a>第六章 使用朴素贝叶斯进行社会媒体挖掘</h2><h3 id="6-1-消歧"><a href="#6-1-消歧" class="headerlink" title="6.1 消歧"></a>6.1 消歧</h3><ul>
<li>朴素贝叶斯：朴素是因为假设了各特征之间是相互独立的。</li>
<li>文本挖掘的一个难点在于歧义，如bank一词指的是河岸还是银行，消除歧义被称为消歧。</li>
<li>jupyter中用<code>%%javascript</code>表示该代码段为JavaScript语言。</li>
<li>只有在相同的测试集上，在相同的条件下进行测试，才能比较算法的优劣。</li>
</ul>
<h3 id="6-2-文本转换器"><a href="#6-2-文本转换器" class="headerlink" title="6.2 文本转换器"></a>6.2 文本转换器</h3><ul>
<li>一种简单但高效的文本测量方法是统计数据集中每个单词出现的次数。</li>
<li><code>from collections import Counter</code>能计算列表中各个元素出现的次数，用<code>c.most_common(5)</code>输出出现次数最多的5个词。</li>
<li>词袋模型分为三种：<ul>
<li>用词语实际出现的次数作为词频。缺点是当文档长度差异明显时，词频差距会很大。</li>
<li>使用归一化后的词频，每篇文档中词频和为1，规避了文档长度对词频的影响。</li>
<li>用二值特征表示，出现为1，不出现为0。</li>
<li>词频-逆文档频率法（tf-idf）：用词频代替词的出现次数，词频除以包含该词的文档数。</li>
</ul>
</li>
<li>N元语法是指由几个连续的词组成的子序列。会导致特征矩阵变得更稀疏。另一种是字符N元语法，用于发现拼写错误。</li>
</ul>
<h3 id="6-3-朴素贝叶斯"><a href="#6-3-朴素贝叶斯" class="headerlink" title="6.3 朴素贝叶斯"></a>6.3 朴素贝叶斯</h3><ul>
<li><p>在贝叶斯统计学中，使用数据来描述模型，而不是用模型描述数据。频率论者则使用数据证实假设的模型。</p>
</li>
<li><p>贝叶斯定理公式如下：<br>$$<br>P(A|B)=\frac{P(B|A)P(A)}{P(B)}<br>$$<br>比较后验概率大小时，只需计算分子并比较大小。</p>
</li>
</ul>
<h3 id="6-4-应用"><a href="#6-4-应用" class="headerlink" title="6.4 应用"></a>6.4 应用</h3><ul>
<li><p>NLTK的word_tokenize函数将原始文档转换为由单词和其是否出现的字典。NLTK与转换器接口不一致，因此要创建包含fit和transform的转换器。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br></pre></td><td class="code"><pre><span class="line"><span class="class"><span class="keyword">class</span> <span class="title">NLTKBOW</span><span class="params">(TransformerMixin)</span>:</span></span><br><span class="line">    <span class="function"><span class="keyword">def</span> <span class="title">fit</span><span class="params">(self, X, y=None)</span>:</span></span><br><span class="line">        <span class="keyword">return</span> self</span><br><span class="line">    <span class="function"><span class="keyword">def</span> <span class="title">transform</span><span class="params">(self, X)</span>:</span></span><br><span class="line">        <span class="keyword">return</span> [&#123;word: <span class="literal">True</span> <span class="keyword">for</span> word <span class="keyword">in</span> word_tokenize(document)&#125; <span class="keyword">for</span> document <span class="keyword">in</span> X]</span><br></pre></td></tr></table></figure>
</li>
<li><p><code>from sklearn.feature_extraction import DictVectorizer</code>接收元素为字典的列表，将其转换为矩阵。</p>
</li>
<li><p><code>from sklearn.naive_bayes import BernoulliNB</code>引入二值分类的朴素贝叶斯分类器。</p>
</li>
<li><p>组合部件，创建流水线。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br></pre></td><td class="code"><pre><span class="line"><span class="keyword">from</span> sklearn.pipeline <span class="keyword">import</span> Pipeline</span><br><span class="line">pipeline = Pipeline([(<span class="string">'bag-of-words'</span>, NLTKBOW()),</span><br><span class="line">                    (<span class="string">'vectorizer'</span>, DictVectorizer()),</span><br><span class="line">                    (<span class="string">'naive-bayes'</span>, BernoulliNB())</span><br><span class="line">                    ])</span><br></pre></td></tr></table></figure>
</li>
<li><p>正确率对于不均匀的数据集来说，并不能反映算法的优劣。更常用的指标为F1值。</p>
</li>
<li><p>F1值是以每个类别为基础进行定义的。包括两大概念：</p>
<ul>
<li>准确率：预测结果属于某一类的个体，实际属于该类的比例。</li>
<li>召回率：被正确预测为某类的个体数量与数据集中该类个体总数的比例。</li>
</ul>
</li>
<li><p>在案例中就是：</p>
<ul>
<li>准确率：在所有被预测为相关的消息中真正相关的占比多少？</li>
<li>召回率：数据集所有相关的消息中，有多少被正确识别为相关？</li>
</ul>
</li>
<li><p>F1值是准确率和召回率的调和平均数。<br>$$<br>F1=2·\frac{precision·recall}{precision+recall}<br>$$</p>
</li>
<li><p><code>scores=cross_val_score(pipeline, tweets, labels, scoring=&#39;f1&#39;)</code>交叉验证法计算F1得分。</p>
</li>
<li><p><code>nb = model.named_steps[&#39;naive-bayes&#39;]</code>访问流水线的每个步骤（named_steps是字典，用方括号索引）。</p>
</li>
<li><p>当概率较小时，可以使用对数概率，防止下溢。</p>
</li>
<li><p><code>np.argsort()</code>返回升序排列的索引，对结果取负或反转即可得到降序。</p>
</li>
<li><p>DictVectorizer保存了特征的名称，可搜索其feature_names_属性查找。</p>
</li>
</ul>
<h2 id="第七章-用图挖掘找到感兴趣的人"><a href="#第七章-用图挖掘找到感兴趣的人" class="headerlink" title="第七章 用图挖掘找到感兴趣的人"></a>第七章 用图挖掘找到感兴趣的人</h2><h3 id="7-1-加载数据集"><a href="#7-1-加载数据集" class="headerlink" title="7.1 加载数据集"></a>7.1 加载数据集</h3><ul>
<li><p>初始化twitter连接实例。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br></pre></td><td class="code"><pre><span class="line"><span class="keyword">import</span> twitter</span><br><span class="line">consumer_key = <span class="string">"52Nu7ubm2szT1JyJEOB7V2lGM"</span></span><br><span class="line">consumer_secret = <span class="string">"mqA94defqjioyWeMxdJsSduthxdMMGd2vfOUKvOFpm0n7JTqfY"</span></span><br><span class="line">access_token = <span class="string">"16065520-USf3DBbQAh6ZA8CnSAi6NAUlkorXdppRXpC4cQCKk"</span></span><br><span class="line">access_token_secret = <span class="string">"DowMQeXqh5ZsGvZGrmUmkI0iCmI34ShFzKF3iOdiilpX5"</span></span><br><span class="line">authorization = twitter.OAuth(access_token, access_token_secret, consumer_key, consumer_secret)</span><br><span class="line">t = twitter.Twitter(auth=authorization, retry=<span class="literal">True</span>)</span><br></pre></td></tr></table></figure>
</li>
<li><p><code>search_results = t.search.tweets(q=&quot;python&quot;, count=100)[&#39;statuses&#39;]</code>搜索包含关键词的推文。</p>
</li>
<li><p>导入joblib库，保存模型。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br></pre></td><td class="code"><pre><span class="line"><span class="keyword">from</span> sklearn.externals <span class="keyword">import</span> joblib</span><br><span class="line">model_filename = os.path.join(os.path.expanduser(<span class="string">"~"</span>), <span class="string">"Models"</span>, <span class="string">"twitter"</span>, <span class="string">"python_context.pkl"</span>)</span><br><span class="line">joblib.dump(model, model_filename)</span><br></pre></td></tr></table></figure>
</li>
<li><p>定制的类无法直接用joblib加载，因此要重建NLTKBOW。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br></pre></td><td class="code"><pre><span class="line"><span class="keyword">from</span> sklearn.base <span class="keyword">import</span> TransformerMixin</span><br><span class="line"><span class="keyword">from</span> nltk <span class="keyword">import</span> word_tokenize</span><br><span class="line"></span><br><span class="line"><span class="class"><span class="keyword">class</span> <span class="title">NLTKBOW</span><span class="params">(TransformerMixin)</span>:</span></span><br><span class="line">    <span class="function"><span class="keyword">def</span> <span class="title">fit</span><span class="params">(self, X, y=None)</span>:</span></span><br><span class="line">        <span class="keyword">return</span> self</span><br><span class="line"></span><br><span class="line">    <span class="function"><span class="keyword">def</span> <span class="title">transform</span><span class="params">(self, X)</span>:</span></span><br><span class="line">        <span class="keyword">return</span> [&#123;word: <span class="literal">True</span> <span class="keyword">for</span> word <span class="keyword">in</span> word_tokenize(document)&#125;</span><br><span class="line">                 <span class="keyword">for</span> document <span class="keyword">in</span> X]</span><br></pre></td></tr></table></figure>
</li>
<li><p>调用joblib的load函数加载模型。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br></pre></td><td class="code"><pre><span class="line">context_classifier = joblib.load(model_filename)</span><br></pre></td></tr></table></figure>
</li>
<li><p><code>results = t.friends.ids(user_id=user_id, cursor=cursor, count=5000)</code>从推特获取用户关注的好友编号列表，一页5000，使用游标表示第几页，初始设为-1，不为0表示有下一页。返回一个字典，包含’ids’和下一个游标’next_cursor’。</p>
</li>
<li><p><code>friends.extend([friends for friends in results[&#39;ids&#39;]])</code>列表扩展，接收参数为一个列表。</p>
</li>
<li><p><code>sys.stdout.flush()</code>输出缓存到屏幕，避免过长等待。</p>
</li>
<li><p>完整的获取好友函数代码：</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br><span class="line">13</span><br><span class="line">14</span><br><span class="line">15</span><br><span class="line">16</span><br><span class="line">17</span><br><span class="line">18</span><br><span class="line">19</span><br><span class="line">20</span><br><span class="line">21</span><br><span class="line">22</span><br><span class="line">23</span><br><span class="line">24</span><br><span class="line">25</span><br></pre></td><td class="code"><pre><span class="line"><span class="function"><span class="keyword">def</span> <span class="title">get_friends</span><span class="params">(t, user_id)</span>:</span></span><br><span class="line">    friends = []</span><br><span class="line">    cursor = <span class="number">-1</span>  <span class="comment"># Start with the first page</span></span><br><span class="line">    <span class="keyword">while</span> cursor != <span class="number">0</span>:  <span class="comment"># If zero, that is the end:</span></span><br><span class="line">        <span class="keyword">try</span>:</span><br><span class="line">            results = t.friends.ids(user_id=user_id, cursor=cursor, count=<span class="number">5000</span>)</span><br><span class="line">            friends.extend([friends <span class="keyword">for</span> friends <span class="keyword">in</span> results[<span class="string">'ids'</span>]])</span><br><span class="line">            cursor = results[<span class="string">'next_cursor'</span>]</span><br><span class="line">            <span class="keyword">if</span> len(friends) &gt;= <span 
class="number">10000</span>:</span><br><span class="line">                <span class="keyword">break</span></span><br><span class="line">            <span class="keyword">if</span> cursor != <span class="number">0</span>:</span><br><span class="line">                print(<span class="string">"Collected &#123;&#125; friends so far, but there are more"</span>.format(len(friends)))</span><br><span class="line">                sys.stdout.flush</span><br><span class="line">        <span class="keyword">except</span> TypeError <span class="keyword">as</span> e:</span><br><span class="line">            <span class="keyword">if</span> results <span class="keyword">is</span> <span class="literal">None</span>:</span><br><span class="line">                print(<span class="string">"You probably reached your API limit, waiting for 5 minutes"</span>)</span><br><span class="line">                sys.stdout.flush()</span><br><span class="line">                time.sleep(<span class="number">5</span>*<span class="number">60</span>) <span class="comment"># 5 minute wait</span></span><br><span class="line">            <span class="keyword">else</span>:</span><br><span class="line">                <span class="keyword">raise</span> e</span><br><span class="line">        <span class="keyword">except</span> twitter.TwitterHTTPError <span class="keyword">as</span> e:</span><br><span class="line">            <span class="keyword">break</span></span><br><span class="line">        <span class="keyword">finally</span>:</span><br><span class="line">            time.sleep(<span class="number">60</span>)  <span class="comment"># Wait 1 minute before continuing</span></span><br><span class="line">    <span class="keyword">return</span> friends</span><br></pre></td></tr></table></figure>
</li>
<li><p>为加快网络构建，从现有用户-好友列表字典中计算每个好友的出现次数，降序排列，依次查看该好友是否已被查找，找到排名最高的未被查找的好友，进行搜索，更新字典，以此类推。</p>
</li>
<li><p>使用json保存/加载好友字典：</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br></pre></td><td class="code"><pre><span class="line"><span class="keyword">import</span> json</span><br><span class="line">friends_filename = os.path.join(data_folder, <span class="string">"python_friends.json"</span>)</span><br><span class="line"><span class="keyword">with</span> open(friends_filename, <span class="string">'w'</span>) <span class="keyword">as</span> outf:</span><br><span class="line">    json.dump(friends, outf)</span><br><span class="line"><span class="keyword">with</span> open(friends_filename) <span class="keyword">as</span> inf:</span><br><span class="line">    friends = json.load(inf)</span><br></pre></td></tr></table></figure>
</li>
<li><p>可以用Networkx库实现图关系的可视化。</p>
<ul>
<li><figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br></pre></td><td class="code"><pre><span class="line"><span class="comment"># 引入Networkx库，创建有向图</span></span><br><span class="line"><span class="keyword">import</span> networkx <span class="keyword">as</span> nx</span><br><span class="line">G = nx.DiGraph()</span><br></pre></td></tr></table></figure>
</li>
<li><figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br></pre></td><td class="code"><pre><span class="line"><span class="comment"># 添加顶点</span></span><br><span class="line">main_users = friends.keys()</span><br><span class="line">G.add_nodes_from(main_users)</span><br></pre></td></tr></table></figure>
</li>
<li><figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br></pre></td><td class="code"><pre><span class="line"><span class="comment"># 添加边</span></span><br><span class="line"><span class="keyword">for</span> user_id <span class="keyword">in</span> friends:</span><br><span class="line">    <span class="keyword">for</span> friend <span class="keyword">in</span> friends[user_id]:</span><br><span class="line">        <span class="keyword">if</span> friend <span class="keyword">in</span> main_users:</span><br><span class="line">           G.add_edge(user_id, friend)</span><br></pre></td></tr></table></figure>
</li>
<li><figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br></pre></td><td class="code"><pre><span class="line"><span class="comment"># 绘图</span></span><br><span class="line">%matplotlib inline</span><br><span class="line">nx.draw(G)</span><br></pre></td></tr></table></figure>
</li>
<li><figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br></pre></td><td class="code"><pre><span class="line"><span class="comment"># 使用plt放大图像</span></span><br><span class="line"><span class="keyword">from</span> matplotlib <span class="keyword">import</span> pyplot <span class="keyword">as</span> plt</span><br><span class="line">plt.figure(<span class="number">3</span>,figsize=(<span class="number">40</span>,<span class="number">40</span>))</span><br><span class="line">nx.draw(G, alpha=<span class="number">0.1</span>, edge_color=<span class="string">'b'</span>, node_color=<span class="string">'g'</span>, node_size=<span class="number">2000</span>)</span><br><span class="line">plt.axis(<span class="string">'on'</span>)</span><br><span class="line">plt.xlim(<span class="number">0.45</span>, <span class="number">0.55</span>)</span><br><span class="line">plt.ylim(<span class="number">0.45</span>, <span class="number">0.55</span>)</span><br></pre></td></tr></table></figure>
</li>
</ul>
</li>
<li><p>杰卡德相似系数：两个集合交集的元素数量除以两个集合并集的元素数量，范围为0到1，代表两者的重合比例。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br></pre></td><td class="code"><pre><span class="line"><span class="function"><span class="keyword">def</span> <span class="title">compute_similarity</span><span class="params">(friends1, friends2)</span>:</span></span><br><span class="line">    <span class="keyword">return</span> len(friends1 &amp; friends2) / len(friends1 | friends2)</span><br></pre></td></tr></table></figure>
</li>
<li><p>创建带杰卡德相似系数权重无向图：</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br></pre></td><td class="code"><pre><span class="line"><span class="function"><span class="keyword">def</span> <span class="title">create_graph</span><span class="params">(followers, threshold=<span class="number">0</span>)</span>:</span></span><br><span class="line">    G = nx.Graph()</span><br><span class="line">    <span class="keyword">for</span> user1 <span class="keyword">in</span> friends.keys():</span><br><span class="line">        <span class="keyword">for</span> user2 <span class="keyword">in</span> friends.keys():</span><br><span class="line">            <span class="keyword">if</span> user1 == user2:</span><br><span class="line">                <span class="keyword">continue</span></span><br><span class="line">            weight = compute_similarity(friends[user1], friends[user2])</span><br><span class="line">            <span class="keyword">if</span> weight &gt;= threshold:</span><br><span class="line">                G.add_node(user1)</span><br><span class="line">                G.add_node(user2)</span><br><span class="line">                G.add_edge(user1, user2, weight=weight)</span><br><span class="line">    <span class="keyword">return</span> G</span><br></pre></td></tr></table></figure>
</li>
<li><p>networkx中布局方式决定顶点和边的位置，常用布局方式有spring_layout，circular_layout，random_layout，shell_layout和spectral_layout。</p>
</li>
<li><p>根据布局方式，依次绘制顶点和边，获取权重数据。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br></pre></td><td class="code"><pre><span class="line">plt.figure(figsize=(<span class="number">10</span>,<span class="number">10</span>))</span><br><span class="line">pos = nx.spring_layout(G)</span><br><span class="line">nx.draw_networkx_nodes(G, pos)</span><br><span class="line">edgewidth = [ d[<span class="string">'weight'</span>] <span class="keyword">for</span> (u,v,d) <span class="keyword">in</span> G.edges(data=<span class="literal">True</span>)]</span><br><span class="line">nx.draw_networkx_edges(G, pos, width=edgewidth)</span><br></pre></td></tr></table></figure>
</li>
</ul>
<h3 id="7-2-寻找子图"><a href="#7-2-寻找子图" class="headerlink" title="7.2 寻找子图"></a>7.2 寻找子图</h3><ul>
<li><p>聚类分析：找出相似用户群，向他们定向投放广告。</p>
</li>
<li><p>聚类分析的复杂之处在于：</p>
<ul>
<li>缺乏评价结果的标准</li>
<li>没有事先标注的数据进行训练，得到的是近似的分组结果，而不是明确的分类。</li>
</ul>
</li>
<li><p>连通分支是图中由边连接在一起的一组顶点，不要求顶点两两相连，但任意两个顶点之间存在一条路径。连通分支的计算不考虑权重，只考虑边是否存在。</p>
</li>
<li><p>用networkx的函数寻找连通分支。<code>sub_graphs = nx.connected_component_subgraphs(G)</code></p>
</li>
<li><p>画出连通分支图。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br></pre></td><td class="code"><pre><span class="line">sub_graphs = nx.connected_component_subgraphs(G)</span><br><span class="line">nx.draw(list(sub_graphs)[<span class="number">6</span>])</span><br></pre></td></tr></table></figure>
</li>
<li><p><code>n_subgraphs = nx.number_connected_components(G)</code>计算连通分支数量。</p>
</li>
<li><p>画出所有连通分支。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br></pre></td><td class="code"><pre><span class="line">sub_graphs = nx.connected_component_subgraphs(G)</span><br><span class="line">n_subgraphs = nx.number_connected_components(G)</span><br><span class="line">fig = plt.figure(figsize=(<span class="number">20</span>, (n_subgraphs * <span class="number">2</span>)))</span><br><span class="line"><span class="keyword">for</span> i, sub_graph <span class="keyword">in</span> enumerate(sub_graphs):</span><br><span class="line">    ax = fig.add_subplot(int(n_subgraphs / <span class="number">2</span>), <span class="number">2</span>, i)</span><br><span class="line">    ax.get_xaxis().set_visible(<span class="literal">False</span>)</span><br><span class="line">    ax.get_yaxis().set_visible(<span class="literal">False</span>)</span><br><span class="line">    pos = nx.spring_layout(G)</span><br><span class="line">    nx.draw_networkx_nodes(G, pos, sub_graph.nodes(), ax=ax, node_size=<span class="number">500</span>)</span><br><span class="line">    nx.draw_networkx_edges(G, pos, sub_graph.edges(), ax=ax)</span><br></pre></td></tr></table></figure>
</li>
<li><p>聚类应使得：</p>
<ul>
<li>同一簇内的个体尽可能相似</li>
<li>不同簇内的个体尽可能不相似</li>
</ul>
</li>
<li><p>轮廓系数：<br>$$<br>s=\frac{b-a}{max(a,b)}<br>$$<br>a为簇内距离，表示与簇内其他个体之间的平均距离。b为簇间距离，也就是与最近簇内个体之间的平均距离。</p>
</li>
<li><p>总轮廓系数是每个个体轮廓系数的均值。接近1时，表示簇内相似度高，簇间很远；接近0时，表示所有簇重合在一起，簇间距离很小；接近-1时，表示个体分在错误的簇内。</p>
</li>
<li><p><code>from sklearn.metrics import silhouette_score</code>计算轮廓系数。</p>
</li>
<li><p>轮廓函数的定义要求至少有两个顶点，两个连通分支。</p>
</li>
<li><p>轮廓系数函数接收距离矩阵，因此要将图转换为距离矩阵。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br></pre></td><td class="code"><pre><span class="line">X = nx.to_scipy_sparse_matrix(G).todense()</span><br></pre></td></tr></table></figure>
</li>
<li><p>对于稀疏矩阵，应使用V-MEASURE或调整互信息进行评价。</p>
</li>
<li><p><code>silhouette_score(X, labels, metric=&#39;precomputed&#39;)</code>指定metric，避免X被认为是特征矩阵，而不是距离矩阵。</p>
</li>
<li><p>使用<code>from scipy.optimize import minimize</code>自动调整参数优化。要求变量在其他参数前面，即：</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br><span class="line">13</span><br><span class="line">14</span><br><span class="line">15</span><br></pre></td><td class="code"><pre><span class="line"><span class="function"><span class="keyword">def</span> <span class="title">compute_silhouette</span><span class="params">(threshold, friends)</span>:</span></span><br><span class="line">    G = create_graph(friends, threshold=threshold)</span><br><span class="line">    <span class="keyword">if</span> len(G.nodes()) == <span class="number">0</span>:</span><br><span class="line">        <span class="keyword">return</span> <span class="number">-99</span>  <span class="comment"># Invalid graph</span></span><br><span class="line">    sub_graphs = nx.connected_component_subgraphs(G)</span><br><span class="line">    <span class="keyword">if</span> <span class="keyword">not</span> (<span class="number">2</span> &lt;= nx.number_connected_components(G) &lt; len(G.nodes()) - <span class="number">1</span>):</span><br><span class="line">        <span class="keyword">return</span> <span class="number">-99</span>  <span class="comment"># Invalid number of components, Silhouette not defined</span></span><br><span class="line">    label_dict = &#123;&#125;</span><br><span class="line">    <span class="keyword">for</span> i, sub_graph <span class="keyword">in</span> enumerate(sub_graphs):</span><br><span class="line">        <span class="keyword">for</span> node <span class="keyword">in</span> sub_graph.nodes():</span><br><span class="line">            label_dict[node] = i</span><br><span class="line"> 
   labels = np.array([label_dict[node] <span class="keyword">for</span> node <span class="keyword">in</span> G.nodes()])</span><br><span class="line">    X = nx.to_scipy_sparse_matrix(G).todense()</span><br><span class="line">    X = <span class="number">1</span> - X</span><br><span class="line">    <span class="keyword">return</span> silhouette_score(X, labels, metric=<span class="string">'precomputed'</span>)</span><br></pre></td></tr></table></figure>
</li>
<li><p>minimize是调整参数使得函数返回值最小，这里要求轮廓系数最大，因此要取反，将其变为损失函数。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br></pre></td><td class="code"><pre><span class="line"><span class="function"><span class="keyword">def</span> <span class="title">invert</span><span class="params">(func)</span>:</span></span><br><span class="line">    <span class="function"><span class="keyword">def</span> <span class="title">inverted_function</span><span class="params">(*args, **kwds)</span>:</span></span><br><span class="line">        <span class="keyword">return</span> -func(*args, **kwds)</span><br><span class="line">    <span class="keyword">return</span> inverted_function</span><br></pre></td></tr></table></figure>
</li>
<li><p>设定初始阈值为0.1，设定优化方法为下山单纯形法，设定附加参数为friends字典，设定最大迭代次数为10。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br></pre></td><td class="code"><pre><span class="line">result = minimize(invert(compute_silhouette), <span class="number">0.1</span>, method=<span class="string">'nelder-mead'</span>, args=(friends,), options=&#123;<span class="string">'maxiter'</span>:<span class="number">10</span>, &#125;)</span><br></pre></td></tr></table></figure>
<p>返回结果中的x的最佳阈值大小。</p>
</li>
</ul>
<h2 id="第八章-用神经网络破解验证码"><a href="#第八章-用神经网络破解验证码" class="headerlink" title="第八章 用神经网络破解验证码"></a>第八章 用神经网络破解验证码</h2><h3 id="8-1-人工神经网络"><a href="#8-1-人工神经网络" class="headerlink" title="8.1 人工神经网络"></a>8.1 人工神经网络</h3><ul>
<li><p>神经网络由一系列相互连接的神经元组成，每个神经元都是一个简单的函数，接收一定输入，给出相应输出。</p>
</li>
<li><p>神经元中用于处理数据的标准函数被称为激活函数。</p>
</li>
<li><p>激活函数应是可导和光滑的。常用的激活函数，如逻辑斯蒂函数：<br>$$<br>f(x)=\frac{L}{1+e^{-k(x-x_0)}}<br>$$</p>
</li>
<li><p>全连接层：上一层每个神经元的输出都输入到下一层的所有神经元。</p>
</li>
<li><p>边的权重开始时通常是随机选取的，训练过程中再逐步更新。</p>
</li>
</ul>
<h3 id="8-2-创建数据集"><a href="#8-2-创建数据集" class="headerlink" title="8.2 创建数据集"></a>8.2 创建数据集</h3><ul>
<li><p>用PIL库的Image初始化图像对象，ImageDraw初始化绘图对象，ImageFont初始化字体对象。用skimage的transform进行图像错切变化。返回归一化结果。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br></pre></td><td class="code"><pre><span class="line"><span class="function"><span class="keyword">def</span> <span class="title">create_captcha</span><span class="params">(text, shear=<span class="number">0</span>, size=<span class="params">(<span class="number">100</span>,<span class="number">24</span>)</span>)</span>:</span></span><br><span class="line">    im = Image.new(<span class="string">"L"</span>, size, <span class="string">"black"</span>)</span><br><span class="line">    draw = ImageDraw.Draw(im)</span><br><span class="line">    font = ImageFont.truetype(<span class="string">r"Coval.otf"</span>, <span class="number">22</span>)</span><br><span class="line">    draw.text((<span class="number">2</span>, <span class="number">2</span>), text, fill=<span class="number">1</span>, font=font)</span><br><span class="line">    image = np.array(im)</span><br><span class="line">    affine_tf = tf.AffineTransform(shear=shear)</span><br><span class="line">    image = tf.warp(image, affine_tf)</span><br><span class="line">    <span class="keyword">return</span> image / image.max()</span><br></pre></td></tr></table></figure>
</li>
<li><p>skimage中的label函数能找出图像中像素值相同且连接在一起的像素块。输入输出均为图像数组，返回的数组中，连接在一起的区域是大于0的值，每个区域的值不同，其他区域为0值。</p>
</li>
<li><p>skimage的regionprops能抽取连续区域，属性.bbox返回区域的起始结束横纵坐标。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br></pre></td><td class="code"><pre><span class="line"><span class="function"><span class="keyword">def</span> <span class="title">segment_image</span><span class="params">(image)</span>:</span></span><br><span class="line">    labeled_image = label(image &gt; <span class="number">0</span>)</span><br><span class="line">    subimages = []</span><br><span class="line">    <span class="keyword">for</span> region <span class="keyword">in</span> regionprops(labeled_image):</span><br><span class="line">        start_x, start_y, end_x, end_y = region.bbox</span><br><span class="line">        subimages.append(image[start_x:end_x, start_y:end_y])</span><br><span class="line">    <span class="keyword">if</span> len(subimages) == <span class="number">0</span>:</span><br><span class="line">        <span class="keyword">return</span> [image,]</span><br><span class="line">    <span class="keyword">return</span> subimages</span><br></pre></td></tr></table></figure>
</li>
<li><p>利用subplots返回的坐标起点，画图。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br></pre></td><td class="code"><pre><span class="line">f, axes = plt.subplots(<span class="number">1</span>, len(subimages), figsize=(<span class="number">10</span>, <span class="number">3</span>))</span><br><span class="line"><span class="keyword">for</span> i <span class="keyword">in</span> range(len(subimages)):</span><br><span class="line">    axes[i].imshow(subimages[i], cmap=<span class="string">"gray"</span>)</span><br></pre></td></tr></table></figure>
</li>
<li><p>使用<code>from sklearn.utils import check_random_state</code>随机选取字母和错切值。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br></pre></td><td class="code"><pre><span class="line"><span class="function"><span class="keyword">def</span> <span class="title">generate_sample</span><span class="params">(random_state=None)</span>:</span></span><br><span class="line">    random_state = check_random_state(random_state)</span><br><span class="line">    letter = random_state.choice(letters)</span><br><span class="line">    shear = random_state.choice(shear_values)</span><br><span class="line">    <span class="keyword">return</span> create_captcha(letter, shear=shear, size=(<span class="number">20</span>, <span class="number">20</span>)), letters.index(letter)</span><br></pre></td></tr></table></figure>
</li>
<li><p>用zip函数将3000次采样数据组合。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br></pre></td><td class="code"><pre><span class="line">dataset, targets = zip(*(generate_sample(random_state) <span class="keyword">for</span> i <span class="keyword">in</span> range(<span class="number">3000</span>)))</span><br></pre></td></tr></table></figure>
</li>
<li><p>使用一位有效码编码使得神经网络有26个输出神经元，对应字母的神经元输出为1，否则为0。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br></pre></td><td class="code"><pre><span class="line"><span class="keyword">from</span> sklearn.preprocessing <span class="keyword">import</span> OneHotEncoder</span><br><span class="line">onehot = OneHotEncoder()</span><br><span class="line">y = onehot.fit_transform(targets.reshape(targets.shape[<span class="number">0</span>],<span class="number">1</span>))</span><br></pre></td></tr></table></figure>
</li>
<li><p>用todense将稀疏矩阵转换为密集矩阵。</p>
</li>
<li><p>使用<code>from skimage.transform import resize</code>改变图像大小，将不规整的图像调到相同大小。</p>
</li>
<li><p><code>X = dataset.reshape((dataset.shape[0], dataset.shape[1] *dataset.shape[2]))</code>将数组扁平化为二维。</p>
</li>
</ul>
<h3 id="8-3-训练和分类"><a href="#8-3-训练和分类" class="headerlink" title="8.3 训练和分类"></a>8.3 训练和分类</h3><ul>
<li><p>使用pybrain进行神经网络的构建，pybrain有自己的数据格式，转换数据格式。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br></pre></td><td class="code"><pre><span class="line"><span class="keyword">from</span> pybrain.datasets <span class="keyword">import</span> SupervisedDataSet</span><br><span class="line">training = SupervisedDataSet(X.shape[<span class="number">1</span>], y.shape[<span class="number">1</span>])</span><br><span class="line"><span class="keyword">for</span> i <span class="keyword">in</span> range(X_train.shape[<span class="number">0</span>]):</span><br><span class="line">    training.addSample(X_train[i], y_train[i])</span><br></pre></td></tr></table></figure>
</li>
<li><p>隐含层的神经元过多会导致过拟合，过少会导致欠拟合。</p>
</li>
<li><p>导入buildNetwork函数，指定维度，创建神经网络。第一二三个参数分别为三层网络神经元的数量。激活偏置神经元。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br></pre></td><td class="code"><pre><span class="line"><span class="keyword">from</span> pybrain.tools.shortcuts <span class="keyword">import</span> buildNetwork</span><br><span class="line">net = buildNetwork(X.shape[<span class="number">1</span>], <span class="number">100</span>, y.shape[<span class="number">1</span>], bias=<span class="literal">True</span>)</span><br></pre></td></tr></table></figure>
</li>
<li><p>反向传播算法从输出层开始，向上层查找预测错误的神经元，微调这些神经元输入值的权重，以达到修复输出错误的目的。</p>
</li>
<li><p>微调的幅度取决于神经元各边权重的偏导数和学习速率。计算出函数误差的梯度乘以学习速率，就是原权重需要下调的幅度。</p>
</li>
<li><p>有些情况下，修正的结果仅是局部最优。</p>
</li>
<li><p>反向传播算法，限定反向传播次数为20：</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br></pre></td><td class="code"><pre><span class="line"><span class="keyword">from</span> pybrain.supervised <span class="keyword">import</span> BackpropTrainer</span><br><span class="line">trainer = BackpropTrainer(net, training, learningrate=<span class="number">0.01</span>, weightdecay=<span class="number">0.01</span>)</span><br><span class="line">trainer.trainEpochs(epochs=<span class="number">20</span>)</span><br></pre></td></tr></table></figure>
</li>
<li><p>在trainer上调用testOnClassData函数，预测分类结果。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br></pre></td><td class="code"><pre><span class="line">predictions = trainer.testOnClassData(dataset=testing)</span><br></pre></td></tr></table></figure>
</li>
<li><p>输入验证码图片，预测验证码。用activate函数激活神经网络，输入化为一维的子图片数据。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br></pre></td><td class="code"><pre><span class="line"><span class="function"><span class="keyword">def</span> <span class="title">predict_captcha</span><span class="params">(captcha_image, neural_network)</span>:</span></span><br><span class="line">    subimages = segment_image(captcha_image)</span><br><span class="line">    predicted_word = <span class="string">""</span></span><br><span class="line">    <span class="keyword">for</span> subimage <span class="keyword">in</span> subimages:</span><br><span class="line">        subimage = resize(subimage, (<span class="number">20</span>, <span class="number">20</span>))</span><br><span class="line">        outputs = net.activate(subimage.flatten())</span><br><span class="line">        prediction = np.argmax(outputs)</span><br><span class="line">        predicted_word += letters[prediction]</span><br><span class="line">    <span class="keyword">return</span> predicted_word</span><br></pre></td></tr></table></figure>
</li>
<li><p>从nltk语料库中下载words语料库，从中找出长度为4的单词，并大写化。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br></pre></td><td class="code"><pre><span class="line"><span class="keyword">import</span> nltk</span><br><span class="line">nltk.download(<span class="string">'words'</span>)</span><br><span class="line">valid_words = [word.upper() <span class="keyword">for</span> word <span class="keyword">in</span> words.words() <span class="keyword">if</span> len(word) == <span class="number">4</span>]</span><br></pre></td></tr></table></figure>
</li>
<li><p>用二维混淆矩阵表现预测的正确率和召回率。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br></pre></td><td class="code"><pre><span class="line"><span class="keyword">from</span> sklearn.metrics <span class="keyword">import</span> confusion_matrix</span><br><span class="line">cm = confusion_matrix(np.argmax(y_test, axis=<span class="number">1</span>), predictions)</span><br></pre></td></tr></table></figure>
</li>
<li><p>画出混淆矩阵，并标出坐标。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br></pre></td><td class="code"><pre><span class="line">plt.figure(figsize=(<span class="number">20</span>,<span class="number">20</span>))</span><br><span class="line">tick_marks = np.arange(len(letters))</span><br><span class="line">plt.xticks(tick_marks, letters)</span><br><span class="line">plt.yticks(tick_marks, letters)</span><br><span class="line">plt.ylabel(<span class="string">'Actual'</span>)</span><br><span class="line">plt.xlabel(<span class="string">'Predicted'</span>)</span><br><span class="line">plt.imshow(cm, cmap=<span class="string">"Blues"</span>)</span><br></pre></td></tr></table></figure>
</li>
</ul>
<h3 id="8-4-用词典提升正确率"><a href="#8-4-用词典提升正确率" class="headerlink" title="8.4 用词典提升正确率"></a>8.4 用词典提升正确率</h3><ul>
<li><p>先检查一下词典内是否包含该单词，包含则直接输出，否则查找相似的单词，作为更新过的预测结果返回。</p>
</li>
<li><p>列文斯坦编辑距离适用于确定两个短字符串的相似度，计算一个单词变成另一个单词的步骤数，步骤数越少越相似。以下操作算一步：</p>
<ul>
<li>在单词的任意位置插入一个字母</li>
<li>从单词中删除任意一个字母</li>
<li>把一个字母替换为另一个字母</li>
</ul>
</li>
<li><p>nltk中实现了编辑距离算法。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br></pre></td><td class="code"><pre><span class="line"><span class="keyword">from</span> nltk.metrics <span class="keyword">import</span> edit_distance</span><br><span class="line">steps = edit_distance(<span class="string">"STEP"</span>, <span class="string">"STOP"</span>)</span><br><span class="line">print(<span class="string">"The number of steps needed is: &#123;0&#125;"</span>.format(steps))</span><br></pre></td></tr></table></figure>
</li>
<li><p>在字符串等长的情况下，另一种方法是直接计算相同位置不相同的字符数。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br></pre></td><td class="code"><pre><span class="line"><span class="function"><span class="keyword">def</span> <span class="title">compute_distance</span><span class="params">(prediction, word)</span>:</span></span><br><span class="line">    <span class="keyword">return</span> len(prediction) - sum(prediction[i] == word[i] <span class="keyword">for</span> i <span class="keyword">in</span> range(len(prediction)))</span><br></pre></td></tr></table></figure>
</li>
<li><p>改进后的预测函数：</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br></pre></td><td class="code"><pre><span class="line"><span class="keyword">from</span> operator <span class="keyword">import</span> itemgetter</span><br><span class="line"><span class="function"><span class="keyword">def</span> <span class="title">improved_prediction</span><span class="params">(word, net, dictionary, shear=<span class="number">0.2</span>)</span>:</span></span><br><span class="line">    captcha = create_captcha(word, shear=shear)</span><br><span class="line">    prediction = predict_captcha(captcha, net)</span><br><span class="line">    prediction = prediction[:<span class="number">4</span>]</span><br><span class="line">    <span class="keyword">if</span> prediction <span class="keyword">not</span> <span class="keyword">in</span> dictionary:</span><br><span class="line">        distances = sorted([(word, compute_distance(prediction, word))</span><br><span class="line">                            <span class="keyword">for</span> word <span class="keyword">in</span> dictionary],</span><br><span class="line">                           key=itemgetter(<span class="number">1</span>))</span><br><span class="line">        best_word = distances[<span class="number">0</span>]</span><br><span class="line">        prediction = best_word[<span class="number">0</span>]</span><br><span class="line">    <span class="keyword">return</span> word == prediction, word, prediction</span><br></pre></td></tr></table></figure>
</li>
</ul>
<h2 id="第九章-作者归属问题"><a href="#第九章-作者归属问题" class="headerlink" title="第九章 作者归属问题"></a>第九章 作者归属问题</h2><h3 id="9-1-为作品找作者"><a href="#9-1-为作品找作者" class="headerlink" title="9.1 为作品找作者"></a>9.1 为作品找作者</h3><ul>
<li><p>作者分析的目标是只根据作品内容找出作者独有的特点，作者分析包括以下问题：</p>
<ul>
<li>作者归属：从一组可能的作者中找到文档真正的主人。</li>
<li>作者画像：根据作品界定作者的年龄、性别或其他特征。</li>
<li>作者验证：根据作者已有作品，推断其他作品是否也是他写的。</li>
<li>作者聚类：用聚类分析方法把作品按照作者进行分类。</li>
</ul>
</li>
<li><p>作者归属问题中，已知一部分作者，训练集为多个作者的作品，目标是确定一组作者不详的作品是谁写的。如果作者恰好是已知作者，叫封闭问题。否则叫开放问题。</p>
</li>
<li><p>任何数据挖掘问题，若实际类别不在训练集中，则叫开放问题，要给出不属于任何已知类别的提示。</p>
</li>
<li><p>进行作者归属研究，要求：</p>
<ul>
<li>只能使用作品内容</li>
<li>不考虑作品主题，关注单词用法、标点和其他文本特征。</li>
</ul>
</li>
<li><p>文档中有很多噪音，比如作品前的声明文字，因此要删去这些噪音。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br></pre></td><td class="code"><pre><span class="line"><span class="function"><span class="keyword">def</span> <span class="title">clean_book</span><span class="params">(document)</span>:</span></span><br><span class="line">    lines = document.split(<span class="string">"\n"</span>)</span><br><span class="line">    start= <span class="number">0</span></span><br><span class="line">    end = len(lines)</span><br><span class="line">    <span class="keyword">for</span> i <span class="keyword">in</span> range(len(lines)):</span><br><span class="line">        line = lines[i]</span><br><span class="line">        <span class="keyword">if</span> line.startswith(<span class="string">"*** START OF THIS PROJECT GUTENBERG"</span>):</span><br><span class="line">            start = i + <span class="number">1</span></span><br><span class="line">        <span class="keyword">elif</span> line.startswith(<span class="string">"*** END OF THIS PROJECT GUTENBERG"</span>):</span><br><span class="line">            end = i - <span class="number">1</span></span><br><span class="line">    <span class="keyword">return</span> <span class="string">"\n"</span>.join(lines[start:end])</span><br></pre></td></tr></table></figure>
</li>
<li><p>将文档清理并保存到列表中。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br></pre></td><td class="code"><pre><span class="line"><span class="function"><span class="keyword">def</span> <span class="title">load_books_data</span><span class="params">(folder=data_folder)</span>:</span></span><br><span class="line">    documents = []</span><br><span class="line">    authors = []</span><br><span class="line">    subfolders = [subfolder <span class="keyword">for</span> subfolder <span class="keyword">in</span> os.listdir(folder)</span><br><span class="line">                  <span class="keyword">if</span> os.path.isdir(os.path.join(folder, subfolder))]</span><br><span class="line">    <span class="keyword">for</span> author_number, subfolder <span class="keyword">in</span> enumerate(subfolders):</span><br><span class="line">        full_subfolder_path = os.path.join(folder, subfolder)</span><br><span class="line">        <span class="keyword">for</span> document_name <span class="keyword">in</span> os.listdir(full_subfolder_path):</span><br><span class="line">            <span class="keyword">with</span> open(os.path.join(full_subfolder_path, document_name)) <span class="keyword">as</span> inf:</span><br><span class="line">                documents.append(clean_book(inf.read()))</span><br><span class="line">                authors.append(author_number)</span><br><span class="line">    <span class="keyword">return</span> documents, np.array(authors, dtype=<span class="string">'int'</span>)</span><br></pre></td></tr></table></figure>
</li>
<li><p>如果数据集过大，无法一次加载到内存中，要每次从一篇或几篇文档中抽取特征，把特征保存在矩阵中。</p>
</li>
</ul>
<h3 id="9-2-功能词"><a href="#9-2-功能词" class="headerlink" title="9.2 功能词"></a>9.2 功能词</h3><ul>
<li><p>功能词：指本身具有很少含义，却是组成句子必不可少的成分。如this和which。与功能词相对的是实词。</p>
</li>
<li><p>通常来讲，使用越频繁的单词，对于作者分析越能提供更多有价值的信息。</p>
</li>
<li><p>功能词的使用通常不是由文档内容而是由作者的使用习惯所决定的，因此可以用来区分作者归属，如美国人在意区分that和which，而澳大利亚人不在意。</p>
</li>
<li><p>功能词词汇表：</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br><span class="line">13</span><br><span class="line">14</span><br><span class="line">15</span><br><span class="line">16</span><br><span class="line">17</span><br><span class="line">18</span><br><span class="line">19</span><br><span class="line">20</span><br><span class="line">21</span><br><span class="line">22</span><br><span class="line">23</span><br><span class="line">24</span><br><span class="line">25</span><br><span class="line">26</span><br><span class="line">27</span><br><span class="line">28</span><br><span class="line">29</span><br><span class="line">30</span><br><span class="line">31</span><br><span class="line">32</span><br><span class="line">33</span><br><span class="line">34</span><br><span class="line">35</span><br><span class="line">36</span><br><span class="line">37</span><br><span class="line">38</span><br><span class="line">39</span><br><span class="line">40</span><br><span class="line">41</span><br><span class="line">42</span><br><span class="line">43</span><br><span class="line">44</span><br><span class="line">45</span><br><span class="line">46</span><br><span class="line">47</span><br></pre></td><td class="code"><pre><span class="line">function_words = [<span class="string">"a"</span>, <span class="string">"able"</span>, <span class="string">"aboard"</span>, <span class="string">"about"</span>, <span class="string">"above"</span>, <span class="string">"absent"</span>,</span><br><span class="line">                  <span class="string">"according"</span> , <span class="string">"accordingly"</span>, <span 
class="string">"across"</span>, <span class="string">"after"</span>, <span class="string">"against"</span>,</span><br><span class="line">                  <span class="string">"ahead"</span>, <span class="string">"albeit"</span>, <span class="string">"all"</span>, <span class="string">"along"</span>, <span class="string">"alongside"</span>, <span class="string">"although"</span>,</span><br><span class="line">                  <span class="string">"am"</span>, <span class="string">"amid"</span>, <span class="string">"amidst"</span>, <span class="string">"among"</span>, <span class="string">"amongst"</span>, <span class="string">"amount"</span>, <span class="string">"an"</span>,</span><br><span class="line">                    <span class="string">"and"</span>, <span class="string">"another"</span>, <span class="string">"anti"</span>, <span class="string">"any"</span>, <span class="string">"anybody"</span>, <span class="string">"anyone"</span>,</span><br><span class="line">                    <span class="string">"anything"</span>, <span class="string">"are"</span>, <span class="string">"around"</span>, <span class="string">"as"</span>, <span class="string">"aside"</span>, <span class="string">"astraddle"</span>,</span><br><span class="line">                    <span class="string">"astride"</span>, <span class="string">"at"</span>, <span class="string">"away"</span>, <span class="string">"bar"</span>, <span class="string">"barring"</span>, <span class="string">"be"</span>, <span class="string">"because"</span>,</span><br><span class="line">                    <span class="string">"been"</span>, <span class="string">"before"</span>, <span class="string">"behind"</span>, <span class="string">"being"</span>, <span class="string">"below"</span>, <span class="string">"beneath"</span>,</span><br><span class="line">                    <span class="string">"beside"</span>, <span class="string">"besides"</span>, <span class="string">"better"</span>, <span 
class="string">"between"</span>, <span class="string">"beyond"</span>, <span class="string">"bit"</span>,</span><br><span class="line">                    <span class="string">"both"</span>, <span class="string">"but"</span>, <span class="string">"by"</span>, <span class="string">"can"</span>, <span class="string">"certain"</span>, <span class="string">"circa"</span>, <span class="string">"close"</span>,</span><br><span class="line">                    <span class="string">"concerning"</span>, <span class="string">"consequently"</span>, <span class="string">"considering"</span>, <span class="string">"could"</span>,</span><br><span class="line">                    <span class="string">"couple"</span>, <span class="string">"dare"</span>, <span class="string">"deal"</span>, <span class="string">"despite"</span>, <span class="string">"down"</span>, <span class="string">"due"</span>, <span class="string">"during"</span>,</span><br><span class="line">                    <span class="string">"each"</span>, <span class="string">"eight"</span>, <span class="string">"eighth"</span>, <span class="string">"either"</span>, <span class="string">"enough"</span>, <span class="string">"every"</span>,</span><br><span class="line">                    <span class="string">"everybody"</span>, <span class="string">"everyone"</span>, <span class="string">"everything"</span>, <span class="string">"except"</span>, <span class="string">"excepting"</span>,</span><br><span class="line">                    <span class="string">"excluding"</span>, <span class="string">"failing"</span>, <span class="string">"few"</span>, <span class="string">"fewer"</span>, <span class="string">"fifth"</span>, <span class="string">"first"</span>,</span><br><span class="line">                    <span class="string">"five"</span>, <span class="string">"following"</span>, <span class="string">"for"</span>, <span class="string">"four"</span>, <span class="string">"fourth"</span>, <span class="string">"from"</span>, 
<span class="string">"front"</span>,</span><br><span class="line">                    <span class="string">"given"</span>, <span class="string">"good"</span>, <span class="string">"great"</span>, <span class="string">"had"</span>, <span class="string">"half"</span>, <span class="string">"have"</span>, <span class="string">"he"</span>,</span><br><span class="line">                    <span class="string">"heaps"</span>, <span class="string">"hence"</span>, <span class="string">"her"</span>, <span class="string">"hers"</span>, <span class="string">"herself"</span>, <span class="string">"him"</span>, <span class="string">"himself"</span>,</span><br><span class="line">                    <span class="string">"his"</span>, <span class="string">"however"</span>, <span class="string">"i"</span>, <span class="string">"if"</span>, <span class="string">"in"</span>, <span class="string">"including"</span>, <span class="string">"inside"</span>,</span><br><span class="line">                    <span class="string">"instead"</span>, <span class="string">"into"</span>, <span class="string">"is"</span>, <span class="string">"it"</span>, <span class="string">"its"</span>, <span class="string">"itself"</span>, <span class="string">"keeping"</span>,</span><br><span class="line">                    <span class="string">"lack"</span>, <span class="string">"less"</span>, <span class="string">"like"</span>, <span class="string">"little"</span>, <span class="string">"loads"</span>, <span class="string">"lots"</span>, <span class="string">"majority"</span>,</span><br><span class="line">                    <span class="string">"many"</span>, <span class="string">"masses"</span>, <span class="string">"may"</span>, <span class="string">"me"</span>, <span class="string">"might"</span>, <span class="string">"mine"</span>, <span class="string">"minority"</span>,</span><br><span class="line">                    <span class="string">"minus"</span>, <span class="string">"more"</span>, <span 
class="string">"most"</span>, <span class="string">"much"</span>, <span class="string">"must"</span>, <span class="string">"my"</span>, <span class="string">"myself"</span>,</span><br><span class="line">                    <span class="string">"near"</span>, <span class="string">"need"</span>, <span class="string">"neither"</span>, <span class="string">"nevertheless"</span>, <span class="string">"next"</span>, <span class="string">"nine"</span>,</span><br><span class="line">                    <span class="string">"ninth"</span>, <span class="string">"no"</span>, <span class="string">"nobody"</span>, <span class="string">"none"</span>, <span class="string">"nor"</span>, <span class="string">"nothing"</span>,</span><br><span class="line">                    <span class="string">"notwithstanding"</span>, <span class="string">"number"</span>, <span class="string">"numbers"</span>, <span class="string">"of"</span>, <span class="string">"off"</span>, <span class="string">"on"</span>,</span><br><span class="line">                    <span class="string">"once"</span>, <span class="string">"one"</span>, <span class="string">"onto"</span>, <span class="string">"opposite"</span>, <span class="string">"or"</span>, <span class="string">"other"</span>, <span class="string">"ought"</span>,</span><br><span class="line">                    <span class="string">"our"</span>, <span class="string">"ours"</span>, <span class="string">"ourselves"</span>, <span class="string">"out"</span>, <span class="string">"outside"</span>, <span class="string">"over"</span>, <span class="string">"part"</span>,</span><br><span class="line">                    <span class="string">"past"</span>, <span class="string">"pending"</span>, <span class="string">"per"</span>, <span class="string">"pertaining"</span>, <span class="string">"place"</span>, <span class="string">"plenty"</span>,</span><br><span class="line">                    <span class="string">"plethora"</span>, <span 
class="string">"plus"</span>, <span class="string">"quantities"</span>, <span class="string">"quantity"</span>, <span class="string">"quarter"</span>,</span><br><span class="line">                    <span class="string">"regarding"</span>, <span class="string">"remainder"</span>, <span class="string">"respecting"</span>, <span class="string">"rest"</span>, <span class="string">"round"</span>,</span><br><span class="line">                    <span class="string">"save"</span>, <span class="string">"saving"</span>, <span class="string">"second"</span>, <span class="string">"seven"</span>, <span class="string">"seventh"</span>, <span class="string">"several"</span>,</span><br><span class="line">                    <span class="string">"shall"</span>, <span class="string">"she"</span>, <span class="string">"should"</span>, <span class="string">"similar"</span>, <span class="string">"since"</span>, <span class="string">"six"</span>, <span class="string">"sixth"</span>,</span><br><span class="line">                    <span class="string">"so"</span>, <span class="string">"some"</span>, <span class="string">"somebody"</span>, <span class="string">"someone"</span>, <span class="string">"something"</span>, <span class="string">"spite"</span>,</span><br><span class="line">                    <span class="string">"such"</span>, <span class="string">"ten"</span>, <span class="string">"tenth"</span>, <span class="string">"than"</span>, <span class="string">"thanks"</span>, <span class="string">"that"</span>, <span class="string">"the"</span>,</span><br><span class="line">                    <span class="string">"their"</span>, <span class="string">"theirs"</span>, <span class="string">"them"</span>, <span class="string">"themselves"</span>, <span class="string">"then"</span>, <span class="string">"thence"</span>,</span><br><span class="line">                  <span class="string">"therefore"</span>, <span class="string">"these"</span>, <span class="string">"they"</span>, 
<span class="string">"third"</span>, <span class="string">"this"</span>, <span class="string">"those"</span>,</span><br><span class="line"><span class="string">"though"</span>, <span class="string">"three"</span>, <span class="string">"through"</span>, <span class="string">"throughout"</span>, <span class="string">"thru"</span>, <span class="string">"thus"</span>,</span><br><span class="line"><span class="string">"till"</span>, <span class="string">"time"</span>, <span class="string">"to"</span>, <span class="string">"tons"</span>, <span class="string">"top"</span>, <span class="string">"toward"</span>, <span class="string">"towards"</span>,</span><br><span class="line"><span class="string">"two"</span>, <span class="string">"under"</span>, <span class="string">"underneath"</span>, <span class="string">"unless"</span>, <span class="string">"unlike"</span>, <span class="string">"until"</span>,</span><br><span class="line"><span class="string">"unto"</span>, <span class="string">"up"</span>, <span class="string">"upon"</span>, <span class="string">"us"</span>, <span class="string">"used"</span>, <span class="string">"various"</span>, <span class="string">"versus"</span>,</span><br><span class="line"><span class="string">"via"</span>, <span class="string">"view"</span>, <span class="string">"wanting"</span>, <span class="string">"was"</span>, <span class="string">"we"</span>, <span class="string">"were"</span>, <span class="string">"what"</span>,</span><br><span class="line"><span class="string">"whatever"</span>, <span class="string">"when"</span>, <span class="string">"whenever"</span>, <span class="string">"where"</span>, <span class="string">"whereas"</span>,</span><br><span class="line"><span class="string">"wherever"</span>, <span class="string">"whether"</span>, <span class="string">"which"</span>, <span class="string">"whichever"</span>, <span class="string">"while"</span>,</span><br><span class="line">                  <span class="string">"whilst"</span>, 
<span class="string">"who"</span>, <span class="string">"whoever"</span>, <span class="string">"whole"</span>, <span class="string">"whom"</span>, <span class="string">"whomever"</span>,</span><br><span class="line"><span class="string">"whose"</span>, <span class="string">"will"</span>, <span class="string">"with"</span>, <span class="string">"within"</span>, <span class="string">"without"</span>, <span class="string">"would"</span>, <span class="string">"yet"</span>,</span><br><span class="line"><span class="string">"you"</span>, <span class="string">"your"</span>, <span class="string">"yours"</span>, <span class="string">"yourself"</span>, <span class="string">"yourselves"</span>]</span><br></pre></td></tr></table></figure>
</li>
<li><p>使用<code>CountVectorizer</code>抽取词频特征。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br></pre></td><td class="code"><pre><span class="line"><span class="keyword">from</span> sklearn.feature_extraction.text <span class="keyword">import</span> CountVectorizer</span><br><span class="line">extractor = CountVectorizer(vocabulary=function_words)</span><br></pre></td></tr></table></figure>
</li>
<li><p>设置支持向量机参数，创建分类器实例。高斯内核如rbf，只适用于特征数小于10000的情况。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br></pre></td><td class="code"><pre><span class="line"><span class="keyword">from</span> sklearn.svm <span class="keyword">import</span> SVC</span><br><span class="line">parameters = &#123;<span class="string">'kernel'</span>:(<span class="string">'linear'</span>, <span class="string">'rbf'</span>), <span class="string">'C'</span>:[<span class="number">1</span>, <span class="number">10</span>]&#125;</span><br><span class="line">svr = SVC()</span><br></pre></td></tr></table></figure>
</li>
<li><p>使用网格搜索法寻找最优参数值。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br></pre></td><td class="code"><pre><span class="line"><span class="keyword">from</span> sklearn <span class="keyword">import</span> grid_search</span><br><span class="line">grid = grid_search.GridSearchCV(svr, parameters)</span><br></pre></td></tr></table></figure>
</li>
<li><p>组建流水线。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br></pre></td><td class="code"><pre><span class="line">pipeline1 = Pipeline([(<span class="string">'feature_extraction'</span>, extractor),</span><br><span class="line">                      (<span class="string">'clf'</span>, grid)</span><br><span class="line">                     ])</span><br></pre></td></tr></table></figure>
</li>
</ul>
<h3 id="9-3-支持向量机"><a href="#9-3-支持向量机" class="headerlink" title="9.3 支持向量机"></a>9.3 支持向量机</h3><ul>
<li>支持向量机是一种二类分类器。SVM要做的是找到最佳的一条线，分割开两个类别的数据，让各点到分割线之间的距离最大化，用它做预测。</li>
<li>对于多分类问题，就创建多个分类器，最简单的方法是分为1对多，即特定类和其他类。</li>
<li><code>from sklearn.svm import SVC</code>的参数：<ul>
<li>C参数：与分类器正确分类的比例有关，过高可能过拟合，过小分类结果可能较差。</li>
<li>kernel参数：指定内核函数。如果数据线性不可分，则要加入伪特征将其置入高维空间，直到其线性可分。寻找最佳分割线时，需要计算个体之间的点积，使用点积函数可以创建新特征而无需实际定义这些特征。因此内核函数定义为数据集中两个个体函数的点积。内核有三种：线性内核，多项式内核，高斯内核。</li>
</ul>
</li>
</ul>
<h3 id="9-4-字符N元语法"><a href="#9-4-字符N元语法" class="headerlink" title="9.4 字符N元语法"></a>9.4 字符N元语法</h3><ul>
<li><p>N元语法由一系列N个为一组的对象组成，N通常为2到6之间的值。基于字符的N元语法在作者归属问题上效果很好。更常见的是基于单词的N元语法。</p>
</li>
<li><p>N元语法的特征如<code>&lt;e t&gt;</code>是由e、空格和t组成的。</p>
</li>
<li><p>字符N元语法的特点是稀疏，但低于基于单词的N元语法。</p>
</li>
<li><p>抽取N元语法，analyzer指定了抽取字符，ngram_range指定N的范围，取同样长度的N元语法，则使用相同的值。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br></pre></td><td class="code"><pre><span class="line">CountVectorizer(analyzer=<span class="string">'char'</span>, ngram_range=(<span class="number">3</span>, <span class="number">3</span>))</span><br></pre></td></tr></table></figure>
</li>
</ul>
<h3 id="9-5-使用安然公司数据集"><a href="#9-5-使用安然公司数据集" class="headerlink" title="9.5 使用安然公司数据集"></a>9.5 使用安然公司数据集</h3><ul>
<li><p>初始化邮件解析器。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br></pre></td><td class="code"><pre><span class="line"><span class="keyword">from</span> email.parser <span class="keyword">import</span> Parser</span><br><span class="line">p = Parser()</span><br></pre></td></tr></table></figure>
</li>
<li><p>为保证数据集相对平衡，设定发件人最少发件数和最大抽取邮件数。</p>
</li>
<li><p>打乱邮箱地址。因listdir每次获取的邮箱顺序不一定相同，所以先排序，再打乱。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br></pre></td><td class="code"><pre><span class="line">email_addresses = sorted(os.listdir(data_folder))</span><br><span class="line">random_state.shuffle(email_addresses)</span><br></pre></td></tr></table></figure>
</li>
<li><p>解析邮件，获取邮件内容。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br></pre></td><td class="code"><pre><span class="line">contents = [p.parsestr(email)._payload <span class="keyword">for</span> email <span class="keyword">in</span> authored_emails]</span><br><span class="line">documents.extend(contents)</span><br></pre></td></tr></table></figure>
</li>
<li><p>获取安然语料库函数如下：</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br><span class="line">13</span><br><span class="line">14</span><br><span class="line">15</span><br><span class="line">16</span><br><span class="line">17</span><br><span class="line">18</span><br><span class="line">19</span><br><span class="line">20</span><br><span class="line">21</span><br><span class="line">22</span><br><span class="line">23</span><br><span class="line">24</span><br><span class="line">25</span><br><span class="line">26</span><br><span class="line">27</span><br><span class="line">28</span><br><span class="line">29</span><br><span class="line">30</span><br><span class="line">31</span><br><span class="line">32</span><br><span class="line">33</span><br></pre></td><td class="code"><pre><span class="line"><span class="function"><span class="keyword">def</span> <span class="title">get_enron_corpus</span><span class="params">(num_authors=<span class="number">10</span>, data_folder=data_folder,</span></span></span><br><span class="line"><span class="function"><span class="params">                     min_docs_author=<span class="number">10</span>, max_docs_author=<span class="number">100</span>,</span></span></span><br><span class="line"><span class="function"><span class="params">                     random_state=None)</span>:</span></span><br><span class="line">    random_state = check_random_state(random_state)</span><br><span class="line">    email_addresses = sorted(os.listdir(data_folder))</span><br><span class="line">    random_state.shuffle(email_addresses)</span><br><span class="line">    documents = 
[]</span><br><span class="line">    classes = []</span><br><span class="line">    author_num = <span class="number">0</span></span><br><span class="line">    authors = &#123;&#125;</span><br><span class="line">    <span class="keyword">for</span> user <span class="keyword">in</span> email_addresses:</span><br><span class="line">        users_email_folder = os.path.join(data_folder, user)</span><br><span class="line">        mail_folders = [os.path.join(users_email_folder, subfolder)</span><br><span class="line">                        <span class="keyword">for</span> subfolder <span class="keyword">in</span> os.listdir(users_email_folder)</span><br><span class="line">                        <span class="keyword">if</span> <span class="string">"sent"</span> <span class="keyword">in</span> subfolder]</span><br><span class="line">        <span class="keyword">try</span>:</span><br><span class="line">            authored_emails = [open(os.path.join(mail_folder, email_filename), encoding=<span class="string">'cp1252'</span>).read()</span><br><span class="line">                               <span class="keyword">for</span> mail_folder <span class="keyword">in</span> mail_folders</span><br><span class="line">                               <span class="keyword">for</span> email_filename <span class="keyword">in</span> os.listdir(mail_folder)]</span><br><span class="line">        <span class="keyword">except</span> IsADirectoryError:</span><br><span class="line">            <span class="keyword">continue</span></span><br><span class="line">        <span class="keyword">if</span> len(authored_emails) &lt; min_docs_author:</span><br><span class="line">            <span class="keyword">continue</span></span><br><span class="line">        <span class="keyword">if</span> len(authored_emails) &gt; max_docs_author:</span><br><span class="line">            authored_emails = authored_emails[:max_docs_author]</span><br><span class="line">        contents = 
[p.parsestr(email)._payload <span class="keyword">for</span> email <span class="keyword">in</span> authored_emails]</span><br><span class="line">        documents.extend(contents)</span><br><span class="line">        classes.extend([author_num] * len(authored_emails))</span><br><span class="line">        authors[user] = author_num</span><br><span class="line">        author_num += <span class="number">1</span></span><br><span class="line">        <span class="keyword">if</span> author_num &gt;= num_authors <span class="keyword">or</span> author_num &gt;= len(email_addresses):</span><br><span class="line">            <span class="keyword">break</span></span><br><span class="line">    <span class="keyword">return</span> documents, np.array(classes), authors</span><br></pre></td></tr></table></figure>
</li>
<li><p>由于回复邮件时会带上别人之前邮件的内容，因此要进行处理。</p>
</li>
<li><p>使用<code>import quotequail</code>查找邮件中的新内容。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br></pre></td><td class="code"><pre><span class="line"><span class="function"><span class="keyword">def</span> <span class="title">remove_replies</span><span class="params">(email_contents)</span>:</span></span><br><span class="line">    r = quotequail.unwrap(email_contents)</span><br><span class="line">    <span class="keyword">if</span> r <span class="keyword">is</span> <span class="literal">None</span>:</span><br><span class="line">        <span class="keyword">return</span> email_contents</span><br><span class="line">    <span class="keyword">if</span> <span class="string">'text_top'</span> <span class="keyword">in</span> r:</span><br><span class="line">        <span class="keyword">return</span> r[<span class="string">'text_top'</span>]</span><br><span class="line">    <span class="keyword">elif</span> <span class="string">'text'</span> <span class="keyword">in</span> r:</span><br><span class="line">        <span class="keyword">return</span> r[<span class="string">'text'</span>]</span><br><span class="line">    <span class="keyword">return</span> email_contents</span><br></pre></td></tr></table></figure>
</li>
<li><p>线上学习：使用新数据更新训练结果，但不是每次都重新进行训练。</p>
</li>
<li><p>输出最佳训练参数。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br></pre></td><td class="code"><pre><span class="line">print(pipeline.named_steps[<span class="string">'classifier'</span>].best_params_)</span><br></pre></td></tr></table></figure>
</li>
<li><p>创建混淆矩阵，获取发件人。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br></pre></td><td class="code"><pre><span class="line"><span class="keyword">from</span> sklearn.metrics <span class="keyword">import</span> confusion_matrix</span><br><span class="line">cm = confusion_matrix(y_pred, y_test)</span><br><span class="line">cm = cm / cm.astype(np.float).sum(axis=<span class="number">1</span>)</span><br><span class="line">sorted_authors = sorted(authors.keys(), key=<span class="keyword">lambda</span> x:authors[x])</span><br></pre></td></tr></table></figure>
</li>
</ul>
<h2 id="第十章-新闻预料分类"><a href="#第十章-新闻预料分类" class="headerlink" title="第十章 新闻语料分类"></a>第十章 新闻语料分类</h2><h3 id="10-1-获取新闻文章"><a href="#10-1-获取新闻文章" class="headerlink" title="10.1 获取新闻文章"></a>10.1 获取新闻文章</h3><ul>
<li><p>已知目标类别的学习任务叫有监督学习，未知目标类别的学习任务叫无监督学习。</p>
</li>
<li><p>使用WEB API采集数据，如使用twitterAPI采集数据，有三个注意事项：</p>
<ul>
<li>授权方法：是数据提供方用来管理数据采集方的。</li>
<li>采集频率：限制了采集方在约定时间内的最大请求数。</li>
<li>API端点：用来抽取信息的实际网址。</li>
</ul>
</li>
<li><p>获取信息时发送HTTP GET请求到指定网址。服务器返回资源信息、信息类型和ID。</p>
</li>
<li><p>从reddit上创建script型应用，获得client ID和密钥。</p>
</li>
<li><p>设置唯一用户代理，避免与其他API重复，影响采集限制。<code>USER_AGENT = &quot;python:&lt;unique user agent&gt; (by /u/&lt;reddit username&gt;)&quot;</code></p>
</li>
<li><p>登录获取令牌。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br></pre></td><td class="code"><pre><span class="line"><span class="function"><span class="keyword">def</span> <span class="title">login</span><span class="params">(username, password)</span>:</span></span><br><span class="line">    <span class="keyword">if</span> password <span class="keyword">is</span> <span class="literal">None</span>:</span><br><span class="line">        password = getpass.getpass(<span class="string">"Enter reddit password for username &#123;&#125;:"</span>.format(username))</span><br><span class="line">    headers = &#123;<span class="string">"User-Agent"</span>: USER_AGENT&#125;</span><br><span class="line">    client_auth = requests.auth.HTTPBasicAuth(CLIENT_ID, CLIENT_SECRET)</span><br><span class="line">    post_data = &#123;<span class="string">"grant_type"</span>: <span class="string">"password"</span>, <span class="string">"username"</span>:username, <span class="string">"password"</span>:password&#125;</span><br><span class="line">    response = requests.post(<span class="string">"https://www.reddit.com/api/v1/access_token"</span>, auth=client_auth, data=post_data, headers=headers)</span><br><span class="line">    <span class="keyword">return</span> response.json()</span><br></pre></td></tr></table></figure>
</li>
<li><p>指定reddit栏目搜集信息。设置头部。获取返回的信息。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br></pre></td><td class="code"><pre><span class="line">subreddit = <span class="string">"worldnews"</span></span><br><span class="line">url = <span class="string">"https://oauth.reddit.com/r/&#123;&#125;"</span>.format(subreddit)</span><br><span class="line">headers = &#123;<span class="string">"Authorization"</span>: <span class="string">"bearer &#123;&#125;"</span>.format(token[<span class="string">'access_token'</span>]), <span class="string">"User-Agent"</span>: USER_AGENT&#125;</span><br><span class="line">response = requests.get(url,headers=headers)</span><br></pre></td></tr></table></figure>
</li>
<li><p>输出每条广播的标题。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br></pre></td><td class="code"><pre><span class="line">result = response.json()</span><br><span class="line"><span class="keyword">for</span> story <span class="keyword">in</span> result[<span class="string">'data'</span>][<span class="string">'children'</span>]:</span><br><span class="line">    print(story[<span class="string">'data'</span>][<span class="string">'title'</span>])</span><br></pre></td></tr></table></figure>
</li>
<li><p>获取500条广播的标题、链接和喜欢数。因为每页最多是100条广播，因此要用游标，reddit的游标是after。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br><span class="line">13</span><br><span class="line">14</span><br></pre></td><td class="code"><pre><span class="line"><span class="function"><span class="keyword">def</span> <span class="title">get_links</span><span class="params">(subreddit, token, n_pages=<span class="number">5</span>)</span>:</span></span><br><span class="line">    stories = []</span><br><span class="line">    after = <span class="literal">None</span></span><br><span class="line">    <span class="keyword">for</span> page_number <span class="keyword">in</span> range(n_pages):</span><br><span class="line">        headers = &#123;<span class="string">"Authorization"</span>: <span class="string">"bearer &#123;&#125;"</span>.format(token[<span class="string">'access_token'</span>]), <span class="string">"User-Agent"</span>: USER_AGENT&#125;</span><br><span class="line">        url = <span class="string">"https://oauth.reddit.com/r/&#123;&#125;?limit=100"</span>.format(subreddit)</span><br><span class="line">        <span class="keyword">if</span> after:</span><br><span class="line">            url += <span class="string">"&amp;after=&#123;&#125;"</span>.format(after)</span><br><span class="line">        response = requests.get(url,headers=headers)</span><br><span class="line">        result = response.json()</span><br><span class="line">        after = result[<span class="string">'data'</span>][<span class="string">'after'</span>]</span><br><span class="line">        sleep(<span class="number">2</span>)</span><br><span class="line">        stories.extend([(story[<span 
class="string">'data'</span>][<span class="string">'title'</span>], story[<span class="string">'data'</span>][<span class="string">'url'</span>], story[<span class="string">'data'</span>][<span class="string">'score'</span>]) <span class="keyword">for</span> story <span class="keyword">in</span> result[<span class="string">'data'</span>][<span class="string">'children'</span>]])</span><br><span class="line">    <span class="keyword">return</span> stories</span><br></pre></td></tr></table></figure>
</li>
</ul>
<h3 id="10-2-从任意网站抽取文本"><a href="#10-2-从任意网站抽取文本" class="headerlink" title="10.2 从任意网站抽取文本"></a>10.2 从任意网站抽取文本</h3><ul>
<li><p>使用中文系统中的txt文件默认编码为gbk，要改成utf8，否则大量英文信息无法正确编码。爬取链接信息时，要加上头部，以免被识别为爬虫而无法正常返回信息。reddit标题不唯一，因此使用md5获取散列值作为文件名，md5在小规模数据中是可靠的。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br><span class="line">13</span><br><span class="line">14</span><br><span class="line">15</span><br><span class="line">16</span><br><span class="line">17</span><br><span class="line">18</span><br><span class="line">19</span><br></pre></td><td class="code"><pre><span class="line"><span class="keyword">import</span> os</span><br><span class="line"><span class="keyword">import</span> hashlib</span><br><span class="line"><span class="keyword">import</span> codecs</span><br><span class="line">data_floder = <span class="string">"raw/"</span></span><br><span class="line">number_errors = <span class="number">0</span></span><br><span class="line"><span class="keyword">for</span> title, url, score <span class="keyword">in</span> stories:</span><br><span class="line">    output_filename = hashlib.md5(url.encode()).hexdigest()</span><br><span class="line">    fullpath = os.path.join(data_floder, output_filename + <span class="string">".txt"</span>)</span><br><span class="line">    headers = &#123;</span><br><span class="line">    <span class="string">'User-Agent'</span>: <span class="string">'Mozilla/4.0(compatible; MSIE 5.5; Windows NT)'</span></span><br><span class="line">    &#125;</span><br><span class="line">    <span class="keyword">try</span>:</span><br><span class="line">        response = requests.get(url, headers=headers)</span><br><span class="line">        data = response.text</span><br><span class="line">        <span class="keyword">with</span> codecs.open(fullpath, <span class="string">'w'</span>, <span 
class="string">'utf8'</span>) <span class="keyword">as</span> outf:</span><br><span class="line">            outf.write(data)</span><br><span class="line">    <span class="keyword">except</span> Exception <span class="keyword">as</span> e:</span><br><span class="line">        number_errors += <span class="number">1</span></span><br><span class="line">        print(e)</span><br></pre></td></tr></table></figure>
</li>
<li><p>使用lxml解析HTML文件，lxml的HTML解析器容错能力强，可以处理不规范的HTML代码。</p>
</li>
<li><p>文本抽取分三步：</p>
<ul>
<li>遍历HTML文件的每个节点，抽取其中的文本内容。</li>
<li>跳过JavaScript、样式和注释节点。</li>
<li>确保文本内容长度至少为100个字符。</li>
</ul>
</li>
<li><p>遍历解析树，拼接获取文本。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br><span class="line">13</span><br><span class="line">14</span><br><span class="line">15</span><br><span class="line">16</span><br><span class="line">17</span><br></pre></td><td class="code"><pre><span class="line"><span class="function"><span class="keyword">def</span> <span class="title">get_text_from_node</span><span class="params">(node)</span>:</span></span><br><span class="line">    <span class="keyword">if</span> len(node) == <span class="number">0</span>:</span><br><span class="line">        <span class="keyword">if</span> node.text <span class="keyword">and</span> len(node.text) &gt; <span class="number">100</span>:</span><br><span class="line">            <span class="keyword">return</span> node.text</span><br><span class="line">        <span class="keyword">else</span>:</span><br><span class="line">            <span class="keyword">return</span> <span class="string">""</span></span><br><span class="line">    <span class="keyword">else</span>:</span><br><span class="line">        results = (get_text_from_node(child) <span class="keyword">for</span> child <span class="keyword">in</span> node <span class="keyword">if</span> child.tag <span class="keyword">not</span> <span class="keyword">in</span> skip_node_types)</span><br><span class="line">        <span class="keyword">return</span> <span class="string">"\n"</span>.join(r <span class="keyword">for</span> r <span class="keyword">in</span> results <span class="keyword">if</span> len(r) &gt; <span class="number">1</span>)</span><br><span class="line"><span 
class="function"><span class="keyword">def</span> <span class="title">get_text_from_file</span><span class="params">(filename)</span>:</span></span><br><span class="line">    <span class="keyword">with</span> codecs.open(filename, encoding=<span class="string">'utf8'</span>) <span class="keyword">as</span> inf:</span><br><span class="line">        html_tree = etree.parse(inf, etree.HTMLParser())</span><br><span class="line">    <span class="keyword">return</span> get_text_from_node(html_tree.getroot())</span><br><span class="line"><span class="keyword">for</span> filename <span class="keyword">in</span> os.listdir(data_floder):</span><br><span class="line">    text = get_text_from_file(os.path.join(data_floder, filename))</span><br><span class="line">    <span class="keyword">with</span> codecs.open(os.path.join(text_output_folder, filename), <span class="string">'w'</span>, <span class="string">'utf8'</span>) <span class="keyword">as</span> outf:</span><br><span class="line">        outf.write(text)</span><br></pre></td></tr></table></figure>
</li>
</ul>
<h3 id="10-3-新闻预料分类"><a href="#10-3-新闻预料分类" class="headerlink" title="10.3 新闻语料分类"></a>10.3 新闻语料分类</h3><ul>
<li><p>聚类算法在学习时没有明确的方向性，根据目标函数而不是数据潜在的含义学习。因此聚类算法选择效果好的特征很重要。有监督学习中，算法会自动降低对分类作用不大的特征的权重，而聚类会综合所有特征给出最后结果。</p>
</li>
<li><p>k-means聚类算法迭代寻找能够代表数据的聚类质心点。算法开始时使用从训练数据中随机选取的k个数据点作为质心。在迭代一定次数后，质心移动量很小时，可以终止算法的运行。步骤如下：</p>
<ul>
<li>为每一个数据点分配簇标签，标签根据与各质心的距离选取。</li>
<li>计算各簇内所有数据点均值，更新各簇的质心点。</li>
</ul>
</li>
<li><p><code>from sklearn.cluster import KMeans</code>使用kmeans聚类算法。</p>
</li>
<li><p><code>from sklearn.feature_extraction.text import TfidfVectorizer</code>引入抽取tf-idf特征的向量化工具。</p>
</li>
<li><p>封装流水线，设定max_df=0.4忽略在40%以上文档中出现过的词语。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br></pre></td><td class="code"><pre><span class="line"><span class="keyword">from</span> sklearn.pipeline <span class="keyword">import</span> Pipeline</span><br><span class="line">n_clusters = <span class="number">10</span></span><br><span class="line">pipeline = Pipeline([(<span class="string">'feature_extraction'</span>, TfidfVectorizer(max_df=<span class="number">0.4</span>)),</span><br><span class="line">                     (<span class="string">'clusterer'</span>, KMeans(n_clusters=n_clusters))</span><br><span class="line">                     ])</span><br></pre></td></tr></table></figure>
</li>
<li><p>使用Counter函数计算每类数据点个数。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br></pre></td><td class="code"><pre><span class="line"><span class="keyword">from</span> collections <span class="keyword">import</span> Counter</span><br><span class="line">c = Counter(labels)</span><br><span class="line"><span class="keyword">for</span> cluster_number <span class="keyword">in</span> range(n_clusters):</span><br><span class="line">    print(<span class="string">"Cluster &#123;&#125; contains &#123;&#125; samples"</span>.format(cluster_number, c[cluster_number]))</span><br></pre></td></tr></table></figure>
</li>
<li><p>聚类算法是探索性算法，很难评估算法结果的好坏，评估最直接的方式是根据其学习的标准进行评价。</p>
</li>
<li><p>计算kmeans算法的惯性权重即每个数据点到最近质心点的距离，这个值本身没有意义，但可以用来判断分多少簇合适。</p>
</li>
<li><p>对于每个簇数计算30次。</p>
<figure class="highlight plain"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br></pre></td><td class="code"><pre><span class="line">inertia_scores = []</span><br><span class="line">n_cluster_values = list(range(2, 20))</span><br><span class="line">for n_clusters in n_cluster_values:</span><br><span class="line">    cur_inertia_scores = []</span><br><span class="line">    X = TfidfVectorizer(max_df=0.4).fit_transform(documents)</span><br><span class="line">    for i in range(30):</span><br><span class="line">        km = KMeans(n_clusters=n_clusters).fit(X)</span><br><span class="line">        cur_inertia_scores.append(km.inertia_)</span><br><span class="line">    inertia_scores.append(cur_inertia_scores)</span><br><span class="line">inertia_scores = np.array(inertia_scores)</span><br></pre></td></tr></table></figure>
</li>
<li><p>计算均值和标准差，画出图像。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br></pre></td><td class="code"><pre><span class="line">%matplotlib inline</span><br><span class="line"><span class="keyword">from</span> matplotlib <span class="keyword">import</span> pyplot <span class="keyword">as</span> plt</span><br><span class="line"></span><br><span class="line">inertia_means = np.mean(inertia_scores, axis=<span class="number">1</span>)</span><br><span class="line">inertia_stderr = np.std(inertia_scores, axis=<span class="number">1</span>)</span><br><span class="line"></span><br><span class="line">fig = plt.figure(figsize=(<span class="number">40</span>,<span class="number">20</span>))</span><br><span class="line">plt.errorbar(n_cluster_values, inertia_means, inertia_stderr, color=<span class="string">'green'</span>)</span><br><span class="line">plt.show()</span><br></pre></td></tr></table></figure>
</li>
<li><p>随着簇增加，惯性权重逐渐减少，但当簇数为k时，惯性权重最后进行了一次大的调整，如同图像的肘部，称为拐点。有的数据集拐点明显，有的数据集则没有拐点。</p>
</li>
<li><p>从质心找出特征值最大的5个特征。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br></pre></td><td class="code"><pre><span class="line">print(<span class="string">"  Most important terms"</span>)</span><br><span class="line">    centroid = pipeline.named_steps[<span class="string">'clusterer'</span>].cluster_centers_[cluster_number]</span><br><span class="line">    most_important = centroid.argsort()</span><br><span class="line">    <span class="keyword">for</span> i <span class="keyword">in</span> range(<span class="number">5</span>):</span><br><span class="line">        term_index = most_important[-(i+<span class="number">1</span>)]</span><br><span class="line">        print(<span class="string">"  &#123;0&#125;) &#123;1&#125; (score: &#123;2:.4f&#125;)"</span>.format(i+<span class="number">1</span>, terms[term_index], centroid[term_index]))</span><br></pre></td></tr></table></figure>
</li>
<li><p>k聚类算法可用来简化特征，其他特征简化方法如主成分分析、潜在语义索引的计算要求很高。使用数据点到质心点的距离作为特征，来简化特征。</p>
</li>
<li><p>简化特征后可以进行二次聚类。</p>
</li>
<li><p>分类时也可使用聚类来简化特征：</p>
<ul>
<li>使用标注好的数据选取特征</li>
<li>用聚类方法简化特征</li>
<li>用分类算法对前面处理好的数据分类</li>
</ul>
</li>
</ul>
<h3 id="10-4-聚类融合"><a href="#10-4-聚类融合" class="headerlink" title="10.4 聚类融合"></a>10.4 聚类融合</h3><ul>
<li><p>聚类融合后的算法能够平滑算法多次运行得到的不同结果，也可以减少参数选择对于最终结果的影响。</p>
</li>
<li><p>证据累积算法：对数据多次聚类，每次都记录各个数据点的簇标签，计算每两个数据点被分到同一个簇的次数。步骤如下：</p>
<ul>
<li>使用kmeans等低水平聚类算法对数据集进行多次聚类，记录每一次迭代两个数据点出现在同一簇的频率，将结果保存到共协矩阵。</li>
<li>使用分级聚类对第一步得到的共协矩阵进行聚类分析。分级聚类等价于找到一棵把所有节点连接到一起的树，并把权重低的边去掉。</li>
</ul>
</li>
<li><p><code>from scipy.sparse import csr_matrix</code>使用scipy的稀疏矩阵csr_matrix。稀疏矩阵由一系列记录非零值位置的列表组成。</p>
</li>
<li><p>创建共协矩阵。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br></pre></td><td class="code"><pre><span class="line"><span class="function"><span class="keyword">def</span> <span class="title">create_coassociation_matrix</span><span class="params">(labels)</span>:</span></span><br><span class="line">    rows = []</span><br><span class="line">    cols = []</span><br><span class="line">    unique_labels = set(labels)</span><br><span class="line">    <span class="keyword">for</span> label <span class="keyword">in</span> unique_labels:</span><br><span class="line">        indices = np.where(labels == label)[<span class="number">0</span>]</span><br><span class="line">        <span class="keyword">for</span> index1 <span class="keyword">in</span> indices:</span><br><span class="line">            <span class="keyword">for</span> index2 <span class="keyword">in</span> indices:</span><br><span class="line">                rows.append(index1)</span><br><span class="line">                cols.append(index2)</span><br><span class="line">    data = np.ones((len(rows),))</span><br><span class="line">    <span class="keyword">return</span> csr_matrix((data, (rows, cols)), dtype=<span class="string">'float'</span>)</span><br></pre></td></tr></table></figure>
</li>
<li><p>分级聚类即找到该矩阵的最小生成树，删除权重低于阈值的边。</p>
</li>
<li><p>生成树是所有节点都连接到一起的树。</p>
</li>
<li><p>最小生成树是总权重最低的生成树。</p>
</li>
<li><p>图中的节点是数据集中的个体，边是被分到同一簇的次数即共协矩阵的值。</p>
</li>
<li><p><code>from scipy.sparse.csgraph import minimum_spanning_tree</code>使用scipy中的minimum_spanning_tree计算最小生成树。<code>mst = minimum_spanning_tree(C)</code>，函数输入为距离，因此要取反。</p>
</li>
<li><p>再次遍历，得到第二次聚类的共协矩阵，删除不是两个共协矩阵中都出现的边。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br></pre></td><td class="code"><pre><span class="line">pipeline = Pipeline([(<span class="string">'feature_extraction'</span>, TfidfVectorizer(max_df=<span class="number">0.4</span>)),</span><br><span class="line">                     (<span class="string">'clusterer'</span>, KMeans(n_clusters=<span class="number">3</span>))</span><br><span class="line">                     ])</span><br><span class="line">pipeline.fit(documents)</span><br><span class="line">labels2 = pipeline.predict(documents)</span><br><span class="line">C2 = create_coassociation_matrix(labels2)</span><br><span class="line">C_sum = (C + C2) / <span class="number">2</span></span><br><span class="line">C_sum.todense()</span><br><span class="line">mst = minimum_spanning_tree(-C_sum)</span><br><span class="line">mst.data[mst.data &gt; <span class="number">-1</span>] = <span class="number">0</span></span><br><span class="line">mst.eliminate_zeros()</span><br></pre></td></tr></table></figure>
</li>
<li><p>找到所有连通分支。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br></pre></td><td class="code"><pre><span class="line"><span class="keyword">from</span> scipy.sparse.csgraph <span class="keyword">import</span> connected_components</span><br><span class="line">number_of_clusters, labels = connected_components(mst)</span><br></pre></td></tr></table></figure>
</li>
<li><p>kmeans算法假定所有特征取值范围相同，找的是圆形簇。当簇不是圆形的时，用kmeans聚类有难度。</p>
</li>
<li><p>证据累积算法把特征重新映射到新空间，证据累积算法只关心数据点之间的距离而不是原先在特征空间的位置。但仍需进行数据规范化。</p>
</li>
<li><p>指定n_clusterings次聚类进行融合，删除边的阈值为cut_threshold，每次聚类簇的范围为n_clusters_range。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br><span class="line">13</span><br><span class="line">14</span><br><span class="line">15</span><br><span class="line">16</span><br><span class="line">17</span><br><span class="line">18</span><br><span class="line">19</span><br><span class="line">20</span><br><span class="line">21</span><br><span class="line">22</span><br><span class="line">23</span><br><span class="line">24</span><br><span class="line">25</span><br></pre></td><td class="code"><pre><span class="line"><span class="keyword">from</span> sklearn.base <span class="keyword">import</span> BaseEstimator, ClusterMixin</span><br><span class="line"></span><br><span class="line"><span class="class"><span class="keyword">class</span> <span class="title">EAC</span><span class="params">(BaseEstimator, ClusterMixin)</span>:</span></span><br><span class="line">    <span class="function"><span class="keyword">def</span> <span class="title">__init__</span><span class="params">(self, n_clusterings=<span class="number">10</span>, cut_threshold=<span class="number">0.5</span>, n_clusters_range=<span class="params">(<span class="number">3</span>, <span class="number">10</span>)</span>)</span>:</span></span><br><span class="line">        self.n_clusterings = n_clusterings</span><br><span class="line">        self.cut_threshold = cut_threshold</span><br><span class="line">        self.n_clusters_range = n_clusters_range</span><br><span class="line">    </span><br><span class="line">    <span class="function"><span class="keyword">def</span> <span class="title">fit</span><span 
class="params">(self, X, y=None)</span>:</span></span><br><span class="line">        C = sum((create_coassociation_matrix(self._single_clustering(X))</span><br><span class="line">                 <span class="keyword">for</span> i <span class="keyword">in</span> range(self.n_clusterings)))</span><br><span class="line">        mst = minimum_spanning_tree(-C)</span><br><span class="line">        mst.data[mst.data &gt; -self.cut_threshold] = <span class="number">0</span></span><br><span class="line">        mst.eliminate_zeros()</span><br><span class="line">        self.n_components, self.labels_ = connected_components(mst)</span><br><span class="line">        <span class="keyword">return</span> self</span><br><span class="line">    </span><br><span class="line">    <span class="function"><span class="keyword">def</span> <span class="title">_single_clustering</span><span class="params">(self, X)</span>:</span></span><br><span class="line">        n_clusters = np.random.randint(*self.n_clusters_range)</span><br><span class="line">        km = KMeans(n_clusters=n_clusters)</span><br><span class="line">        <span class="keyword">return</span> km.fit_predict(X)</span><br><span class="line">    </span><br><span class="line">    <span class="function"><span class="keyword">def</span> <span class="title">fit_predict</span><span class="params">(self, X)</span>:</span></span><br><span class="line">        self.fit(X)</span><br><span class="line">        <span class="keyword">return</span> self.labels_</span><br></pre></td></tr></table></figure>
</li>
<li><p>组成流水线。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br></pre></td><td class="code"><pre><span class="line">pipeline = Pipeline([(<span class="string">'feature_extraction'</span>, TfidfVectorizer(max_df=<span class="number">0.4</span>)),</span><br><span class="line">                     (<span class="string">'clusterer'</span>, EAC())</span><br><span class="line">                     ])</span><br></pre></td></tr></table></figure>
</li>
</ul>
<h3 id="10-5-线上学习"><a href="#10-5-线上学习" class="headerlink" title="10.5 线上学习"></a>10.5 线上学习</h3><ul>
<li><p>当没有足够数据用来训练，或内存不能一次装下所有数据，或完成预测后得到了新的数据，此时可以使用线上学习。</p>
</li>
<li><p>线上学习是指用新数据增量地改进模型。神经网络是支持线上学习的标准例子。</p>
</li>
<li><p>神经网络也支持使用批模式进行训练，每次只使用一组数据进行训练，运行速度快但耗内存多。</p>
</li>
<li><p>线上学习与流式学习有关，不同点在于：线上学习能重新评估先前创建模型时所用的数据，但后者的数据只能用一次。</p>
</li>
<li><p><code>from sklearn.cluster import MiniBatchKMeans</code>支持线上学习，实现了partial_fit函数进行线上学习，而fit函数则会删除之前的训练结果。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br></pre></td><td class="code"><pre><span class="line">mbkm = MiniBatchKMeans(random_state=<span class="number">14</span>, n_clusters=<span class="number">3</span>)</span><br><span class="line">batch_size = <span class="number">500</span></span><br><span class="line"></span><br><span class="line">indices = np.arange(<span class="number">0</span>, X.shape[<span class="number">0</span>])</span><br><span class="line"><span class="keyword">for</span> iteration <span class="keyword">in</span> range(<span class="number">100</span>):</span><br><span class="line">    sample = np.random.choice(indices, size=batch_size, replace=<span class="literal">True</span>)</span><br><span class="line">    mbkm.partial_fit(X[sample[:batch_size]])</span><br></pre></td></tr></table></figure>
</li>
<li><p>由于TfidfVectorizer不是线上学习算法，所以改用<code>from sklearn.feature_extraction.text import HashingVectorizer</code>，使用散列值代替特征名称，记录词袋模型。</p>
</li>
<li><p>创建支持线上学习的pipeline类。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br></pre></td><td class="code"><pre><span class="line"><span class="class"><span class="keyword">class</span> <span class="title">PartialFitPipeline</span><span class="params">(Pipeline)</span>:</span></span><br><span class="line">    <span class="function"><span class="keyword">def</span> <span class="title">partial_fit</span><span class="params">(self, X, y=None)</span>:</span></span><br><span class="line">        Xt = X</span><br><span class="line">        <span class="keyword">for</span> name, transform <span class="keyword">in</span> self.steps[:<span class="number">-1</span>]:</span><br><span class="line">            Xt = transform.transform(Xt)</span><br><span class="line">        <span class="keyword">return</span> self.steps[<span class="number">-1</span>][<span class="number">1</span>].partial_fit(Xt, y=y)</span><br></pre></td></tr></table></figure>
</li>
<li><p>组装成流水线。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br></pre></td><td class="code"><pre><span class="line">pipeline = PartialFitPipeline([(<span class="string">'feature_extraction'</span>, HashingVectorizer()),</span><br><span class="line">                             (<span class="string">'clusterer'</span>, MiniBatchKMeans(random_state=<span class="number">14</span>, n_clusters=<span class="number">3</span>))</span><br><span class="line">                             ])</span><br></pre></td></tr></table></figure>
</li>
<li><p>用批模式训练。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br></pre></td><td class="code"><pre><span class="line">batch_size = <span class="number">10</span></span><br><span class="line"><span class="keyword">for</span> iteration <span class="keyword">in</span> range(int(len(documents) / batch_size)):</span><br><span class="line">    start = batch_size * iteration</span><br><span class="line">    end = batch_size * (iteration + <span class="number">1</span>)</span><br><span class="line">    pipeline.partial_fit(documents[start:end])</span><br></pre></td></tr></table></figure>
</li>
</ul>
<h2 id="第十一章-用深度学习方法为图像中的物体进行分类"><a href="#第十一章-用深度学习方法为图像中的物体进行分类" class="headerlink" title="第十一章 用深度学习方法为图像中的物体进行分类"></a>第十一章 用深度学习方法为图像中的物体进行分类</h2><h3 id="11-1-应用场景和目标"><a href="#11-1-应用场景和目标" class="headerlink" title="11.1 应用场景和目标"></a>11.1 应用场景和目标</h3><ul>
<li><p>使用CIFAR-10数据集进行训练，所用图像均为numpy数组。</p>
</li>
<li><p>图像数据格式为pickle，pickle是保存图形对象的一个库，调用<code>pickle.load</code>读取数据。编码设置为Latin，防止不同版本python导致的编码错误。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br></pre></td><td class="code"><pre><span class="line"><span class="keyword">import</span> pickle</span><br><span class="line"><span class="function"><span class="keyword">def</span> <span class="title">unpickle</span><span class="params">(filename)</span>:</span></span><br><span class="line">    <span class="keyword">with</span> open(filename, <span class="string">'rb'</span>) <span class="keyword">as</span> fo:</span><br><span class="line">        <span class="keyword">return</span> pickle.load(fo, encoding=<span class="string">'latin1'</span>)</span><br></pre></td></tr></table></figure>
</li>
<li><p>将列表数据转换成能用matplotlib绘制的图像，并旋转图片。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br></pre></td><td class="code"><pre><span class="line">image = image.reshape((<span class="number">32</span>,<span class="number">32</span>, <span class="number">3</span>), order=<span class="string">'F'</span>)</span><br><span class="line"><span class="keyword">import</span> numpy <span class="keyword">as</span> np</span><br><span class="line">image = np.rot90(image, <span class="number">-1</span>)</span><br></pre></td></tr></table></figure>
</li>
</ul>
<h3 id="11-2-深度神经网络"><a href="#11-2-深度神经网络" class="headerlink" title="11.2 深度神经网络"></a>11.2 深度神经网络</h3><ul>
<li><p>至少包含两层隐含层的神经网络被称为深度神经网络。更巧妙的算法能减少实际需要的层数。</p>
</li>
<li><p>神经网络接收很基础的特征作为输入，就计算机视觉而言，输入为简单的像素值。经过神经网络，基础的特征组合成复杂的特征。</p>
</li>
<li><p>一个神经网络可以用一组矩阵表示，每层增加一个偏置项，永远激活并与下一层的每个神经元都有连接。</p>
</li>
<li><p>Theano是用来创建和运行数学表达式的工具。和SQL相似，在Theano中只需定义要做什么而不是怎么做。</p>
</li>
<li><p>Theano用来定义函数，处理标量、数组和矩阵及其他数学表达式。</p>
</li>
<li><p>引入张量。定义两个标量数值型输入。构成表达式。定义计算表达式的函数。要注意theano和numpy包的兼容。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br></pre></td><td class="code"><pre><span class="line"><span class="keyword">import</span> theano</span><br><span class="line"><span class="keyword">from</span> theano <span class="keyword">import</span> tensor <span class="keyword">as</span> T</span><br><span class="line">a = T.dscalar()</span><br><span class="line">b = T.dscalar()</span><br><span class="line">c = T.sqrt(a ** <span class="number">2</span> + b ** <span class="number">2</span>)</span><br><span class="line">f = theano.function([a,b], c)</span><br></pre></td></tr></table></figure>
</li>
<li><p>Lasagne库基于Theano库，专门用来构建神经网络，使用Theano 进行计算。实现了几种比较新的神经网络层和组成这些层的模块：</p>
<ul>
<li>内置网络层：这些小神经网络比传统神经网络更容易解释。</li>
<li>删除层：训练过程随机删除神经元，防止产生过拟合问题。</li>
<li>噪音层：为神经元引入噪音，防止过拟合。</li>
</ul>
</li>
<li><p>卷积层使用少量相互连接的神经元，分析一部分输入值，便于神经网络实现对数据的标准转换。</p>
</li>
<li><p>传统神经网络一层所有神经元全都连接到下一层所有神经元。</p>
</li>
<li><p>池化层接收某个区域最大输出值，可以降低图像中的微小变动带来的噪音，减少信息量，减少后续各层的工作量。</p>
</li>
<li><p>Lasagne对数据类型有要求，将数据类型转为32位。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br></pre></td><td class="code"><pre><span class="line"><span class="keyword">from</span> sklearn.datasets <span class="keyword">import</span> load_iris</span><br><span class="line">iris = load_iris()</span><br><span class="line">X = iris.data.astype(np.float32)</span><br><span class="line">y_true = iris.target.astype(np.int32)</span><br></pre></td></tr></table></figure>
</li>
<li><p>创建输入层，指定每一批输入数量为10，神经元数量和特征数量相同。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br></pre></td><td class="code"><pre><span class="line">input_layer = lasagne.layers.InputLayer(shape=X_train.shape, input_var=input_val)</span><br></pre></td></tr></table></figure>
</li>
<li><p>创建隐含层，从输入层接收输入，指定神经元数量，使用非线性sigmoid函数。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br></pre></td><td class="code"><pre><span class="line">hidden_layer = lasagne.layers.DenseLayer(input_layer, num_units=<span class="number">12</span>, nonlinearity=lasagne.nonlinearities.sigmoid)</span><br></pre></td></tr></table></figure>
</li>
<li><p>创建输出层，输出层共三个神经元与类别数一致，使用非线性softmax函数。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br></pre></td><td class="code"><pre><span class="line">output_layer = lasagne.layers.DenseLayer(hidden_layer, num_units=<span class="number">3</span>, nonlinearity=lasagne.nonlinearities.softmax)</span><br></pre></td></tr></table></figure>
</li>
<li><p>Lasagne中，输入数据先提交到输出层，再向上回溯，直到输入层，将数据交给输入层处理。</p>
</li>
<li><p>定义输入、输出、目标数据变量。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br></pre></td><td class="code"><pre><span class="line"><span class="keyword">import</span> lasagne</span><br><span class="line">input_val = T.fmatrix(<span class="string">"inputs"</span>)</span><br><span class="line">target_val = T.ivector(<span class="string">"targets"</span>)</span><br><span class="line">output_val = lasagne.layers.get_output(output_layer)</span><br></pre></td></tr></table></figure>
</li>
<li><p>定义损失函数，训练神经网络时以最小化损失函数为前提。使用交叉熵表示损失，这是一种衡量分类数据分类效果好坏的标准。损失函数表示实际网络输出与期望输出之间的差距。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br></pre></td><td class="code"><pre><span class="line">loss = lasagne.objectives.categorical_crossentropy(output_val, target_val)</span><br><span class="line">loss = loss.mean()</span><br></pre></td></tr></table></figure>
</li>
<li><p>获取所有参数，调整网络权重，使损失降到最小。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br></pre></td><td class="code"><pre><span class="line">all_params = lasagne.layers.get_all_params(output_layer, trainable=<span class="literal">True</span>)</span><br><span class="line">updates = lasagne.updates.sgd(loss, all_params, learning_rate=<span class="number">0.1</span>)</span><br></pre></td></tr></table></figure>
</li>
<li><p>定义训练函数和获取输出的函数。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br></pre></td><td class="code"><pre><span class="line">train = theano.function([input_val, target_val], loss, updates=updates, allow_input_downcast=<span class="literal">True</span>)</span><br><span class="line">get_output = theano.function([input_val], output_val)</span><br></pre></td></tr></table></figure>
</li>
<li><p>进行1000次迭代，逐渐改进神经网络。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br></pre></td><td class="code"><pre><span class="line"><span class="keyword">for</span> n <span class="keyword">in</span> range(<span class="number">1000</span>):</span><br><span class="line">    train(X_train, y_train)</span><br></pre></td></tr></table></figure>
</li>
<li><p>获取测试集的输出结果及各神经元激励作用的大小，找到激励作用最大的神经元，得到预测结果。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br></pre></td><td class="code"><pre><span class="line">y_output = get_output(X_test)</span><br><span class="line">y_pred = np.argmax(y_output, axis=<span class="number">1</span>)</span><br><span class="line"><span class="keyword">from</span> sklearn.metrics <span class="keyword">import</span> f1_score</span><br><span class="line">print(f1_score(y_test, y_pred, average=<span class="string">'micro'</span>))</span><br></pre></td></tr></table></figure>
</li>
<li><p>nolearn对Lasagne实现了封装，可读性更强，更易管理。</p>
</li>
<li><p>创建由输入层、密集隐含层和密集输出层组成的层级结构。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br></pre></td><td class="code"><pre><span class="line"><span class="keyword">from</span> lasagne <span class="keyword">import</span> layers</span><br><span class="line">layers=[</span><br><span class="line">    (<span class="string">'input'</span>, layers.InputLayer),</span><br><span class="line">    (<span class="string">'hidden'</span>, layers.DenseLayer),</span><br><span class="line">    (<span class="string">'output'</span>, layers.DenseLayer),</span><br><span class="line">]</span><br></pre></td></tr></table></figure>
</li>
<li><p>定义神经网络，输入神经网络参数，定义非线性函数，指定偏置神经元。偏置神经元激活后可以对问题做更有针对性的训练，以消除训练中的偏差。定义神经网络训练方式，这里使用低冲量值和高学习速率。将分类问题定义为回归问题，因为输出是数值，所以定义为回归问题更好。最大训练步数设为1000。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br><span class="line">13</span><br></pre></td><td class="code"><pre><span class="line">net1 = NeuralNet(layers=layers,</span><br><span class="line">                input_shape=X.shape,</span><br><span class="line">                hidden_num_units=<span class="number">100</span>,</span><br><span class="line">                output_num_units=<span class="number">26</span>,</span><br><span class="line">                hidden_nonlinearity=sigmoid,</span><br><span class="line">                 output_nonlinearity=softmax,</span><br><span class="line">                 hidden_b=np.zeros((<span class="number">100</span>,), dtype=np.float64),</span><br><span class="line">                 update=updates.momentum,</span><br><span class="line">                 update_learning_rate=<span class="number">0.9</span>,</span><br><span class="line">                 update_momentum=<span class="number">0.1</span>,</span><br><span class="line">                 regression=<span class="literal">True</span>,</span><br><span class="line">                 max_epochs=<span class="number">1000</span>,</span><br><span class="line">                )</span><br></pre></td></tr></table></figure>
</li>
<li><p>在训练集上训练网络。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br></pre></td><td class="code"><pre><span class="line">net1.fit(X_train, y_train)</span><br></pre></td></tr></table></figure>
</li>
<li><p>评估训练得到的网络。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br></pre></td><td class="code"><pre><span class="line">y_pred = net1.predict(X_test)</span><br><span class="line">y_pred = y_pred.argmax(axis=<span class="number">1</span>)</span><br><span class="line"><span class="keyword">assert</span> len(y_pred) == len(X_test)</span><br><span class="line"><span class="keyword">if</span> len(y_test.shape) &gt; <span class="number">1</span>:</span><br><span class="line">    y_test = y_test.argmax(axis=<span class="number">1</span>)</span><br><span class="line">print(f1_score(y_test, y_pred, average=<span class="string">'macro'</span>))</span><br></pre></td></tr></table></figure>
</li>
</ul>
<h3 id="11-3-GPU优化"><a href="#11-3-GPU优化" class="headerlink" title="11.3 GPU优化"></a>11.3 GPU优化</h3><ul>
<li>使用稀疏矩阵可以将整个神经网络装进内存。</li>
<li>神经网络最核心的计算类型是浮点运算，矩阵操作的大量运算可以并行处理。GPU拥有成千上万个小核，适合并行任务，CPU单核工作速度更快，访问内存效率更高，适合序列化任务。所以用GPU进行计算能够提升训练速度。</li>
</ul>
<h3 id="11-4-应用"><a href="#11-4-应用" class="headerlink" title="11.4 应用"></a>11.4 应用</h3><ul>
<li><p>保留像素结构即行列号，把所有批次图像文件名存储到列表中。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br></pre></td><td class="code"><pre><span class="line"><span class="keyword">import</span> numpy <span class="keyword">as</span> np</span><br><span class="line">batches = []</span><br><span class="line"><span class="keyword">for</span> i <span class="keyword">in</span> range(<span class="number">1</span>, <span class="number">6</span>):</span><br><span class="line">    batch_filename = os.path.join(data_folder, <span class="string">"data_batch_&#123;&#125;"</span>.format(i))</span><br><span class="line">    batches.append(unpickle(batch_filename))</span><br></pre></td></tr></table></figure>
</li>
<li><p>纵向添加每批次数据。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br></pre></td><td class="code"><pre><span class="line">X = np.vstack([batch[<span class="string">'data'</span>] <span class="keyword">for</span> batch <span class="keyword">in</span> batches])</span><br></pre></td></tr></table></figure>
</li>
<li><p>像素值归一化，并转化为32位浮点数据。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br></pre></td><td class="code"><pre><span class="line">X = np.array(X) / X.max()</span><br><span class="line">X = X.astype(np.float32)</span><br></pre></td></tr></table></figure>
</li>
<li><p>纵向添加标签数据，转化为一位有效码。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br></pre></td><td class="code"><pre><span class="line"><span class="keyword">from</span> sklearn.preprocessing <span class="keyword">import</span> OneHotEncoder</span><br><span class="line">y = np.hstack(batch[<span class="string">'labels'</span>] <span class="keyword">for</span> batch <span class="keyword">in</span> batches).flatten()</span><br><span class="line">y = OneHotEncoder().fit_transform(y.reshape(y.shape[<span class="number">0</span>],<span class="number">1</span>)).todense()</span><br><span class="line">y = y.astype(np.float32)</span><br></pre></td></tr></table></figure>
</li>
<li><p>划分训练集、测试集，调整数组形状以保留原始图像的数据结构。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br></pre></td><td class="code"><pre><span class="line">X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=<span class="number">0.2</span>) </span><br><span class="line">X_train = X_train.reshape(<span class="number">-1</span>, <span class="number">3</span>, <span class="number">32</span>, <span class="number">32</span>)</span><br><span class="line">X_test = X_test.reshape(<span class="number">-1</span>, <span class="number">3</span>, <span class="number">32</span>, <span class="number">32</span>)</span><br></pre></td></tr></table></figure>
</li>
<li><p>创建神经网络各层。输入层数据与数据集同型。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br><span class="line">13</span><br></pre></td><td class="code"><pre><span class="line"><span class="keyword">from</span> lasagne <span class="keyword">import</span> layers</span><br><span class="line">layers=[</span><br><span class="line">        (<span class="string">'input'</span>, layers.InputLayer),</span><br><span class="line">        (<span class="string">'conv1'</span>, layers.Conv2DLayer),</span><br><span class="line">        (<span class="string">'pool1'</span>, layers.MaxPool2DLayer),</span><br><span class="line">        (<span class="string">'conv2'</span>, layers.Conv2DLayer),</span><br><span class="line">        (<span class="string">'pool2'</span>, layers.MaxPool2DLayer),</span><br><span class="line">        (<span class="string">'conv3'</span>, layers.Conv2DLayer),</span><br><span class="line">        (<span class="string">'pool3'</span>, layers.MaxPool2DLayer),</span><br><span class="line">        (<span class="string">'hidden4'</span>, layers.DenseLayer),</span><br><span class="line">        (<span class="string">'hidden5'</span>, layers.DenseLayer),</span><br><span class="line">        (<span class="string">'output'</span>, layers.DenseLayer),</span><br><span class="line">        ]</span><br></pre></td></tr></table></figure>
</li>
<li><p>创建神经网络。指定输入数据形状，和数据集形状一致，None表示每次使用默认数量图像数据进行训练。设置卷积层大小及卷积窗口大小。设置池化窗口大小。设置隐含层和输出层大小，输出层大小和类别数量一致。输出层设置非线性函数softmax。设置学习速率和冲量，随着数据量的增加，学习速率应下降。分类问题转换为回归问题。训练步数设置为3以便测试。设置verbose为1，每步输出结果，以便了解模型训练进度。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br><span class="line">13</span><br><span class="line">14</span><br><span class="line">15</span><br><span class="line">16</span><br><span class="line">17</span><br><span class="line">18</span><br><span class="line">19</span><br><span class="line">20</span><br><span class="line">21</span><br><span class="line">22</span><br></pre></td><td class="code"><pre><span class="line"><span class="keyword">from</span> nolearn.lasagne <span class="keyword">import</span> NeuralNet</span><br><span class="line"><span class="keyword">from</span> lasagne.nonlinearities <span class="keyword">import</span> sigmoid, softmax</span><br><span class="line">nnet = NeuralNet(layers=layers,</span><br><span class="line">                 input_shape=(<span class="literal">None</span>, <span class="number">3</span>, <span class="number">32</span>, <span class="number">32</span>),</span><br><span class="line">                 conv1_num_filters=<span class="number">32</span>,</span><br><span class="line">                 conv1_filter_size=(<span class="number">3</span>, <span class="number">3</span>),</span><br><span class="line">                 conv2_num_filters=<span class="number">64</span>,</span><br><span class="line">                 conv2_filter_size=(<span class="number">2</span>, <span class="number">2</span>),</span><br><span class="line">                 conv3_num_filters=<span class="number">128</span>,</span><br><span class="line">                 conv3_filter_size=(<span class="number">2</span>, <span class="number">2</span>),</span><br><span 
class="line">                 pool1_pool_size=(<span class="number">2</span>,<span class="number">2</span>),</span><br><span class="line">                 pool2_pool_size=(<span class="number">2</span>,<span class="number">2</span>),</span><br><span class="line">                 pool3_pool_size=(<span class="number">2</span>,<span class="number">2</span>),</span><br><span class="line">                 hidden4_num_units=<span class="number">500</span>,</span><br><span class="line">                 hidden5_num_units=<span class="number">500</span>,</span><br><span class="line">                 output_num_units=<span class="number">10</span>,</span><br><span class="line">                 output_nonlinearity=softmax,</span><br><span class="line">                 update_learning_rate=<span class="number">0.01</span>,</span><br><span class="line">                 update_momentum=<span class="number">0.9</span>,</span><br><span class="line">                 regression=<span class="literal">True</span>,</span><br><span class="line">                 max_epochs=<span class="number">3</span>,</span><br><span class="line">                 verbose=<span class="number">1</span>)</span><br></pre></td></tr></table></figure>
</li>
<li><p>训练神经网络，进行测试。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br></pre></td><td class="code"><pre><span class="line">nnet.fit(X_train, y_train)</span><br><span class="line"><span class="keyword">from</span> sklearn.metrics <span class="keyword">import</span> f1_score</span><br><span class="line">y_pred = nnet.predict(X_test)</span><br><span class="line">print(f1_score(y_test.argmax(axis=<span class="number">1</span>), y_pred.argmax(axis=<span class="number">1</span>), average=<span class="string">'micro'</span>))</span><br></pre></td></tr></table></figure>
</li>
</ul>
<h2 id="第十二章-大数据处理"><a href="#第十二章-大数据处理" class="headerlink" title="第十二章 大数据处理"></a>第十二章 大数据处理</h2><h3 id="12-1-大数据"><a href="#12-1-大数据" class="headerlink" title="12.1 大数据"></a>12.1 大数据</h3><ul>
<li>大数据的特点：<ul>
<li>海量：数据量大</li>
<li>高速：数据分析速度快</li>
<li>多样：数据集有多种形式</li>
<li>准确：很难确定采集到的数据是否准确。</li>
</ul>
</li>
<li>大数据无法加载到内存中。</li>
</ul>
<h3 id="12-2-大数据应用场景和目标"><a href="#12-2-大数据应用场景和目标" class="headerlink" title="12.2 大数据应用场景和目标"></a>12.2 大数据应用场景和目标</h3><ul>
<li>应用场景：<ul>
<li>搜索引擎</li>
<li>科学实验</li>
<li>政府数据处理</li>
<li>交通管理</li>
<li>改善客户体验，降低支出</li>
<li>提高公司经营管理的自动化程度，改善产品和服务质量</li>
<li>监测网络流量，寻找大型网络的恶意软件感染。</li>
</ul>
</li>
</ul>
<h3 id="12-3-MapReduce"><a href="#12-3-MapReduce" class="headerlink" title="12.3 MapReduce"></a>12.3 MapReduce</h3><ul>
<li><p>谷歌出于并行计算的需要，提出了MapReduce模型，引入了容错和可伸缩特性，可用于任意大数据集的一般性计算任务。</p>
</li>
<li><p>MapReduce主要分为映射（Map）和规约（Reduce）两步。</p>
</li>
<li><p>MapReduce范式还包括排序和合并两步。</p>
</li>
<li><p>映射这一步，接收一个函数，用这个函数处理列表中的各个元素，返回和之前列表长度相等的列表，新列表的元素为函数的返回结果。</p>
</li>
<li><p>建立sum函数与a之间的映射关系。sums是生成器，在调用前不会计算。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br></pre></td><td class="code"><pre><span class="line">a = [[<span class="number">1</span>,<span class="number">2</span>,<span class="number">1</span>], [<span class="number">3</span>,<span class="number">2</span>], [<span class="number">4</span>,<span class="number">9</span>,<span class="number">1</span>,<span class="number">0</span>,<span class="number">2</span>]]</span><br><span class="line">sums = map(sum, a)</span><br><span class="line"><span class="comment"># 等效为：</span></span><br><span class="line">sums = [] </span><br><span class="line"><span class="keyword">for</span> sublist <span class="keyword">in</span> a: </span><br><span class="line">    results = sum(sublist) </span><br><span class="line">    sums.append(results)</span><br></pre></td></tr></table></figure>
</li>
<li><p>规约需要对返回结果的每一个元素应用一个函数，从初始值开始，对初始值和第一个元素应用指定函数，得到返回结果，然后再对所得到的结果和下一个值应用指定函数，以此类推。规约函数为<code>from functools import reduce</code>，三个参数分别为函数的名字、列表和初始值。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br></pre></td><td class="code"><pre><span class="line"><span class="function"><span class="keyword">def</span> <span class="title">add</span><span class="params">(a, b)</span>:</span></span><br><span class="line">    <span class="keyword">return</span> a + b </span><br><span class="line"><span class="keyword">from</span> functools <span class="keyword">import</span> reduce</span><br><span class="line">print(reduce(add, sums, <span class="number">0</span>))</span><br><span class="line"><span class="comment"># 等价于：</span></span><br><span class="line">initial = <span class="number">0</span></span><br><span class="line">current_result = initial</span><br><span class="line"><span class="keyword">for</span> element <span class="keyword">in</span> sums:</span><br><span class="line">    current_result = add(current_result, element)</span><br></pre></td></tr></table></figure>
</li>
<li><p>为了实现分布式计算，可以在映射这一步把各个二级列表及函数说明分发到不同的计算机上。计算完成后，各计算机把结果返回主计算机。然后主计算机把结果发送给另一台计算机做规约。大大节省了存储空间。</p>
</li>
<li><p>映射函数接收一个键值对，返回键值对列表。如接收（文档编号，文本内容）键值对，返回（单词，词频）键值对。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br></pre></td><td class="code"><pre><span class="line"><span class="function"><span class="keyword">def</span> <span class="title">map_word_count</span><span class="params">(document_id, document)</span>:</span></span><br><span class="line">    counts = defaultdict(int)</span><br><span class="line">    <span class="keyword">for</span> word <span class="keyword">in</span> document.split():</span><br><span class="line">        counts[word] += <span class="number">1</span></span><br><span class="line">    <span class="keyword">for</span> word <span class="keyword">in</span> counts:</span><br><span class="line">        <span class="keyword">yield</span> (word, counts[word])</span><br></pre></td></tr></table></figure>
</li>
<li><p>把每个键所有值聚集到一起。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br></pre></td><td class="code"><pre><span class="line"><span class="function"><span class="keyword">def</span> <span class="title">shuffle_words</span><span class="params">(results_generators)</span>:</span></span><br><span class="line">    records = defaultdict(list)</span><br><span class="line">    <span class="keyword">for</span> results <span class="keyword">in</span> results_generators:</span><br><span class="line">        <span class="keyword">for</span> word, count <span class="keyword">in</span> results:</span><br><span class="line">            records[word].append(count)</span><br><span class="line">    <span class="keyword">for</span> word <span class="keyword">in</span> records:</span><br><span class="line">        <span class="keyword">yield</span> (word, records[word])</span><br></pre></td></tr></table></figure>
</li>
<li><p>规约接收一键值对，返回另一键值对。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br></pre></td><td class="code"><pre><span class="line"><span class="function"><span class="keyword">def</span> <span class="title">reduce_counts</span><span class="params">(word, list_of_counts)</span>:</span></span><br><span class="line">    <span class="keyword">return</span> (word, sum(list_of_counts))</span><br></pre></td></tr></table></figure>
</li>
<li><p>获取sklearn的20个新闻语料。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br></pre></td><td class="code"><pre><span class="line"><span class="keyword">from</span> sklearn.datasets <span class="keyword">import</span> fetch_20newsgroups</span><br><span class="line">dataset = fetch_20newsgroups(subset=<span class="string">'train'</span>)</span><br><span class="line">documents = dataset.data[:<span class="number">50</span>]</span><br></pre></td></tr></table></figure>
</li>
<li><p>执行映射操作，得到能输出键值对（单词、词频）的生成器。执行shuffle操作，生成单词和该单词在各文档出现次数的列表两项。规约，输出单词和单词在所有文档中的词频。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br></pre></td><td class="code"><pre><span class="line">map_results = map(map_word_count, range(len(documents)), documents)</span><br><span class="line">shuffle_results = shuffle_words(map_results)</span><br><span class="line">reduce_results = [reduce_counts(word, list_of_counts) <span class="keyword">for</span> word, list_of_counts <span class="keyword">in</span> shuffle_results]</span><br></pre></td></tr></table></figure>
</li>
<li><p>Hadoop是一组包括MapReduce在内的开源工具。主要组件为Hadoop MapReduce。其他处理大数据的工具有如下几种：</p>
<ul>
<li>Hadoop分布式文件系统（HDFS）：该文件系统可以将文件保存到多台计算机上，防范硬件故障，提高带宽。</li>
<li>YARN：用于调度应用和管理计算机集群。</li>
<li>Pig：用于MapReduce的高级语言。</li>
<li>Hive：用于管理数据仓库和进行查询。</li>
<li>HBase：对谷歌分布式数据库BigTable的一种实现。</li>
</ul>
</li>
</ul>
<h3 id="12-4-应用"><a href="#12-4-应用" class="headerlink" title="12.4 应用"></a>12.4 应用</h3><ul>
<li><p>根据博主用词习惯判断博主性别。</p>
</li>
<li><p>MapReduce常用映射对列表中的每一篇文档运行预测模型，使用规约来调整预测结果列表，以便把结果和原文档对应起来。</p>
</li>
<li><p>测试打开并读取博客内容。设置是否在博客中的标记，找到博客开始标签<code>&lt;post&gt;</code>后，将标记设置为True。找到关闭标签<code>&lt;/post&gt;</code>后，将标记值设置为False。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br><span class="line">13</span><br><span class="line">14</span><br><span class="line">15</span><br></pre></td><td class="code"><pre><span class="line">all_posts = []</span><br><span class="line"><span class="keyword">with</span> codecs.open(filename, encoding=<span class="string">'utf8'</span>) <span class="keyword">as</span> inf:</span><br><span class="line">    <span class="comment"># remove leading and trailing whitespace</span></span><br><span class="line">    post_start = <span class="literal">False</span></span><br><span class="line">    post = []</span><br><span class="line">    <span class="keyword">for</span> line <span class="keyword">in</span> inf:</span><br><span class="line">        line = line.strip()</span><br><span class="line">        <span class="keyword">if</span> line == <span class="string">"&lt;post&gt;"</span>:</span><br><span class="line">            post_start = <span class="literal">True</span></span><br><span class="line">        <span class="keyword">elif</span> line == <span class="string">"&lt;/post&gt;"</span>:</span><br><span class="line">            post_start = <span class="literal">False</span></span><br><span class="line">            all_posts.append(<span class="string">"\n"</span>.join(post))</span><br><span class="line">            post = []</span><br><span class="line">        <span class="keyword">elif</span> post_start:</span><br><span class="line">            post.append(line)</span><br></pre></td></tr></table></figure>
</li>
<li><p>使用映射规约任务包mrjob。提供了大部分MapReduce任务所需的标准功能，既能在没有安装Hadoop的本地计算机上进行测试，也能在Hadoop服务器上测试。</p>
</li>
<li><p>创建MRJob的子类从文件中抽取博客内容。映射函数处理每一行，从文件取一行作为输入，最后生成一篇博客的所有内容，每一行都来自同一任务所处理的文件。获取以环境变量存储的文件名。获取文件名中的性别信息。使用yield生成器表示博主性别和博客内容，便于mrjob跟踪输出。获取所有以51开始的文件，进行测试。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br><span class="line">13</span><br><span class="line">14</span><br><span class="line">15</span><br><span class="line">16</span><br><span class="line">17</span><br><span class="line">18</span><br><span class="line">19</span><br><span class="line">20</span><br><span class="line">21</span><br><span class="line">22</span><br><span class="line">23</span><br><span class="line">24</span><br><span class="line">25</span><br></pre></td><td class="code"><pre><span class="line"><span class="keyword">import</span> os</span><br><span class="line"><span class="keyword">import</span> re</span><br><span class="line"><span class="keyword">from</span> mrjob.job <span class="keyword">import</span> MRJob</span><br><span class="line">word_search_re = re.compile(<span class="string">r"[\w']+"</span>)</span><br><span class="line"><span class="class"><span class="keyword">class</span> <span class="title">ExtractPosts</span><span class="params">(MRJob)</span>:</span></span><br><span class="line">    post_start = <span class="literal">False</span></span><br><span class="line">    post = []</span><br><span class="line">    <span class="function"><span class="keyword">def</span> <span class="title">mapper</span><span class="params">(self, key, line)</span>:</span></span><br><span class="line">        filename = os.environ[<span class="string">"map_input_file"</span>]</span><br><span class="line">        gender = filename.split(<span class="string">"."</span>)[<span class="number">1</span>]</span><br><span class="line">        <span 
class="keyword">try</span>:</span><br><span class="line">            docnum = int(filename[<span class="number">0</span>])</span><br><span class="line">        <span class="keyword">except</span>:</span><br><span class="line">            docnum = <span class="number">8</span></span><br><span class="line">        <span class="keyword">if</span> re.match(<span class="string">r"file://blogs\\51.*"</span>,filename):</span><br><span class="line">            <span class="comment"># remove leading and trailing whitespace</span></span><br><span class="line">            line = line.strip()</span><br><span class="line">            <span class="keyword">if</span> line == <span class="string">"&lt;post&gt;"</span>:</span><br><span class="line">                self.post_start = <span class="literal">True</span></span><br><span class="line">            <span class="keyword">elif</span> line == <span class="string">"&lt;/post&gt;"</span>:</span><br><span class="line">                self.post_start = <span class="literal">False</span></span><br><span class="line">                <span class="keyword">yield</span> gender, repr(<span class="string">"\n"</span>.join(self.post))</span><br><span class="line">                self.post = []</span><br><span class="line">            <span class="keyword">elif</span> self.post_start:</span><br><span class="line">                self.post.append(line)</span><br></pre></td></tr></table></figure>
</li>
<li><p>执行MapReduce任务。<code>python .\extract_posts.py blogs/51* --output-dir=51blogs/blogposts</code></p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br></pre></td><td class="code"><pre><span class="line"><span class="keyword">if</span> __name__ == <span class="string">'__main__'</span>:</span><br><span class="line">    ExtractPosts.run()</span><br></pre></td></tr></table></figure>
</li>
<li><p><code>from mrjob.step import MRStep</code>用MRStep管理MapReduce中的每一步操作。任务分为三步：映射、规约、再映射和规约。</p>
</li>
<li><p><code>word_search_re = re.compile(r&quot;[\w&#39;]+&quot;)</code>创建用于匹配单词的正则表达式，并对其进行编译，用来查找单词的边界。</p>
</li>
<li><p>创建新类，用于训练朴素贝叶斯分类器。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br><span class="line">13</span><br><span class="line">14</span><br><span class="line">15</span><br><span class="line">16</span><br><span class="line">17</span><br><span class="line">18</span><br><span class="line">19</span><br><span class="line">20</span><br><span class="line">21</span><br><span class="line">22</span><br><span class="line">23</span><br><span class="line">24</span><br><span class="line">25</span><br><span class="line">26</span><br><span class="line">27</span><br><span class="line">28</span><br><span class="line">29</span><br><span class="line">30</span><br><span class="line">31</span><br><span class="line">32</span><br><span class="line">33</span><br><span class="line">34</span><br><span class="line">35</span><br><span class="line">36</span><br><span class="line">37</span><br><span class="line">38</span><br><span class="line">39</span><br><span class="line">40</span><br><span class="line">41</span><br><span class="line">42</span><br><span class="line">43</span><br><span class="line">44</span><br><span class="line">45</span><br><span class="line">46</span><br><span class="line">47</span><br><span class="line">48</span><br><span class="line">49</span><br></pre></td><td class="code"><pre><span class="line"><span class="class"><span class="keyword">class</span> <span class="title">NaiveBayesTrainer</span><span class="params">(MRJob)</span>:</span></span><br><span class="line">    <span class="comment"># 定义MapReduce任务的各个步骤：第一步抽取单词出现的频率，第二步比较一个单词在男女博主所写博客中出现的概率，旋转较大的作为分类结果，写入输出文件。每一步中定义映射和规约函数。</span></span><br><span 
class="line">    <span class="function"><span class="keyword">def</span> <span class="title">steps</span><span class="params">(self)</span>:</span></span><br><span class="line">        <span class="keyword">return</span> [</span><br><span class="line">            MRStep(mapper=self.extract_words_mapping,</span><br><span class="line">                   reducer=self.reducer_count_words),</span><br><span class="line">            MRStep(reducer=self.compare_words_reducer),</span><br><span class="line">            ]</span><br><span class="line"><span class="comment"># 接收一条博客数据，获取里面所有单词，返回1. / len(all_words)，以便后续求词频，输出博主性别。使用eval将字符串转换为列表，但不安全，建议用json。</span></span><br><span class="line">    <span class="function"><span class="keyword">def</span> <span class="title">extract_words_mapping</span><span class="params">(self, key, value)</span>:</span></span><br><span class="line">        tokens = value.split()</span><br><span class="line">        gender = eval(tokens[<span class="number">0</span>])</span><br><span class="line">        blog_post = eval(<span class="string">" "</span>.join(tokens[<span class="number">1</span>:]))</span><br><span class="line">        all_words = word_search_re.findall(blog_post)</span><br><span class="line">        all_words = [word.lower() <span class="keyword">for</span> word <span class="keyword">in</span> all_words]</span><br><span class="line">        <span class="comment">#for word in all_words:</span></span><br><span class="line">        <span class="keyword">for</span> word <span class="keyword">in</span> all_words:</span><br><span class="line">            <span class="comment">#yield "&#123;0&#125;:&#123;1&#125;".format(gender, word.lower()), 1</span></span><br><span class="line">            <span class="comment">#yield (gender, word.lower()), (1. 
/ len(all_words))</span></span><br><span class="line">            <span class="comment"># Occurence probability</span></span><br><span class="line">            <span class="keyword">yield</span> (gender, word), <span class="number">1.</span> / len(all_words)</span><br><span class="line"><span class="comment"># 汇总每个性别使用每个单词的频率，把键改为单词。</span></span><br><span class="line">    <span class="function"><span class="keyword">def</span> <span class="title">reducer_count_words</span><span class="params">(self, key, counts)</span>:</span></span><br><span class="line">        s = sum(counts)</span><br><span class="line">        gender, word = key <span class="comment">#.split(":")</span></span><br><span class="line">        <span class="keyword">yield</span> word, (gender, s)</span><br><span class="line"><span class="comment"># 数据将作为一致性映射类型直接传入规约函数中，规约函数会将每个单词在所有文章中的出现频率按照性别汇集到一起，输出单词和词频字典。</span></span><br><span class="line">    <span class="function"><span class="keyword">def</span> <span class="title">compare_words_reducer</span><span class="params">(self, word, values)</span>:</span></span><br><span class="line">        per_gender = &#123;&#125;</span><br><span class="line">        <span class="keyword">for</span> value <span class="keyword">in</span> values:</span><br><span class="line">            gender, s = value</span><br><span class="line">            per_gender[gender] = s</span><br><span class="line">        <span class="keyword">yield</span> word, per_gender</span><br><span class="line"></span><br><span class="line">    <span class="function"><span class="keyword">def</span> <span class="title">ratio_mapper</span><span class="params">(self, word, value)</span>:</span></span><br><span class="line">        counts = dict(value)</span><br><span class="line">        sum_of_counts = float(np.mean(counts.values()))</span><br><span class="line">        maximum_score = max(counts.items(), key=itemgetter(<span class="number">1</span>))</span><br><span class="line">        
current_ratio = maximum_score[<span class="number">1</span>] / sum_of_counts</span><br><span class="line">        <span class="keyword">yield</span> <span class="literal">None</span>, (word, sum_of_counts, value)</span><br><span class="line">    </span><br><span class="line">    <span class="function"><span class="keyword">def</span> <span class="title">sorter_reducer</span><span class="params">(self, key, values)</span>:</span></span><br><span class="line">        ranked_list = sorted(values, key=itemgetter(<span class="number">1</span>), reverse=<span class="literal">True</span>)</span><br><span class="line">        n_printed = <span class="number">0</span></span><br><span class="line">        <span class="keyword">for</span> word, sum_of_counts, scores <span class="keyword">in</span> ranked_list:</span><br><span class="line">            <span class="keyword">if</span> n_printed &lt; <span class="number">20</span>:</span><br><span class="line">                print((n_printed + <span class="number">1</span>), word, scores)</span><br><span class="line">                n_printed += <span class="number">1</span></span><br><span class="line">            <span class="keyword">yield</span> word, dict(scores)</span><br></pre></td></tr></table></figure>
</li>
<li><p>运行代码，训练朴素贝叶斯模型。<code>python .\nb_train.py 51blogs/blogposts/ --output-dir=models/</code></p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br></pre></td><td class="code"><pre><span class="line"><span class="keyword">if</span> __name__ == <span class="string">'__main__'</span>:</span><br><span class="line">    NaiveBayesTrainer.run()</span><br></pre></td></tr></table></figure>
</li>
<li><p>用命令<code>cat * &gt; model.txt</code>将数据文件内容追加到model.txt中。</p>
</li>
<li><p>重新定义查找单词的正则表达式。<code>word_search_re = re.compile(r&quot;[\w&#39;]+&quot;)</code></p>
</li>
<li><p>声明从指定文件名加载模型的函数。模型是一个值为字典的字典。将模型的每一行分为两部分，用eval函数获得实际的值，它们之前是用repr函数存储的。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br></pre></td><td class="code"><pre><span class="line"><span class="function"><span class="keyword">def</span> <span class="title">load_model</span><span class="params">(model_filename)</span>:</span></span><br><span class="line">    model = defaultdict(<span class="keyword">lambda</span>: defaultdict(float))</span><br><span class="line">    <span class="keyword">with</span> open(model_filename) <span class="keyword">as</span> inf:</span><br><span class="line">        <span class="keyword">for</span> line <span class="keyword">in</span> inf:</span><br><span class="line">            word, values = line.split(maxsplit=<span class="number">1</span>)</span><br><span class="line">            word = eval(word)</span><br><span class="line">            values = eval(values)</span><br><span class="line">            model[word] = values</span><br><span class="line">    <span class="keyword">return</span> model</span><br></pre></td></tr></table></figure>
</li>
<li><p>加载实际的模型。中文系统要注意另存为utf8。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br></pre></td><td class="code"><pre><span class="line">model_filename = os.path.join(<span class="string">"models"</span>, <span class="string">"model.txt"</span>)</span><br><span class="line">model = load_model(model_filename)</span><br></pre></td></tr></table></figure>
</li>
<li><p>创建使用模型做预测的函数。使用log防止下溢，对于模型中不存在的词，给出默认概率1e-5。</p>
<figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br></pre></td><td class="code"><pre><span class="line"><span class="function"><span class="keyword">def</span> <span class="title">nb_predict</span><span class="params">(model, document)</span>:</span></span><br><span class="line">    words = word_search_re.findall(document)</span><br><span class="line">    probabilities = defaultdict(<span class="keyword">lambda</span> : <span class="number">0</span>)</span><br><span class="line">    <span class="keyword">for</span> word <span class="keyword">in</span> set(words):</span><br><span class="line">        probabilities[<span class="string">"male"</span>] += np.log(model[word].get(<span class="string">"male"</span>, <span class="number">1e-5</span>))</span><br><span class="line">        probabilities[<span class="string">"female"</span>] += np.log(model[word].get(<span class="string">"female"</span>, <span class="number">1e-5</span>))</span><br><span class="line">    <span class="comment"># Now find the most likely gender</span></span><br><span class="line">    most_likely_genders = sorted(probabilities.items(), key=itemgetter(<span class="number">1</span>), reverse=<span class="literal">True</span>)</span><br><span class="line">    <span class="keyword">return</span> most_likely_genders[<span class="number">0</span>][<span class="number">0</span>]</span><br></pre></td></tr></table></figure>
</li>
</ul>

      

      
        <!-- Donation ("赏") widget: hovering the button reveals a tooltip with
             Alipay and WeChat payment QR images. The inert javascript:; link is
             presumably wired up by the theme's bundled script — confirm there. -->
        <div class="page-reward">
          <a href="javascript:;" class="page-reward-btn tooltip-top">
            <div class="tooltip tooltip-east">
            <span class="tooltip-item">
              赏
            </span>
            <span class="tooltip-content">
              <span class="tooltip-text">
                <span class="tooltip-inner">
                  <p class="reward-p"><i class="icon icon-quo-left"></i>谢谢你请我吃糖果<i class="icon icon-quo-right"></i></p>
                  <div class="reward-box">
                    
                    <div class="reward-box-item">
                      <img class="reward-img" src="/assets/image/alipay.jpg">
                      <span class="reward-type">支付宝</span>
                    </div>
                    
                    
                    <div class="reward-box-item">
                      <img class="reward-img" src="/assets/image/weixinfukuan.jpg">
                      <span class="reward-type">微信</span>
                    </div>
                    
                  </div>
                </span>
              </span>
            </span>
          </div>
          </a>
        </div>
      
    </div>
    <div class="article-info article-info-index">
      
      
	<div class="article-tag tagcloud">
		<i class="icon-price-tags icon"></i>
		<ul class="article-tag-list">
			 
        		<li class="article-tag-list-item">
        			<a href="javascript:void(0)" class="js-tag article-tag-list-link color4">Python笔记</a>
        		</li>
      		
		</ul>
	</div>

      

      

      
        
<!-- Social share widget: hovering the outer icon reveals per-network share links
     (Weibo, WeChat, QQ, Douban, Qzone, Facebook, Twitter, Google). The links are
     inert JS hooks; the data-type attribute presumably selects the target network
     in the theme's bundled script — confirm there. -->
<div class="share-btn share-icons tooltip-left">
  <div class="tooltip tooltip-east">
    <span class="tooltip-item">
      <a href="javascript:;" class="share-sns share-outer">
        <i class="icon icon-share"></i>
      </a>
    </span>
    <span class="tooltip-content">
      <div class="share-wrap">
        <div class="share-icons">
          <a class="weibo share-sns" href="javascript:;" data-type="weibo">
            <i class="icon icon-weibo"></i>
          </a>
          <a class="weixin share-sns wxFab" href="javascript:;" data-type="weixin">
            <i class="icon icon-weixin"></i>
          </a>
          <a class="qq share-sns" href="javascript:;" data-type="qq">
            <i class="icon icon-qq"></i>
          </a>
          <a class="douban share-sns" href="javascript:;" data-type="douban">
            <i class="icon icon-douban"></i>
          </a>
          <a class="qzone share-sns" href="javascript:;" data-type="qzone">
            <i class="icon icon-qzone"></i>
          </a>
          <a class="facebook share-sns" href="javascript:;" data-type="facebook">
            <i class="icon icon-facebook"></i>
          </a>
          <a class="twitter share-sns" href="javascript:;" data-type="twitter">
            <i class="icon icon-twitter"></i>
          </a>
          <a class="google share-sns" href="javascript:;" data-type="google">
            <i class="icon icon-google"></i>
          </a>
        </div>
      </div>
    </span>
  </div>
</div>

<!-- WeChat share modal (js-wx-box): displays a QR code for this post's URL,
     generated by the Baidu Pan qrcode service. -->
<div class="page-modal wx-share js-wx-box">
    <a class="close js-modal-close" href="javascript:;"><i class="icon icon-close"></i></a>
    <p>扫一扫，分享到微信</p>
    <div class="wx-qrcode">
      <img src="//pan.baidu.com/share/qrcode?url=http://www.guzhipin.top/2019/08/04/Python数据挖掘入门与实践/" alt="微信分享二维码">
    </div>
</div>

<!-- Backdrop overlay shown behind the modal (js-mask). -->
<div class="mask js-mask"></div>
      
      <div class="clearfix"></div>
    </div>
  </div>
</article>

  
<!-- Previous / next post navigation: "newer" points at the 2019-08-22 post,
     "older" at the 2019-04-13 post. -->
<nav id="article-nav">
  
    <a href="/2019/08/22/《白帽子讲Web安全》笔记/" id="article-nav-newer" class="article-nav-link-wrap">
      <i class="icon-circle-left"></i>
      <div class="article-nav-title">
        
          《白帽子讲Web安全》笔记
        
      </div>
    </a>
  
  
    <a href="/2019/04/13/测试打赏/" id="article-nav-older" class="article-nav-link-wrap">
      <div class="article-nav-title">测试打赏</div>
      <i class="icon-circle-right"></i>
    </a>
  
</nav>


<!-- Side operation panel: back-to-top control. Hidden by default
     (style="display:none"); presumably toggled by the theme script via the
     js-jump-container id — confirm there. -->
<aside class="wrap-side-operation">
    <div class="mod-side-operation">
        
        <div class="jump-container" id="js-jump-container" style="display:none;">
            <a href="javascript:void(0)" class="mod-side-operation__jump-to-top">
                <i class="icon-font icon-back"></i>
            </a>
            <div id="js-jump-plan-container" class="jump-plan-container" style="top: -11px;">
                <i class="icon-font icon-plane jump-plane"></i>
            </div>
        </div>
        
        
    </div>
</aside>



  
  
  

  

  

  


          </div>
        </div>
      </div>
      <!-- Site footer: copyright and theme credits.
           rel="noopener" added to the external target="_blank" links to prevent
           the opened page from reaching back via window.opener (reverse tabnabbing). -->
      <footer id="footer">
  <div class="outer">
    <div id="footer-info">
    	<div class="footer-left">
    		&copy; 2019 quekai
    	</div>
      	<div class="footer-right">
      		<a href="http://hexo.io/" target="_blank" rel="noopener">Hexo</a>  Theme <a href="https://github.com/litten/hexo-theme-yilia" target="_blank" rel="noopener">Yilia</a> by Litten
      	</div>
    </div>
  </div>
</footer>
    </div>
    <script>
	// Page-level configuration flags for the Yilia theme, read by the bundled
	// client script below. Values are emitted per page by Hexo; this page is a
	// post (isPost: true). Flag names suggest page-type switches (isHome,
	// isArchive, isTag, isCategory), UI toggles (mathjax, open_in_new,
	// toc_hide_index, showTags, innerArchive) and the site root path —
	// NOTE(review): exact semantics live in the theme script; confirm there.
	var yiliaConfig = {
		mathjax: false,
		isHome: false,
		isPost: true,
		isArchive: false,
		isTag: false,
		isCategory: false,
		open_in_new: false,
		toc_hide_index: true,
		root: "/",
		innerArchive: true,
		showTags: false
	}
</script>

<script>!function(t){function n(e){if(r[e])return r[e].exports;var i=r[e]={exports:{},id:e,loaded:!1};return t[e].call(i.exports,i,i.exports,n),i.loaded=!0,i.exports}var r={};n.m=t,n.c=r,n.p="./",n(0)}([function(t,n,r){r(195),t.exports=r(191)},function(t,n,r){var e=r(3),i=r(52),o=r(27),u=r(28),c=r(53),f="prototype",a=function(t,n,r){var s,l,h,v,p=t&a.F,d=t&a.G,y=t&a.S,g=t&a.P,b=t&a.B,m=d?e:y?e[n]||(e[n]={}):(e[n]||{})[f],x=d?i:i[n]||(i[n]={}),w=x[f]||(x[f]={});d&&(r=n);for(s in r)l=!p&&m&&void 0!==m[s],h=(l?m:r)[s],v=b&&l?c(h,e):g&&"function"==typeof h?c(Function.call,h):h,m&&u(m,s,h,t&a.U),x[s]!=h&&o(x,s,v),g&&w[s]!=h&&(w[s]=h)};e.core=i,a.F=1,a.G=2,a.S=4,a.P=8,a.B=16,a.W=32,a.U=64,a.R=128,t.exports=a},function(t,n,r){var e=r(6);t.exports=function(t){if(!e(t))throw TypeError(t+" is not an object!");return t}},function(t,n){var r=t.exports="undefined"!=typeof window&&window.Math==Math?window:"undefined"!=typeof self&&self.Math==Math?self:Function("return this")();"number"==typeof __g&&(__g=r)},function(t,n){t.exports=function(t){try{return!!t()}catch(t){return!0}}},function(t,n){var r=t.exports="undefined"!=typeof window&&window.Math==Math?window:"undefined"!=typeof self&&self.Math==Math?self:Function("return this")();"number"==typeof __g&&(__g=r)},function(t,n){t.exports=function(t){return"object"==typeof t?null!==t:"function"==typeof t}},function(t,n,r){var e=r(126)("wks"),i=r(76),o=r(3).Symbol,u="function"==typeof o;(t.exports=function(t){return e[t]||(e[t]=u&&o[t]||(u?o:i)("Symbol."+t))}).store=e},function(t,n){var r={}.hasOwnProperty;t.exports=function(t,n){return r.call(t,n)}},function(t,n,r){var e=r(94),i=r(33);t.exports=function(t){return e(i(t))}},function(t,n,r){t.exports=!r(4)(function(){return 7!=Object.defineProperty({},"a",{get:function(){return 7}}).a})},function(t,n,r){var e=r(2),i=r(167),o=r(50),u=Object.defineProperty;n.f=r(10)?Object.defineProperty:function(t,n,r){if(e(t),n=o(n,!0),e(r),i)try{return u(t,n,r)}catch(t){}if("get"in r||"set"in 
r)throw TypeError("Accessors not supported!");return"value"in r&&(t[n]=r.value),t}},function(t,n,r){t.exports=!r(18)(function(){return 7!=Object.defineProperty({},"a",{get:function(){return 7}}).a})},function(t,n,r){var e=r(14),i=r(22);t.exports=r(12)?function(t,n,r){return e.f(t,n,i(1,r))}:function(t,n,r){return t[n]=r,t}},function(t,n,r){var e=r(20),i=r(58),o=r(42),u=Object.defineProperty;n.f=r(12)?Object.defineProperty:function(t,n,r){if(e(t),n=o(n,!0),e(r),i)try{return u(t,n,r)}catch(t){}if("get"in r||"set"in r)throw TypeError("Accessors not supported!");return"value"in r&&(t[n]=r.value),t}},function(t,n,r){var e=r(40)("wks"),i=r(23),o=r(5).Symbol,u="function"==typeof o;(t.exports=function(t){return e[t]||(e[t]=u&&o[t]||(u?o:i)("Symbol."+t))}).store=e},function(t,n,r){var e=r(67),i=Math.min;t.exports=function(t){return t>0?i(e(t),9007199254740991):0}},function(t,n,r){var e=r(46);t.exports=function(t){return Object(e(t))}},function(t,n){t.exports=function(t){try{return!!t()}catch(t){return!0}}},function(t,n,r){var e=r(63),i=r(34);t.exports=Object.keys||function(t){return e(t,i)}},function(t,n,r){var e=r(21);t.exports=function(t){if(!e(t))throw TypeError(t+" is not an object!");return t}},function(t,n){t.exports=function(t){return"object"==typeof t?null!==t:"function"==typeof t}},function(t,n){t.exports=function(t,n){return{enumerable:!(1&t),configurable:!(2&t),writable:!(4&t),value:n}}},function(t,n){var r=0,e=Math.random();t.exports=function(t){return"Symbol(".concat(void 0===t?"":t,")_",(++r+e).toString(36))}},function(t,n){var r={}.hasOwnProperty;t.exports=function(t,n){return r.call(t,n)}},function(t,n){var r=t.exports={version:"2.4.0"};"number"==typeof __e&&(__e=r)},function(t,n){t.exports=function(t){if("function"!=typeof t)throw TypeError(t+" is not a function!");return t}},function(t,n,r){var e=r(11),i=r(66);t.exports=r(10)?function(t,n,r){return e.f(t,n,i(1,r))}:function(t,n,r){return t[n]=r,t}},function(t,n,r){var 
e=r(3),i=r(27),o=r(24),u=r(76)("src"),c="toString",f=Function[c],a=(""+f).split(c);r(52).inspectSource=function(t){return f.call(t)},(t.exports=function(t,n,r,c){var f="function"==typeof r;f&&(o(r,"name")||i(r,"name",n)),t[n]!==r&&(f&&(o(r,u)||i(r,u,t[n]?""+t[n]:a.join(String(n)))),t===e?t[n]=r:c?t[n]?t[n]=r:i(t,n,r):(delete t[n],i(t,n,r)))})(Function.prototype,c,function(){return"function"==typeof this&&this[u]||f.call(this)})},function(t,n,r){var e=r(1),i=r(4),o=r(46),u=function(t,n,r,e){var i=String(o(t)),u="<"+n;return""!==r&&(u+=" "+r+'="'+String(e).replace(/"/g,"&quot;")+'"'),u+">"+i+"</"+n+">"};t.exports=function(t,n){var r={};r[t]=n(u),e(e.P+e.F*i(function(){var n=""[t]('"');return n!==n.toLowerCase()||n.split('"').length>3}),"String",r)}},function(t,n,r){var e=r(115),i=r(46);t.exports=function(t){return e(i(t))}},function(t,n,r){var e=r(116),i=r(66),o=r(30),u=r(50),c=r(24),f=r(167),a=Object.getOwnPropertyDescriptor;n.f=r(10)?a:function(t,n){if(t=o(t),n=u(n,!0),f)try{return a(t,n)}catch(t){}if(c(t,n))return i(!e.f.call(t,n),t[n])}},function(t,n,r){var e=r(24),i=r(17),o=r(145)("IE_PROTO"),u=Object.prototype;t.exports=Object.getPrototypeOf||function(t){return t=i(t),e(t,o)?t[o]:"function"==typeof t.constructor&&t instanceof t.constructor?t.constructor.prototype:t instanceof Object?u:null}},function(t,n){t.exports=function(t){if(void 0==t)throw TypeError("Can't call method on  "+t);return t}},function(t,n){t.exports="constructor,hasOwnProperty,isPrototypeOf,propertyIsEnumerable,toLocaleString,toString,valueOf".split(",")},function(t,n){t.exports={}},function(t,n){t.exports=!0},function(t,n){n.f={}.propertyIsEnumerable},function(t,n,r){var e=r(14).f,i=r(8),o=r(15)("toStringTag");t.exports=function(t,n,r){t&&!i(t=r?t:t.prototype,o)&&e(t,o,{configurable:!0,value:n})}},function(t,n,r){var e=r(40)("keys"),i=r(23);t.exports=function(t){return e[t]||(e[t]=i(t))}},function(t,n,r){var e=r(5),i="__core-js_shared__",o=e[i]||(e[i]={});t.exports=function(t){return 
o[t]||(o[t]={})}},function(t,n){var r=Math.ceil,e=Math.floor;t.exports=function(t){return isNaN(t=+t)?0:(t>0?e:r)(t)}},function(t,n,r){var e=r(21);t.exports=function(t,n){if(!e(t))return t;var r,i;if(n&&"function"==typeof(r=t.toString)&&!e(i=r.call(t)))return i;if("function"==typeof(r=t.valueOf)&&!e(i=r.call(t)))return i;if(!n&&"function"==typeof(r=t.toString)&&!e(i=r.call(t)))return i;throw TypeError("Can't convert object to primitive value")}},function(t,n,r){var e=r(5),i=r(25),o=r(36),u=r(44),c=r(14).f;t.exports=function(t){var n=i.Symbol||(i.Symbol=o?{}:e.Symbol||{});"_"==t.charAt(0)||t in n||c(n,t,{value:u.f(t)})}},function(t,n,r){n.f=r(15)},function(t,n){var r={}.toString;t.exports=function(t){return r.call(t).slice(8,-1)}},function(t,n){t.exports=function(t){if(void 0==t)throw TypeError("Can't call method on  "+t);return t}},function(t,n,r){var e=r(4);t.exports=function(t,n){return!!t&&e(function(){n?t.call(null,function(){},1):t.call(null)})}},function(t,n,r){var e=r(53),i=r(115),o=r(17),u=r(16),c=r(203);t.exports=function(t,n){var r=1==t,f=2==t,a=3==t,s=4==t,l=6==t,h=5==t||l,v=n||c;return function(n,c,p){for(var d,y,g=o(n),b=i(g),m=e(c,p,3),x=u(b.length),w=0,S=r?v(n,x):f?v(n,0):void 0;x>w;w++)if((h||w in b)&&(d=b[w],y=m(d,w,g),t))if(r)S[w]=y;else if(y)switch(t){case 3:return!0;case 5:return d;case 6:return w;case 2:S.push(d)}else if(s)return!1;return l?-1:a||s?s:S}}},function(t,n,r){var e=r(1),i=r(52),o=r(4);t.exports=function(t,n){var r=(i.Object||{})[t]||Object[t],u={};u[t]=n(r),e(e.S+e.F*o(function(){r(1)}),"Object",u)}},function(t,n,r){var e=r(6);t.exports=function(t,n){if(!e(t))return t;var r,i;if(n&&"function"==typeof(r=t.toString)&&!e(i=r.call(t)))return i;if("function"==typeof(r=t.valueOf)&&!e(i=r.call(t)))return i;if(!n&&"function"==typeof(r=t.toString)&&!e(i=r.call(t)))return i;throw TypeError("Can't convert object to primitive value")}},function(t,n,r){var e=r(5),i=r(25),o=r(91),u=r(13),c="prototype",f=function(t,n,r){var 
a,s,l,h=t&f.F,v=t&f.G,p=t&f.S,d=t&f.P,y=t&f.B,g=t&f.W,b=v?i:i[n]||(i[n]={}),m=b[c],x=v?e:p?e[n]:(e[n]||{})[c];v&&(r=n);for(a in r)(s=!h&&x&&void 0!==x[a])&&a in b||(l=s?x[a]:r[a],b[a]=v&&"function"!=typeof x[a]?r[a]:y&&s?o(l,e):g&&x[a]==l?function(t){var n=function(n,r,e){if(this instanceof t){switch(arguments.length){case 0:return new t;case 1:return new t(n);case 2:return new t(n,r)}return new t(n,r,e)}return t.apply(this,arguments)};return n[c]=t[c],n}(l):d&&"function"==typeof l?o(Function.call,l):l,d&&((b.virtual||(b.virtual={}))[a]=l,t&f.R&&m&&!m[a]&&u(m,a,l)))};f.F=1,f.G=2,f.S=4,f.P=8,f.B=16,f.W=32,f.U=64,f.R=128,t.exports=f},function(t,n){var r=t.exports={version:"2.4.0"};"number"==typeof __e&&(__e=r)},function(t,n,r){var e=r(26);t.exports=function(t,n,r){if(e(t),void 0===n)return t;switch(r){case 1:return function(r){return t.call(n,r)};case 2:return function(r,e){return t.call(n,r,e)};case 3:return function(r,e,i){return t.call(n,r,e,i)}}return function(){return t.apply(n,arguments)}}},function(t,n,r){var e=r(183),i=r(1),o=r(126)("metadata"),u=o.store||(o.store=new(r(186))),c=function(t,n,r){var i=u.get(t);if(!i){if(!r)return;u.set(t,i=new e)}var o=i.get(n);if(!o){if(!r)return;i.set(n,o=new e)}return o},f=function(t,n,r){var e=c(n,r,!1);return void 0!==e&&e.has(t)},a=function(t,n,r){var e=c(n,r,!1);return void 0===e?void 0:e.get(t)},s=function(t,n,r,e){c(r,e,!0).set(t,n)},l=function(t,n){var r=c(t,n,!1),e=[];return r&&r.forEach(function(t,n){e.push(n)}),e},h=function(t){return void 0===t||"symbol"==typeof t?t:String(t)},v=function(t){i(i.S,"Reflect",t)};t.exports={store:u,map:c,has:f,get:a,set:s,keys:l,key:h,exp:v}},function(t,n,r){"use strict";if(r(10)){var 
e=r(69),i=r(3),o=r(4),u=r(1),c=r(127),f=r(152),a=r(53),s=r(68),l=r(66),h=r(27),v=r(73),p=r(67),d=r(16),y=r(75),g=r(50),b=r(24),m=r(180),x=r(114),w=r(6),S=r(17),_=r(137),O=r(70),E=r(32),P=r(71).f,j=r(154),F=r(76),M=r(7),A=r(48),N=r(117),T=r(146),I=r(155),k=r(80),L=r(123),R=r(74),C=r(130),D=r(160),U=r(11),W=r(31),G=U.f,B=W.f,V=i.RangeError,z=i.TypeError,q=i.Uint8Array,K="ArrayBuffer",J="Shared"+K,Y="BYTES_PER_ELEMENT",H="prototype",$=Array[H],X=f.ArrayBuffer,Q=f.DataView,Z=A(0),tt=A(2),nt=A(3),rt=A(4),et=A(5),it=A(6),ot=N(!0),ut=N(!1),ct=I.values,ft=I.keys,at=I.entries,st=$.lastIndexOf,lt=$.reduce,ht=$.reduceRight,vt=$.join,pt=$.sort,dt=$.slice,yt=$.toString,gt=$.toLocaleString,bt=M("iterator"),mt=M("toStringTag"),xt=F("typed_constructor"),wt=F("def_constructor"),St=c.CONSTR,_t=c.TYPED,Ot=c.VIEW,Et="Wrong length!",Pt=A(1,function(t,n){return Tt(T(t,t[wt]),n)}),jt=o(function(){return 1===new q(new Uint16Array([1]).buffer)[0]}),Ft=!!q&&!!q[H].set&&o(function(){new q(1).set({})}),Mt=function(t,n){if(void 0===t)throw z(Et);var r=+t,e=d(t);if(n&&!m(r,e))throw V(Et);return e},At=function(t,n){var r=p(t);if(r<0||r%n)throw V("Wrong offset!");return r},Nt=function(t){if(w(t)&&_t in t)return t;throw z(t+" is not a typed array!")},Tt=function(t,n){if(!(w(t)&&xt in t))throw z("It is not a typed array constructor!");return new t(n)},It=function(t,n){return kt(T(t,t[wt]),n)},kt=function(t,n){for(var r=0,e=n.length,i=Tt(t,e);e>r;)i[r]=n[r++];return i},Lt=function(t,n,r){G(t,n,{get:function(){return this._d[r]}})},Rt=function(t){var n,r,e,i,o,u,c=S(t),f=arguments.length,s=f>1?arguments[1]:void 0,l=void 0!==s,h=j(c);if(void 0!=h&&!_(h)){for(u=h.call(c),e=[],n=0;!(o=u.next()).done;n++)e.push(o.value);c=e}for(l&&f>2&&(s=a(s,arguments[2],2)),n=0,r=d(c.length),i=Tt(this,r);r>n;n++)i[n]=l?s(c[n],n):c[n];return i},Ct=function(){for(var t=0,n=arguments.length,r=Tt(this,n);n>t;)r[t]=arguments[t++];return r},Dt=!!q&&o(function(){gt.call(new q(1))}),Ut=function(){return 
gt.apply(Dt?dt.call(Nt(this)):Nt(this),arguments)},Wt={copyWithin:function(t,n){return D.call(Nt(this),t,n,arguments.length>2?arguments[2]:void 0)},every:function(t){return rt(Nt(this),t,arguments.length>1?arguments[1]:void 0)},fill:function(t){return C.apply(Nt(this),arguments)},filter:function(t){return It(this,tt(Nt(this),t,arguments.length>1?arguments[1]:void 0))},find:function(t){return et(Nt(this),t,arguments.length>1?arguments[1]:void 0)},findIndex:function(t){return it(Nt(this),t,arguments.length>1?arguments[1]:void 0)},forEach:function(t){Z(Nt(this),t,arguments.length>1?arguments[1]:void 0)},indexOf:function(t){return ut(Nt(this),t,arguments.length>1?arguments[1]:void 0)},includes:function(t){return ot(Nt(this),t,arguments.length>1?arguments[1]:void 0)},join:function(t){return vt.apply(Nt(this),arguments)},lastIndexOf:function(t){return st.apply(Nt(this),arguments)},map:function(t){return Pt(Nt(this),t,arguments.length>1?arguments[1]:void 0)},reduce:function(t){return lt.apply(Nt(this),arguments)},reduceRight:function(t){return ht.apply(Nt(this),arguments)},reverse:function(){for(var t,n=this,r=Nt(n).length,e=Math.floor(r/2),i=0;i<e;)t=n[i],n[i++]=n[--r],n[r]=t;return n},some:function(t){return nt(Nt(this),t,arguments.length>1?arguments[1]:void 0)},sort:function(t){return pt.call(Nt(this),t)},subarray:function(t,n){var r=Nt(this),e=r.length,i=y(t,e);return new(T(r,r[wt]))(r.buffer,r.byteOffset+i*r.BYTES_PER_ELEMENT,d((void 0===n?e:y(n,e))-i))}},Gt=function(t,n){return It(this,dt.call(Nt(this),t,n))},Bt=function(t){Nt(this);var n=At(arguments[1],1),r=this.length,e=S(t),i=d(e.length),o=0;if(i+n>r)throw V(Et);for(;o<i;)this[n+o]=e[o++]},Vt={entries:function(){return at.call(Nt(this))},keys:function(){return ft.call(Nt(this))},values:function(){return ct.call(Nt(this))}},zt=function(t,n){return w(t)&&t[_t]&&"symbol"!=typeof n&&n in t&&String(+n)==String(n)},qt=function(t,n){return 
zt(t,n=g(n,!0))?l(2,t[n]):B(t,n)},Kt=function(t,n,r){return!(zt(t,n=g(n,!0))&&w(r)&&b(r,"value"))||b(r,"get")||b(r,"set")||r.configurable||b(r,"writable")&&!r.writable||b(r,"enumerable")&&!r.enumerable?G(t,n,r):(t[n]=r.value,t)};St||(W.f=qt,U.f=Kt),u(u.S+u.F*!St,"Object",{getOwnPropertyDescriptor:qt,defineProperty:Kt}),o(function(){yt.call({})})&&(yt=gt=function(){return vt.call(this)});var Jt=v({},Wt);v(Jt,Vt),h(Jt,bt,Vt.values),v(Jt,{slice:Gt,set:Bt,constructor:function(){},toString:yt,toLocaleString:Ut}),Lt(Jt,"buffer","b"),Lt(Jt,"byteOffset","o"),Lt(Jt,"byteLength","l"),Lt(Jt,"length","e"),G(Jt,mt,{get:function(){return this[_t]}}),t.exports=function(t,n,r,f){f=!!f;var a=t+(f?"Clamped":"")+"Array",l="Uint8Array"!=a,v="get"+t,p="set"+t,y=i[a],g=y||{},b=y&&E(y),m=!y||!c.ABV,S={},_=y&&y[H],j=function(t,r){var e=t._d;return e.v[v](r*n+e.o,jt)},F=function(t,r,e){var i=t._d;f&&(e=(e=Math.round(e))<0?0:e>255?255:255&e),i.v[p](r*n+i.o,e,jt)},M=function(t,n){G(t,n,{get:function(){return j(this,n)},set:function(t){return F(this,n,t)},enumerable:!0})};m?(y=r(function(t,r,e,i){s(t,y,a,"_d");var o,u,c,f,l=0,v=0;if(w(r)){if(!(r instanceof X||(f=x(r))==K||f==J))return _t in r?kt(y,r):Rt.call(y,r);o=r,v=At(e,n);var p=r.byteLength;if(void 0===i){if(p%n)throw V(Et);if((u=p-v)<0)throw V(Et)}else if((u=d(i)*n)+v>p)throw V(Et);c=u/n}else c=Mt(r,!0),u=c*n,o=new X(u);for(h(t,"_d",{b:o,o:v,l:u,e:c,v:new Q(o)});l<c;)M(t,l++)}),_=y[H]=O(Jt),h(_,"constructor",y)):L(function(t){new y(null),new y(t)},!0)||(y=r(function(t,r,e,i){s(t,y,a);var o;return w(r)?r instanceof X||(o=x(r))==K||o==J?void 0!==i?new g(r,At(e,n),i):void 0!==e?new g(r,At(e,n)):new g(r):_t in r?kt(y,r):Rt.call(y,r):new g(Mt(r,l))}),Z(b!==Function.prototype?P(g).concat(P(b)):P(g),function(t){t in y||h(y,t,g[t])}),y[H]=_,e||(_.constructor=y));var A=_[bt],N=!!A&&("values"==A.name||void 0==A.name),T=Vt.values;h(y,xt,!0),h(_,_t,a),h(_,Ot,!0),h(_,wt,y),(f?new y(1)[mt]==a:mt in _)||G(_,mt,{get:function(){return 
a}}),S[a]=y,u(u.G+u.W+u.F*(y!=g),S),u(u.S,a,{BYTES_PER_ELEMENT:n,from:Rt,of:Ct}),Y in _||h(_,Y,n),u(u.P,a,Wt),R(a),u(u.P+u.F*Ft,a,{set:Bt}),u(u.P+u.F*!N,a,Vt),u(u.P+u.F*(_.toString!=yt),a,{toString:yt}),u(u.P+u.F*o(function(){new y(1).slice()}),a,{slice:Gt}),u(u.P+u.F*(o(function(){return[1,2].toLocaleString()!=new y([1,2]).toLocaleString()})||!o(function(){_.toLocaleString.call([1,2])})),a,{toLocaleString:Ut}),k[a]=N?A:T,e||N||h(_,bt,T)}}else t.exports=function(){}},function(t,n){var r={}.toString;t.exports=function(t){return r.call(t).slice(8,-1)}},function(t,n,r){var e=r(21),i=r(5).document,o=e(i)&&e(i.createElement);t.exports=function(t){return o?i.createElement(t):{}}},function(t,n,r){t.exports=!r(12)&&!r(18)(function(){return 7!=Object.defineProperty(r(57)("div"),"a",{get:function(){return 7}}).a})},function(t,n,r){"use strict";var e=r(36),i=r(51),o=r(64),u=r(13),c=r(8),f=r(35),a=r(96),s=r(38),l=r(103),h=r(15)("iterator"),v=!([].keys&&"next"in[].keys()),p="keys",d="values",y=function(){return this};t.exports=function(t,n,r,g,b,m,x){a(r,n,g);var w,S,_,O=function(t){if(!v&&t in F)return F[t];switch(t){case p:case d:return function(){return new r(this,t)}}return function(){return new r(this,t)}},E=n+" Iterator",P=b==d,j=!1,F=t.prototype,M=F[h]||F["@@iterator"]||b&&F[b],A=M||O(b),N=b?P?O("entries"):A:void 0,T="Array"==n?F.entries||M:M;if(T&&(_=l(T.call(new t)))!==Object.prototype&&(s(_,E,!0),e||c(_,h)||u(_,h,y)),P&&M&&M.name!==d&&(j=!0,A=function(){return M.call(this)}),e&&!x||!v&&!j&&F[h]||u(F,h,A),f[n]=A,f[E]=y,b)if(w={values:P?A:O(d),keys:m?A:O(p),entries:N},x)for(S in w)S in F||o(F,S,w[S]);else i(i.P+i.F*(v||j),n,w);return w}},function(t,n,r){var e=r(20),i=r(100),o=r(34),u=r(39)("IE_PROTO"),c=function(){},f="prototype",a=function(){var t,n=r(57)("iframe"),e=o.length;for(n.style.display="none",r(93).appendChild(n),n.src="javascript:",t=n.contentWindow.document,t.open(),t.write("<script>document.F=Object<\/script>"),t.close(),a=t.F;e--;)delete a[f][o[e]];return 
a()};t.exports=Object.create||function(t,n){var r;return null!==t?(c[f]=e(t),r=new c,c[f]=null,r[u]=t):r=a(),void 0===n?r:i(r,n)}},function(t,n,r){var e=r(63),i=r(34).concat("length","prototype");n.f=Object.getOwnPropertyNames||function(t){return e(t,i)}},function(t,n){n.f=Object.getOwnPropertySymbols},function(t,n,r){var e=r(8),i=r(9),o=r(90)(!1),u=r(39)("IE_PROTO");t.exports=function(t,n){var r,c=i(t),f=0,a=[];for(r in c)r!=u&&e(c,r)&&a.push(r);for(;n.length>f;)e(c,r=n[f++])&&(~o(a,r)||a.push(r));return a}},function(t,n,r){t.exports=r(13)},function(t,n,r){var e=r(76)("meta"),i=r(6),o=r(24),u=r(11).f,c=0,f=Object.isExtensible||function(){return!0},a=!r(4)(function(){return f(Object.preventExtensions({}))}),s=function(t){u(t,e,{value:{i:"O"+ ++c,w:{}}})},l=function(t,n){if(!i(t))return"symbol"==typeof t?t:("string"==typeof t?"S":"P")+t;if(!o(t,e)){if(!f(t))return"F";if(!n)return"E";s(t)}return t[e].i},h=function(t,n){if(!o(t,e)){if(!f(t))return!0;if(!n)return!1;s(t)}return t[e].w},v=function(t){return a&&p.NEED&&f(t)&&!o(t,e)&&s(t),t},p=t.exports={KEY:e,NEED:!1,fastKey:l,getWeak:h,onFreeze:v}},function(t,n){t.exports=function(t,n){return{enumerable:!(1&t),configurable:!(2&t),writable:!(4&t),value:n}}},function(t,n){var r=Math.ceil,e=Math.floor;t.exports=function(t){return isNaN(t=+t)?0:(t>0?e:r)(t)}},function(t,n){t.exports=function(t,n,r,e){if(!(t instanceof n)||void 0!==e&&e in t)throw TypeError(r+": incorrect invocation!");return t}},function(t,n){t.exports=!1},function(t,n,r){var e=r(2),i=r(173),o=r(133),u=r(145)("IE_PROTO"),c=function(){},f="prototype",a=function(){var t,n=r(132)("iframe"),e=o.length;for(n.style.display="none",r(135).appendChild(n),n.src="javascript:",t=n.contentWindow.document,t.open(),t.write("<script>document.F=Object<\/script>"),t.close(),a=t.F;e--;)delete a[f][o[e]];return a()};t.exports=Object.create||function(t,n){var r;return null!==t?(c[f]=e(t),r=new c,c[f]=null,r[u]=t):r=a(),void 0===n?r:i(r,n)}},function(t,n,r){var 
e=r(175),i=r(133).concat("length","prototype");n.f=Object.getOwnPropertyNames||function(t){return e(t,i)}},function(t,n,r){var e=r(175),i=r(133);t.exports=Object.keys||function(t){return e(t,i)}},function(t,n,r){var e=r(28);t.exports=function(t,n,r){for(var i in n)e(t,i,n[i],r);return t}},function(t,n,r){"use strict";var e=r(3),i=r(11),o=r(10),u=r(7)("species");t.exports=function(t){var n=e[t];o&&n&&!n[u]&&i.f(n,u,{configurable:!0,get:function(){return this}})}},function(t,n,r){var e=r(67),i=Math.max,o=Math.min;t.exports=function(t,n){return t=e(t),t<0?i(t+n,0):o(t,n)}},function(t,n){var r=0,e=Math.random();t.exports=function(t){return"Symbol(".concat(void 0===t?"":t,")_",(++r+e).toString(36))}},function(t,n,r){var e=r(33);t.exports=function(t){return Object(e(t))}},function(t,n,r){var e=r(7)("unscopables"),i=Array.prototype;void 0==i[e]&&r(27)(i,e,{}),t.exports=function(t){i[e][t]=!0}},function(t,n,r){var e=r(53),i=r(169),o=r(137),u=r(2),c=r(16),f=r(154),a={},s={},n=t.exports=function(t,n,r,l,h){var v,p,d,y,g=h?function(){return t}:f(t),b=e(r,l,n?2:1),m=0;if("function"!=typeof g)throw TypeError(t+" is not iterable!");if(o(g)){for(v=c(t.length);v>m;m++)if((y=n?b(u(p=t[m])[0],p[1]):b(t[m]))===a||y===s)return y}else for(d=g.call(t);!(p=d.next()).done;)if((y=i(d,b,p.value,n))===a||y===s)return y};n.BREAK=a,n.RETURN=s},function(t,n){t.exports={}},function(t,n,r){var e=r(11).f,i=r(24),o=r(7)("toStringTag");t.exports=function(t,n,r){t&&!i(t=r?t:t.prototype,o)&&e(t,o,{configurable:!0,value:n})}},function(t,n,r){var e=r(1),i=r(46),o=r(4),u=r(150),c="["+u+"]",f="​",a=RegExp("^"+c+c+"*"),s=RegExp(c+c+"*$"),l=function(t,n,r){var i={},c=o(function(){return!!u[t]()||f[t]()!=f}),a=i[t]=c?n(h):u[t];r&&(i[r]=a),e(e.P+e.F*c,"String",i)},h=l.trim=function(t,n){return t=String(i(t)),1&n&&(t=t.replace(a,"")),2&n&&(t=t.replace(s,"")),t};t.exports=l},function(t,n,r){t.exports={default:r(86),__esModule:!0}},function(t,n,r){t.exports={default:r(87),__esModule:!0}},function(t,n,r){"use 
strict";function e(t){return t&&t.__esModule?t:{default:t}}n.__esModule=!0;var i=r(84),o=e(i),u=r(83),c=e(u),f="function"==typeof c.default&&"symbol"==typeof o.default?function(t){return typeof t}:function(t){return t&&"function"==typeof c.default&&t.constructor===c.default&&t!==c.default.prototype?"symbol":typeof t};n.default="function"==typeof c.default&&"symbol"===f(o.default)?function(t){return void 0===t?"undefined":f(t)}:function(t){return t&&"function"==typeof c.default&&t.constructor===c.default&&t!==c.default.prototype?"symbol":void 0===t?"undefined":f(t)}},function(t,n,r){r(110),r(108),r(111),r(112),t.exports=r(25).Symbol},function(t,n,r){r(109),r(113),t.exports=r(44).f("iterator")},function(t,n){t.exports=function(t){if("function"!=typeof t)throw TypeError(t+" is not a function!");return t}},function(t,n){t.exports=function(){}},function(t,n,r){var e=r(9),i=r(106),o=r(105);t.exports=function(t){return function(n,r,u){var c,f=e(n),a=i(f.length),s=o(u,a);if(t&&r!=r){for(;a>s;)if((c=f[s++])!=c)return!0}else for(;a>s;s++)if((t||s in f)&&f[s]===r)return t||s||0;return!t&&-1}}},function(t,n,r){var e=r(88);t.exports=function(t,n,r){if(e(t),void 0===n)return t;switch(r){case 1:return function(r){return t.call(n,r)};case 2:return function(r,e){return t.call(n,r,e)};case 3:return function(r,e,i){return t.call(n,r,e,i)}}return function(){return t.apply(n,arguments)}}},function(t,n,r){var e=r(19),i=r(62),o=r(37);t.exports=function(t){var n=e(t),r=i.f;if(r)for(var u,c=r(t),f=o.f,a=0;c.length>a;)f.call(t,u=c[a++])&&n.push(u);return n}},function(t,n,r){t.exports=r(5).document&&document.documentElement},function(t,n,r){var e=r(56);t.exports=Object("z").propertyIsEnumerable(0)?Object:function(t){return"String"==e(t)?t.split(""):Object(t)}},function(t,n,r){var e=r(56);t.exports=Array.isArray||function(t){return"Array"==e(t)}},function(t,n,r){"use strict";var e=r(60),i=r(22),o=r(38),u={};r(13)(u,r(15)("iterator"),function(){return 
this}),t.exports=function(t,n,r){t.prototype=e(u,{next:i(1,r)}),o(t,n+" Iterator")}},function(t,n){t.exports=function(t,n){return{value:n,done:!!t}}},function(t,n,r){var e=r(19),i=r(9);t.exports=function(t,n){for(var r,o=i(t),u=e(o),c=u.length,f=0;c>f;)if(o[r=u[f++]]===n)return r}},function(t,n,r){var e=r(23)("meta"),i=r(21),o=r(8),u=r(14).f,c=0,f=Object.isExtensible||function(){return!0},a=!r(18)(function(){return f(Object.preventExtensions({}))}),s=function(t){u(t,e,{value:{i:"O"+ ++c,w:{}}})},l=function(t,n){if(!i(t))return"symbol"==typeof t?t:("string"==typeof t?"S":"P")+t;if(!o(t,e)){if(!f(t))return"F";if(!n)return"E";s(t)}return t[e].i},h=function(t,n){if(!o(t,e)){if(!f(t))return!0;if(!n)return!1;s(t)}return t[e].w},v=function(t){return a&&p.NEED&&f(t)&&!o(t,e)&&s(t),t},p=t.exports={KEY:e,NEED:!1,fastKey:l,getWeak:h,onFreeze:v}},function(t,n,r){var e=r(14),i=r(20),o=r(19);t.exports=r(12)?Object.defineProperties:function(t,n){i(t);for(var r,u=o(n),c=u.length,f=0;c>f;)e.f(t,r=u[f++],n[r]);return t}},function(t,n,r){var e=r(37),i=r(22),o=r(9),u=r(42),c=r(8),f=r(58),a=Object.getOwnPropertyDescriptor;n.f=r(12)?a:function(t,n){if(t=o(t),n=u(n,!0),f)try{return a(t,n)}catch(t){}if(c(t,n))return i(!e.f.call(t,n),t[n])}},function(t,n,r){var e=r(9),i=r(61).f,o={}.toString,u="object"==typeof window&&window&&Object.getOwnPropertyNames?Object.getOwnPropertyNames(window):[],c=function(t){try{return i(t)}catch(t){return u.slice()}};t.exports.f=function(t){return u&&"[object Window]"==o.call(t)?c(t):i(e(t))}},function(t,n,r){var e=r(8),i=r(77),o=r(39)("IE_PROTO"),u=Object.prototype;t.exports=Object.getPrototypeOf||function(t){return t=i(t),e(t,o)?t[o]:"function"==typeof t.constructor&&t instanceof t.constructor?t.constructor.prototype:t instanceof Object?u:null}},function(t,n,r){var e=r(41),i=r(33);t.exports=function(t){return function(n,r){var o,u,c=String(i(n)),f=e(r),a=c.length;return f<0||f>=a?t?"":void 
0:(o=c.charCodeAt(f),o<55296||o>56319||f+1===a||(u=c.charCodeAt(f+1))<56320||u>57343?t?c.charAt(f):o:t?c.slice(f,f+2):u-56320+(o-55296<<10)+65536)}}},function(t,n,r){var e=r(41),i=Math.max,o=Math.min;t.exports=function(t,n){return t=e(t),t<0?i(t+n,0):o(t,n)}},function(t,n,r){var e=r(41),i=Math.min;t.exports=function(t){return t>0?i(e(t),9007199254740991):0}},function(t,n,r){"use strict";var e=r(89),i=r(97),o=r(35),u=r(9);t.exports=r(59)(Array,"Array",function(t,n){this._t=u(t),this._i=0,this._k=n},function(){var t=this._t,n=this._k,r=this._i++;return!t||r>=t.length?(this._t=void 0,i(1)):"keys"==n?i(0,r):"values"==n?i(0,t[r]):i(0,[r,t[r]])},"values"),o.Arguments=o.Array,e("keys"),e("values"),e("entries")},function(t,n){},function(t,n,r){"use strict";var e=r(104)(!0);r(59)(String,"String",function(t){this._t=String(t),this._i=0},function(){var t,n=this._t,r=this._i;return r>=n.length?{value:void 0,done:!0}:(t=e(n,r),this._i+=t.length,{value:t,done:!1})})},function(t,n,r){"use strict";var e=r(5),i=r(8),o=r(12),u=r(51),c=r(64),f=r(99).KEY,a=r(18),s=r(40),l=r(38),h=r(23),v=r(15),p=r(44),d=r(43),y=r(98),g=r(92),b=r(95),m=r(20),x=r(9),w=r(42),S=r(22),_=r(60),O=r(102),E=r(101),P=r(14),j=r(19),F=E.f,M=P.f,A=O.f,N=e.Symbol,T=e.JSON,I=T&&T.stringify,k="prototype",L=v("_hidden"),R=v("toPrimitive"),C={}.propertyIsEnumerable,D=s("symbol-registry"),U=s("symbols"),W=s("op-symbols"),G=Object[k],B="function"==typeof N,V=e.QObject,z=!V||!V[k]||!V[k].findChild,q=o&&a(function(){return 7!=_(M({},"a",{get:function(){return M(this,"a",{value:7}).a}})).a})?function(t,n,r){var e=F(G,n);e&&delete G[n],M(t,n,r),e&&t!==G&&M(G,n,e)}:M,K=function(t){var n=U[t]=_(N[k]);return n._k=t,n},J=B&&"symbol"==typeof N.iterator?function(t){return"symbol"==typeof t}:function(t){return t instanceof N},Y=function(t,n,r){return 
t===G&&Y(W,n,r),m(t),n=w(n,!0),m(r),i(U,n)?(r.enumerable?(i(t,L)&&t[L][n]&&(t[L][n]=!1),r=_(r,{enumerable:S(0,!1)})):(i(t,L)||M(t,L,S(1,{})),t[L][n]=!0),q(t,n,r)):M(t,n,r)},H=function(t,n){m(t);for(var r,e=g(n=x(n)),i=0,o=e.length;o>i;)Y(t,r=e[i++],n[r]);return t},$=function(t,n){return void 0===n?_(t):H(_(t),n)},X=function(t){var n=C.call(this,t=w(t,!0));return!(this===G&&i(U,t)&&!i(W,t))&&(!(n||!i(this,t)||!i(U,t)||i(this,L)&&this[L][t])||n)},Q=function(t,n){if(t=x(t),n=w(n,!0),t!==G||!i(U,n)||i(W,n)){var r=F(t,n);return!r||!i(U,n)||i(t,L)&&t[L][n]||(r.enumerable=!0),r}},Z=function(t){for(var n,r=A(x(t)),e=[],o=0;r.length>o;)i(U,n=r[o++])||n==L||n==f||e.push(n);return e},tt=function(t){for(var n,r=t===G,e=A(r?W:x(t)),o=[],u=0;e.length>u;)!i(U,n=e[u++])||r&&!i(G,n)||o.push(U[n]);return o};B||(N=function(){if(this instanceof N)throw TypeError("Symbol is not a constructor!");var t=h(arguments.length>0?arguments[0]:void 0),n=function(r){this===G&&n.call(W,r),i(this,L)&&i(this[L],t)&&(this[L][t]=!1),q(this,t,S(1,r))};return o&&z&&q(G,t,{configurable:!0,set:n}),K(t)},c(N[k],"toString",function(){return this._k}),E.f=Q,P.f=Y,r(61).f=O.f=Z,r(37).f=X,r(62).f=tt,o&&!r(36)&&c(G,"propertyIsEnumerable",X,!0),p.f=function(t){return K(v(t))}),u(u.G+u.W+u.F*!B,{Symbol:N});for(var nt="hasInstance,isConcatSpreadable,iterator,match,replace,search,species,split,toPrimitive,toStringTag,unscopables".split(","),rt=0;nt.length>rt;)v(nt[rt++]);for(var nt=j(v.store),rt=0;nt.length>rt;)d(nt[rt++]);u(u.S+u.F*!B,"Symbol",{for:function(t){return i(D,t+="")?D[t]:D[t]=N(t)},keyFor:function(t){if(J(t))return y(D,t);throw TypeError(t+" is not a symbol!")},useSetter:function(){z=!0},useSimple:function(){z=!1}}),u(u.S+u.F*!B,"Object",{create:$,defineProperty:Y,defineProperties:H,getOwnPropertyDescriptor:Q,getOwnPropertyNames:Z,getOwnPropertySymbols:tt}),T&&u(u.S+u.F*(!B||a(function(){var t=N();return"[null]"!=I([t])||"{}"!=I({a:t})||"{}"!=I(Object(t))})),"JSON",{stringify:function(t){if(void 
0!==t&&!J(t)){for(var n,r,e=[t],i=1;arguments.length>i;)e.push(arguments[i++]);return n=e[1],"function"==typeof n&&(r=n),!r&&b(n)||(n=function(t,n){if(r&&(n=r.call(this,t,n)),!J(n))return n}),e[1]=n,I.apply(T,e)}}}),N[k][R]||r(13)(N[k],R,N[k].valueOf),l(N,"Symbol"),l(Math,"Math",!0),l(e.JSON,"JSON",!0)},function(t,n,r){r(43)("asyncIterator")},function(t,n,r){r(43)("observable")},function(t,n,r){r(107);for(var e=r(5),i=r(13),o=r(35),u=r(15)("toStringTag"),c=["NodeList","DOMTokenList","MediaList","StyleSheetList","CSSRuleList"],f=0;f<5;f++){var a=c[f],s=e[a],l=s&&s.prototype;l&&!l[u]&&i(l,u,a),o[a]=o.Array}},function(t,n,r){var e=r(45),i=r(7)("toStringTag"),o="Arguments"==e(function(){return arguments}()),u=function(t,n){try{return t[n]}catch(t){}};t.exports=function(t){var n,r,c;return void 0===t?"Undefined":null===t?"Null":"string"==typeof(r=u(n=Object(t),i))?r:o?e(n):"Object"==(c=e(n))&&"function"==typeof n.callee?"Arguments":c}},function(t,n,r){var e=r(45);t.exports=Object("z").propertyIsEnumerable(0)?Object:function(t){return"String"==e(t)?t.split(""):Object(t)}},function(t,n){n.f={}.propertyIsEnumerable},function(t,n,r){var e=r(30),i=r(16),o=r(75);t.exports=function(t){return function(n,r,u){var c,f=e(n),a=i(f.length),s=o(u,a);if(t&&r!=r){for(;a>s;)if((c=f[s++])!=c)return!0}else for(;a>s;s++)if((t||s in f)&&f[s]===r)return t||s||0;return!t&&-1}}},function(t,n,r){"use strict";var e=r(3),i=r(1),o=r(28),u=r(73),c=r(65),f=r(79),a=r(68),s=r(6),l=r(4),h=r(123),v=r(81),p=r(136);t.exports=function(t,n,r,d,y,g){var b=e[t],m=b,x=y?"set":"add",w=m&&m.prototype,S={},_=function(t){var n=w[t];o(w,t,"delete"==t?function(t){return!(g&&!s(t))&&n.call(this,0===t?0:t)}:"has"==t?function(t){return!(g&&!s(t))&&n.call(this,0===t?0:t)}:"get"==t?function(t){return g&&!s(t)?void 0:n.call(this,0===t?0:t)}:"add"==t?function(t){return n.call(this,0===t?0:t),this}:function(t,r){return n.call(this,0===t?0:t,r),this})};if("function"==typeof m&&(g||w.forEach&&!l(function(){(new 
m).entries().next()}))){var O=new m,E=O[x](g?{}:-0,1)!=O,P=l(function(){O.has(1)}),j=h(function(t){new m(t)}),F=!g&&l(function(){for(var t=new m,n=5;n--;)t[x](n,n);return!t.has(-0)});j||(m=n(function(n,r){a(n,m,t);var e=p(new b,n,m);return void 0!=r&&f(r,y,e[x],e),e}),m.prototype=w,w.constructor=m),(P||F)&&(_("delete"),_("has"),y&&_("get")),(F||E)&&_(x),g&&w.clear&&delete w.clear}else m=d.getConstructor(n,t,y,x),u(m.prototype,r),c.NEED=!0;return v(m,t),S[t]=m,i(i.G+i.W+i.F*(m!=b),S),g||d.setStrong(m,t,y),m}},function(t,n,r){"use strict";var e=r(27),i=r(28),o=r(4),u=r(46),c=r(7);t.exports=function(t,n,r){var f=c(t),a=r(u,f,""[t]),s=a[0],l=a[1];o(function(){var n={};return n[f]=function(){return 7},7!=""[t](n)})&&(i(String.prototype,t,s),e(RegExp.prototype,f,2==n?function(t,n){return l.call(t,this,n)}:function(t){return l.call(t,this)}))}
},function(t,n,r){"use strict";var e=r(2);t.exports=function(){var t=e(this),n="";return t.global&&(n+="g"),t.ignoreCase&&(n+="i"),t.multiline&&(n+="m"),t.unicode&&(n+="u"),t.sticky&&(n+="y"),n}},function(t,n){t.exports=function(t,n,r){var e=void 0===r;switch(n.length){case 0:return e?t():t.call(r);case 1:return e?t(n[0]):t.call(r,n[0]);case 2:return e?t(n[0],n[1]):t.call(r,n[0],n[1]);case 3:return e?t(n[0],n[1],n[2]):t.call(r,n[0],n[1],n[2]);case 4:return e?t(n[0],n[1],n[2],n[3]):t.call(r,n[0],n[1],n[2],n[3])}return t.apply(r,n)}},function(t,n,r){var e=r(6),i=r(45),o=r(7)("match");t.exports=function(t){var n;return e(t)&&(void 0!==(n=t[o])?!!n:"RegExp"==i(t))}},function(t,n,r){var e=r(7)("iterator"),i=!1;try{var o=[7][e]();o.return=function(){i=!0},Array.from(o,function(){throw 2})}catch(t){}t.exports=function(t,n){if(!n&&!i)return!1;var r=!1;try{var o=[7],u=o[e]();u.next=function(){return{done:r=!0}},o[e]=function(){return u},t(o)}catch(t){}return r}},function(t,n,r){t.exports=r(69)||!r(4)(function(){var t=Math.random();__defineSetter__.call(null,t,function(){}),delete r(3)[t]})},function(t,n){n.f=Object.getOwnPropertySymbols},function(t,n,r){var e=r(3),i="__core-js_shared__",o=e[i]||(e[i]={});t.exports=function(t){return o[t]||(o[t]={})}},function(t,n,r){for(var e,i=r(3),o=r(27),u=r(76),c=u("typed_array"),f=u("view"),a=!(!i.ArrayBuffer||!i.DataView),s=a,l=0,h="Int8Array,Uint8Array,Uint8ClampedArray,Int16Array,Uint16Array,Int32Array,Uint32Array,Float32Array,Float64Array".split(",");l<9;)(e=i[h[l++]])?(o(e.prototype,c,!0),o(e.prototype,f,!0)):s=!1;t.exports={ABV:a,CONSTR:s,TYPED:c,VIEW:f}},function(t,n){"use strict";var r={versions:function(){var t=window.navigator.userAgent;return{trident:t.indexOf("Trident")>-1,presto:t.indexOf("Presto")>-1,webKit:t.indexOf("AppleWebKit")>-1,gecko:t.indexOf("Gecko")>-1&&-1==t.indexOf("KHTML"),mobile:!!t.match(/AppleWebKit.*Mobile.*/),ios:!!t.match(/\(i[^;]+;( U;)? 
CPU.+Mac OS X/),android:t.indexOf("Android")>-1||t.indexOf("Linux")>-1,iPhone:t.indexOf("iPhone")>-1||t.indexOf("Mac")>-1,iPad:t.indexOf("iPad")>-1,webApp:-1==t.indexOf("Safari"),weixin:-1==t.indexOf("MicroMessenger")}}()};t.exports=r},function(t,n,r){"use strict";var e=r(85),i=function(t){return t&&t.__esModule?t:{default:t}}(e),o=function(){function t(t,n,e){return n||e?String.fromCharCode(n||e):r[t]||t}function n(t){return e[t]}var r={"&quot;":'"',"&lt;":"<","&gt;":">","&amp;":"&","&nbsp;":" "},e={};for(var u in r)e[r[u]]=u;return r["&apos;"]="'",e["'"]="&#39;",{encode:function(t){return t?(""+t).replace(/['<> "&]/g,n).replace(/\r?\n/g,"<br/>").replace(/\s/g,"&nbsp;"):""},decode:function(n){return n?(""+n).replace(/<br\s*\/?>/gi,"\n").replace(/&quot;|&lt;|&gt;|&amp;|&nbsp;|&apos;|&#(\d+);|&#(\d+)/g,t).replace(/\u00a0/g," "):""},encodeBase16:function(t){if(!t)return t;t+="";for(var n=[],r=0,e=t.length;e>r;r++)n.push(t.charCodeAt(r).toString(16).toUpperCase());return n.join("")},encodeBase16forJSON:function(t){if(!t)return t;t=t.replace(/[\u4E00-\u9FBF]/gi,function(t){return escape(t).replace("%u","\\u")});for(var n=[],r=0,e=t.length;e>r;r++)n.push(t.charCodeAt(r).toString(16).toUpperCase());return n.join("")},decodeBase16:function(t){if(!t)return t;t+="";for(var n=[],r=0,e=t.length;e>r;r+=2)n.push(String.fromCharCode("0x"+t.slice(r,r+2)));return n.join("")},encodeObject:function(t){if(t instanceof Array)for(var n=0,r=t.length;r>n;n++)t[n]=o.encodeObject(t[n]);else if("object"==(void 0===t?"undefined":(0,i.default)(t)))for(var e in t)t[e]=o.encodeObject(t[e]);else if("string"==typeof t)return o.encode(t);return t},loadScript:function(t){var n=document.createElement("script");document.getElementsByTagName("body")[0].appendChild(n),n.setAttribute("src",t)},addLoadEvent:function(t){var n=window.onload;"function"!=typeof window.onload?window.onload=t:window.onload=function(){n(),t()}}}}();t.exports=o},function(t,n,r){"use strict";var 
e=r(17),i=r(75),o=r(16);t.exports=function(t){for(var n=e(this),r=o(n.length),u=arguments.length,c=i(u>1?arguments[1]:void 0,r),f=u>2?arguments[2]:void 0,a=void 0===f?r:i(f,r);a>c;)n[c++]=t;return n}},function(t,n,r){"use strict";var e=r(11),i=r(66);t.exports=function(t,n,r){n in t?e.f(t,n,i(0,r)):t[n]=r}},function(t,n,r){var e=r(6),i=r(3).document,o=e(i)&&e(i.createElement);t.exports=function(t){return o?i.createElement(t):{}}},function(t,n){t.exports="constructor,hasOwnProperty,isPrototypeOf,propertyIsEnumerable,toLocaleString,toString,valueOf".split(",")},function(t,n,r){var e=r(7)("match");t.exports=function(t){var n=/./;try{"/./"[t](n)}catch(r){try{return n[e]=!1,!"/./"[t](n)}catch(t){}}return!0}},function(t,n,r){t.exports=r(3).document&&document.documentElement},function(t,n,r){var e=r(6),i=r(144).set;t.exports=function(t,n,r){var o,u=n.constructor;return u!==r&&"function"==typeof u&&(o=u.prototype)!==r.prototype&&e(o)&&i&&i(t,o),t}},function(t,n,r){var e=r(80),i=r(7)("iterator"),o=Array.prototype;t.exports=function(t){return void 0!==t&&(e.Array===t||o[i]===t)}},function(t,n,r){var e=r(45);t.exports=Array.isArray||function(t){return"Array"==e(t)}},function(t,n,r){"use strict";var e=r(70),i=r(66),o=r(81),u={};r(27)(u,r(7)("iterator"),function(){return this}),t.exports=function(t,n,r){t.prototype=e(u,{next:i(1,r)}),o(t,n+" Iterator")}},function(t,n,r){"use strict";var e=r(69),i=r(1),o=r(28),u=r(27),c=r(24),f=r(80),a=r(139),s=r(81),l=r(32),h=r(7)("iterator"),v=!([].keys&&"next"in[].keys()),p="keys",d="values",y=function(){return this};t.exports=function(t,n,r,g,b,m,x){a(r,n,g);var w,S,_,O=function(t){if(!v&&t in F)return F[t];switch(t){case p:case d:return function(){return new r(this,t)}}return function(){return new r(this,t)}},E=n+" Iterator",P=b==d,j=!1,F=t.prototype,M=F[h]||F["@@iterator"]||b&&F[b],A=M||O(b),N=b?P?O("entries"):A:void 0,T="Array"==n?F.entries||M:M;if(T&&(_=l(T.call(new 
t)))!==Object.prototype&&(s(_,E,!0),e||c(_,h)||u(_,h,y)),P&&M&&M.name!==d&&(j=!0,A=function(){return M.call(this)}),e&&!x||!v&&!j&&F[h]||u(F,h,A),f[n]=A,f[E]=y,b)if(w={values:P?A:O(d),keys:m?A:O(p),entries:N},x)for(S in w)S in F||o(F,S,w[S]);else i(i.P+i.F*(v||j),n,w);return w}},function(t,n){var r=Math.expm1;t.exports=!r||r(10)>22025.465794806718||r(10)<22025.465794806718||-2e-17!=r(-2e-17)?function(t){return 0==(t=+t)?t:t>-1e-6&&t<1e-6?t+t*t/2:Math.exp(t)-1}:r},function(t,n){t.exports=Math.sign||function(t){return 0==(t=+t)||t!=t?t:t<0?-1:1}},function(t,n,r){var e=r(3),i=r(151).set,o=e.MutationObserver||e.WebKitMutationObserver,u=e.process,c=e.Promise,f="process"==r(45)(u);t.exports=function(){var t,n,r,a=function(){var e,i;for(f&&(e=u.domain)&&e.exit();t;){i=t.fn,t=t.next;try{i()}catch(e){throw t?r():n=void 0,e}}n=void 0,e&&e.enter()};if(f)r=function(){u.nextTick(a)};else if(o){var s=!0,l=document.createTextNode("");new o(a).observe(l,{characterData:!0}),r=function(){l.data=s=!s}}else if(c&&c.resolve){var h=c.resolve();r=function(){h.then(a)}}else r=function(){i.call(e,a)};return function(e){var i={fn:e,next:void 0};n&&(n.next=i),t||(t=i,r()),n=i}}},function(t,n,r){var e=r(6),i=r(2),o=function(t,n){if(i(t),!e(n)&&null!==n)throw TypeError(n+": can't set as prototype!")};t.exports={set:Object.setPrototypeOf||("__proto__"in{}?function(t,n,e){try{e=r(53)(Function.call,r(31).f(Object.prototype,"__proto__").set,2),e(t,[]),n=!(t instanceof Array)}catch(t){n=!0}return function(t,r){return o(t,r),n?t.__proto__=r:e(t,r),t}}({},!1):void 0),check:o}},function(t,n,r){var e=r(126)("keys"),i=r(76);t.exports=function(t){return e[t]||(e[t]=i(t))}},function(t,n,r){var e=r(2),i=r(26),o=r(7)("species");t.exports=function(t,n){var r,u=e(t).constructor;return void 0===u||void 0==(r=e(u)[o])?n:i(r)}},function(t,n,r){var e=r(67),i=r(46);t.exports=function(t){return function(n,r){var o,u,c=String(i(n)),f=e(r),a=c.length;return f<0||f>=a?t?"":void 
0:(o=c.charCodeAt(f),o<55296||o>56319||f+1===a||(u=c.charCodeAt(f+1))<56320||u>57343?t?c.charAt(f):o:t?c.slice(f,f+2):u-56320+(o-55296<<10)+65536)}}},function(t,n,r){var e=r(122),i=r(46);t.exports=function(t,n,r){if(e(n))throw TypeError("String#"+r+" doesn't accept regex!");return String(i(t))}},function(t,n,r){"use strict";var e=r(67),i=r(46);t.exports=function(t){var n=String(i(this)),r="",o=e(t);if(o<0||o==1/0)throw RangeError("Count can't be negative");for(;o>0;(o>>>=1)&&(n+=n))1&o&&(r+=n);return r}},function(t,n){t.exports="\t\n\v\f\r   ᠎             　\u2028\u2029\ufeff"},function(t,n,r){var e,i,o,u=r(53),c=r(121),f=r(135),a=r(132),s=r(3),l=s.process,h=s.setImmediate,v=s.clearImmediate,p=s.MessageChannel,d=0,y={},g="onreadystatechange",b=function(){var t=+this;if(y.hasOwnProperty(t)){var n=y[t];delete y[t],n()}},m=function(t){b.call(t.data)};h&&v||(h=function(t){for(var n=[],r=1;arguments.length>r;)n.push(arguments[r++]);return y[++d]=function(){c("function"==typeof t?t:Function(t),n)},e(d),d},v=function(t){delete y[t]},"process"==r(45)(l)?e=function(t){l.nextTick(u(b,t,1))}:p?(i=new p,o=i.port2,i.port1.onmessage=m,e=u(o.postMessage,o,1)):s.addEventListener&&"function"==typeof postMessage&&!s.importScripts?(e=function(t){s.postMessage(t+"","*")},s.addEventListener("message",m,!1)):e=g in a("script")?function(t){f.appendChild(a("script"))[g]=function(){f.removeChild(this),b.call(t)}}:function(t){setTimeout(u(b,t,1),0)}),t.exports={set:h,clear:v}},function(t,n,r){"use strict";var e=r(3),i=r(10),o=r(69),u=r(127),c=r(27),f=r(73),a=r(4),s=r(68),l=r(67),h=r(16),v=r(71).f,p=r(11).f,d=r(130),y=r(81),g="ArrayBuffer",b="DataView",m="prototype",x="Wrong length!",w="Wrong index!",S=e[g],_=e[b],O=e.Math,E=e.RangeError,P=e.Infinity,j=S,F=O.abs,M=O.pow,A=O.floor,N=O.log,T=O.LN2,I="buffer",k="byteLength",L="byteOffset",R=i?"_b":I,C=i?"_l":k,D=i?"_o":L,U=function(t,n,r){var 
e,i,o,u=Array(r),c=8*r-n-1,f=(1<<c)-1,a=f>>1,s=23===n?M(2,-24)-M(2,-77):0,l=0,h=t<0||0===t&&1/t<0?1:0;for(t=F(t),t!=t||t===P?(i=t!=t?1:0,e=f):(e=A(N(t)/T),t*(o=M(2,-e))<1&&(e--,o*=2),t+=e+a>=1?s/o:s*M(2,1-a),t*o>=2&&(e++,o/=2),e+a>=f?(i=0,e=f):e+a>=1?(i=(t*o-1)*M(2,n),e+=a):(i=t*M(2,a-1)*M(2,n),e=0));n>=8;u[l++]=255&i,i/=256,n-=8);for(e=e<<n|i,c+=n;c>0;u[l++]=255&e,e/=256,c-=8);return u[--l]|=128*h,u},W=function(t,n,r){var e,i=8*r-n-1,o=(1<<i)-1,u=o>>1,c=i-7,f=r-1,a=t[f--],s=127&a;for(a>>=7;c>0;s=256*s+t[f],f--,c-=8);for(e=s&(1<<-c)-1,s>>=-c,c+=n;c>0;e=256*e+t[f],f--,c-=8);if(0===s)s=1-u;else{if(s===o)return e?NaN:a?-P:P;e+=M(2,n),s-=u}return(a?-1:1)*e*M(2,s-n)},G=function(t){return t[3]<<24|t[2]<<16|t[1]<<8|t[0]},B=function(t){return[255&t]},V=function(t){return[255&t,t>>8&255]},z=function(t){return[255&t,t>>8&255,t>>16&255,t>>24&255]},q=function(t){return U(t,52,8)},K=function(t){return U(t,23,4)},J=function(t,n,r){p(t[m],n,{get:function(){return this[r]}})},Y=function(t,n,r,e){var i=+r,o=l(i);if(i!=o||o<0||o+n>t[C])throw E(w);var u=t[R]._b,c=o+t[D],f=u.slice(c,c+n);return e?f:f.reverse()},H=function(t,n,r,e,i,o){var u=+r,c=l(u);if(u!=c||c<0||c+n>t[C])throw E(w);for(var f=t[R]._b,a=c+t[D],s=e(+i),h=0;h<n;h++)f[a+h]=s[o?h:n-h-1]},$=function(t,n){s(t,S,g);var r=+n,e=h(r);if(r!=e)throw E(x);return e};if(u.ABV){if(!a(function(){new S})||!a(function(){new S(.5)})){S=function(t){return new j($(this,t))};for(var X,Q=S[m]=j[m],Z=v(j),tt=0;Z.length>tt;)(X=Z[tt++])in S||c(S,X,j[X]);o||(Q.constructor=S)}var nt=new _(new S(2)),rt=_[m].setInt8;nt.setInt8(0,2147483648),nt.setInt8(1,2147483649),!nt.getInt8(0)&&nt.getInt8(1)||f(_[m],{setInt8:function(t,n){rt.call(this,t,n<<24>>24)},setUint8:function(t,n){rt.call(this,t,n<<24>>24)}},!0)}else S=function(t){var n=$(this,t);this._b=d.call(Array(n),0),this[C]=n},_=function(t,n,r){s(this,_,b),s(t,S,b);var e=t[C],i=l(n);if(i<0||i>e)throw E("Wrong offset!");if(r=void 0===r?e-i:h(r),i+r>e)throw 
E(x);this[R]=t,this[D]=i,this[C]=r},i&&(J(S,k,"_l"),J(_,I,"_b"),J(_,k,"_l"),J(_,L,"_o")),f(_[m],{getInt8:function(t){return Y(this,1,t)[0]<<24>>24},getUint8:function(t){return Y(this,1,t)[0]},getInt16:function(t){var n=Y(this,2,t,arguments[1]);return(n[1]<<8|n[0])<<16>>16},getUint16:function(t){var n=Y(this,2,t,arguments[1]);return n[1]<<8|n[0]},getInt32:function(t){return G(Y(this,4,t,arguments[1]))},getUint32:function(t){return G(Y(this,4,t,arguments[1]))>>>0},getFloat32:function(t){return W(Y(this,4,t,arguments[1]),23,4)},getFloat64:function(t){return W(Y(this,8,t,arguments[1]),52,8)},setInt8:function(t,n){H(this,1,t,B,n)},setUint8:function(t,n){H(this,1,t,B,n)},setInt16:function(t,n){H(this,2,t,V,n,arguments[2])},setUint16:function(t,n){H(this,2,t,V,n,arguments[2])},setInt32:function(t,n){H(this,4,t,z,n,arguments[2])},setUint32:function(t,n){H(this,4,t,z,n,arguments[2])},setFloat32:function(t,n){H(this,4,t,K,n,arguments[2])},setFloat64:function(t,n){H(this,8,t,q,n,arguments[2])}});y(S,g),y(_,b),c(_[m],u.VIEW,!0),n[g]=S,n[b]=_},function(t,n,r){var e=r(3),i=r(52),o=r(69),u=r(182),c=r(11).f;t.exports=function(t){var n=i.Symbol||(i.Symbol=o?{}:e.Symbol||{});"_"==t.charAt(0)||t in n||c(n,t,{value:u.f(t)})}},function(t,n,r){var e=r(114),i=r(7)("iterator"),o=r(80);t.exports=r(52).getIteratorMethod=function(t){if(void 0!=t)return t[i]||t["@@iterator"]||o[e(t)]}},function(t,n,r){"use strict";var e=r(78),i=r(170),o=r(80),u=r(30);t.exports=r(140)(Array,"Array",function(t,n){this._t=u(t),this._i=0,this._k=n},function(){var t=this._t,n=this._k,r=this._i++;return!t||r>=t.length?(this._t=void 0,i(1)):"keys"==n?i(0,r):"values"==n?i(0,t[r]):i(0,[r,t[r]])},"values"),o.Arguments=o.Array,e("keys"),e("values"),e("entries")},function(t,n){function r(t,n){t.classList?t.classList.add(n):t.className+=" "+n}t.exports=r},function(t,n){function r(t,n){if(t.classList)t.classList.remove(n);else{var r=new RegExp("(^|\\b)"+n.split(" 
").join("|")+"(\\b|$)","gi");t.className=t.className.replace(r," ")}}t.exports=r},function(t,n){function r(){throw new Error("setTimeout has not been defined")}function e(){throw new Error("clearTimeout has not been defined")}function i(t){if(s===setTimeout)return setTimeout(t,0);if((s===r||!s)&&setTimeout)return s=setTimeout,setTimeout(t,0);try{return s(t,0)}catch(n){try{return s.call(null,t,0)}catch(n){return s.call(this,t,0)}}}function o(t){if(l===clearTimeout)return clearTimeout(t);if((l===e||!l)&&clearTimeout)return l=clearTimeout,clearTimeout(t);try{return l(t)}catch(n){try{return l.call(null,t)}catch(n){return l.call(this,t)}}}function u(){d&&v&&(d=!1,v.length?p=v.concat(p):y=-1,p.length&&c())}function c(){if(!d){var t=i(u);d=!0;for(var n=p.length;n;){for(v=p,p=[];++y<n;)v&&v[y].run();y=-1,n=p.length}v=null,d=!1,o(t)}}function f(t,n){this.fun=t,this.array=n}function a(){}var s,l,h=t.exports={};!function(){try{s="function"==typeof setTimeout?setTimeout:r}catch(t){s=r}try{l="function"==typeof clearTimeout?clearTimeout:e}catch(t){l=e}}();var v,p=[],d=!1,y=-1;h.nextTick=function(t){var n=new Array(arguments.length-1);if(arguments.length>1)for(var r=1;r<arguments.length;r++)n[r-1]=arguments[r];p.push(new f(t,n)),1!==p.length||d||i(c)},f.prototype.run=function(){this.fun.apply(null,this.array)},h.title="browser",h.browser=!0,h.env={},h.argv=[],h.version="",h.versions={},h.on=a,h.addListener=a,h.once=a,h.off=a,h.removeListener=a,h.removeAllListeners=a,h.emit=a,h.prependListener=a,h.prependOnceListener=a,h.listeners=function(t){return[]},h.binding=function(t){throw new Error("process.binding is not supported")},h.cwd=function(){return"/"},h.chdir=function(t){throw new Error("process.chdir is not supported")},h.umask=function(){return 0}},function(t,n,r){var e=r(45);t.exports=function(t,n){if("number"!=typeof t&&"Number"!=e(t))throw TypeError(n);return+t}},function(t,n,r){"use strict";var e=r(17),i=r(75),o=r(16);t.exports=[].copyWithin||function(t,n){var 
r=e(this),u=o(r.length),c=i(t,u),f=i(n,u),a=arguments.length>2?arguments[2]:void 0,s=Math.min((void 0===a?u:i(a,u))-f,u-c),l=1;for(f<c&&c<f+s&&(l=-1,f+=s-1,c+=s-1);s-- >0;)f in r?r[c]=r[f]:delete r[c],c+=l,f+=l;return r}},function(t,n,r){var e=r(79);t.exports=function(t,n){var r=[];return e(t,!1,r.push,r,n),r}},function(t,n,r){var e=r(26),i=r(17),o=r(115),u=r(16);t.exports=function(t,n,r,c,f){e(n);var a=i(t),s=o(a),l=u(a.length),h=f?l-1:0,v=f?-1:1;if(r<2)for(;;){if(h in s){c=s[h],h+=v;break}if(h+=v,f?h<0:l<=h)throw TypeError("Reduce of empty array with no initial value")}for(;f?h>=0:l>h;h+=v)h in s&&(c=n(c,s[h],h,a));return c}},function(t,n,r){"use strict";var e=r(26),i=r(6),o=r(121),u=[].slice,c={},f=function(t,n,r){if(!(n in c)){for(var e=[],i=0;i<n;i++)e[i]="a["+i+"]";c[n]=Function("F,a","return new F("+e.join(",")+")")}return c[n](t,r)};t.exports=Function.bind||function(t){var n=e(this),r=u.call(arguments,1),c=function(){var e=r.concat(u.call(arguments));return this instanceof c?f(n,e.length,e):o(n,e,t)};return i(n.prototype)&&(c.prototype=n.prototype),c}},function(t,n,r){"use strict";var e=r(11).f,i=r(70),o=r(73),u=r(53),c=r(68),f=r(46),a=r(79),s=r(140),l=r(170),h=r(74),v=r(10),p=r(65).fastKey,d=v?"_s":"size",y=function(t,n){var r,e=p(n);if("F"!==e)return t._i[e];for(r=t._f;r;r=r.n)if(r.k==n)return r};t.exports={getConstructor:function(t,n,r,s){var l=t(function(t,e){c(t,l,n,"_i"),t._i=i(null),t._f=void 0,t._l=void 0,t[d]=0,void 0!=e&&a(e,r,t[s],t)});return o(l.prototype,{clear:function(){for(var t=this,n=t._i,r=t._f;r;r=r.n)r.r=!0,r.p&&(r.p=r.p.n=void 0),delete n[r.i];t._f=t._l=void 0,t[d]=0},delete:function(t){var n=this,r=y(n,t);if(r){var e=r.n,i=r.p;delete n._i[r.i],r.r=!0,i&&(i.n=e),e&&(e.p=i),n._f==r&&(n._f=e),n._l==r&&(n._l=i),n[d]--}return!!r},forEach:function(t){c(this,l,"forEach");for(var n,r=u(t,arguments.length>1?arguments[1]:void 
0,3);n=n?n.n:this._f;)for(r(n.v,n.k,this);n&&n.r;)n=n.p},has:function(t){return!!y(this,t)}}),v&&e(l.prototype,"size",{get:function(){return f(this[d])}}),l},def:function(t,n,r){var e,i,o=y(t,n);return o?o.v=r:(t._l=o={i:i=p(n,!0),k:n,v:r,p:e=t._l,n:void 0,r:!1},t._f||(t._f=o),e&&(e.n=o),t[d]++,"F"!==i&&(t._i[i]=o)),t},getEntry:y,setStrong:function(t,n,r){s(t,n,function(t,n){this._t=t,this._k=n,this._l=void 0},function(){for(var t=this,n=t._k,r=t._l;r&&r.r;)r=r.p;return t._t&&(t._l=r=r?r.n:t._t._f)?"keys"==n?l(0,r.k):"values"==n?l(0,r.v):l(0,[r.k,r.v]):(t._t=void 0,l(1))},r?"entries":"values",!r,!0),h(n)}}},function(t,n,r){var e=r(114),i=r(161);t.exports=function(t){return function(){if(e(this)!=t)throw TypeError(t+"#toJSON isn't generic");return i(this)}}},function(t,n,r){"use strict";var e=r(73),i=r(65).getWeak,o=r(2),u=r(6),c=r(68),f=r(79),a=r(48),s=r(24),l=a(5),h=a(6),v=0,p=function(t){return t._l||(t._l=new d)},d=function(){this.a=[]},y=function(t,n){return l(t.a,function(t){return t[0]===n})};d.prototype={get:function(t){var n=y(this,t);if(n)return n[1]},has:function(t){return!!y(this,t)},set:function(t,n){var r=y(this,t);r?r[1]=n:this.a.push([t,n])},delete:function(t){var n=h(this.a,function(n){return n[0]===t});return~n&&this.a.splice(n,1),!!~n}},t.exports={getConstructor:function(t,n,r,o){var a=t(function(t,e){c(t,a,n,"_i"),t._i=v++,t._l=void 0,void 0!=e&&f(e,r,t[o],t)});return e(a.prototype,{delete:function(t){if(!u(t))return!1;var n=i(t);return!0===n?p(this).delete(t):n&&s(n,this._i)&&delete n[this._i]},has:function(t){if(!u(t))return!1;var n=i(t);return!0===n?p(this).has(t):n&&s(n,this._i)}}),a},def:function(t,n,r){var e=i(o(n),!0);return!0===e?p(t).set(n,r):e[t._i]=r,t},ufstore:p}},function(t,n,r){t.exports=!r(10)&&!r(4)(function(){return 7!=Object.defineProperty(r(132)("div"),"a",{get:function(){return 7}}).a})},function(t,n,r){var e=r(6),i=Math.floor;t.exports=function(t){return!e(t)&&isFinite(t)&&i(t)===t}},function(t,n,r){var 
e=r(2);t.exports=function(t,n,r,i){try{return i?n(e(r)[0],r[1]):n(r)}catch(n){var o=t.return;throw void 0!==o&&e(o.call(t)),n}}},function(t,n){t.exports=function(t,n){return{value:n,done:!!t}}},function(t,n){t.exports=Math.log1p||function(t){return(t=+t)>-1e-8&&t<1e-8?t-t*t/2:Math.log(1+t)}},function(t,n,r){"use strict";var e=r(72),i=r(125),o=r(116),u=r(17),c=r(115),f=Object.assign;t.exports=!f||r(4)(function(){var t={},n={},r=Symbol(),e="abcdefghijklmnopqrst";return t[r]=7,e.split("").forEach(function(t){n[t]=t}),7!=f({},t)[r]||Object.keys(f({},n)).join("")!=e})?function(t,n){for(var r=u(t),f=arguments.length,a=1,s=i.f,l=o.f;f>a;)for(var h,v=c(arguments[a++]),p=s?e(v).concat(s(v)):e(v),d=p.length,y=0;d>y;)l.call(v,h=p[y++])&&(r[h]=v[h]);return r}:f},function(t,n,r){var e=r(11),i=r(2),o=r(72);t.exports=r(10)?Object.defineProperties:function(t,n){i(t);for(var r,u=o(n),c=u.length,f=0;c>f;)e.f(t,r=u[f++],n[r]);return t}},function(t,n,r){var e=r(30),i=r(71).f,o={}.toString,u="object"==typeof window&&window&&Object.getOwnPropertyNames?Object.getOwnPropertyNames(window):[],c=function(t){try{return i(t)}catch(t){return u.slice()}};t.exports.f=function(t){return u&&"[object Window]"==o.call(t)?c(t):i(e(t))}},function(t,n,r){var e=r(24),i=r(30),o=r(117)(!1),u=r(145)("IE_PROTO");t.exports=function(t,n){var r,c=i(t),f=0,a=[];for(r in c)r!=u&&e(c,r)&&a.push(r);for(;n.length>f;)e(c,r=n[f++])&&(~o(a,r)||a.push(r));return a}},function(t,n,r){var e=r(72),i=r(30),o=r(116).f;t.exports=function(t){return function(n){for(var r,u=i(n),c=e(u),f=c.length,a=0,s=[];f>a;)o.call(u,r=c[a++])&&s.push(t?[r,u[r]]:u[r]);return s}}},function(t,n,r){var e=r(71),i=r(125),o=r(2),u=r(3).Reflect;t.exports=u&&u.ownKeys||function(t){var n=e.f(o(t)),r=i.f;return r?n.concat(r(t)):n}},function(t,n,r){var e=r(3).parseFloat,i=r(82).trim;t.exports=1/e(r(150)+"-0")!=-1/0?function(t){var n=i(String(t),3),r=e(n);return 0===r&&"-"==n.charAt(0)?-0:r}:e},function(t,n,r){var 
e=r(3).parseInt,i=r(82).trim,o=r(150),u=/^[\-+]?0[xX]/;t.exports=8!==e(o+"08")||22!==e(o+"0x16")?function(t,n){var r=i(String(t),3);return e(r,n>>>0||(u.test(r)?16:10))}:e},function(t,n){t.exports=Object.is||function(t,n){return t===n?0!==t||1/t==1/n:t!=t&&n!=n}},function(t,n,r){var e=r(16),i=r(149),o=r(46);t.exports=function(t,n,r,u){var c=String(o(t)),f=c.length,a=void 0===r?" ":String(r),s=e(n);if(s<=f||""==a)return c;var l=s-f,h=i.call(a,Math.ceil(l/a.length));return h.length>l&&(h=h.slice(0,l)),u?h+c:c+h}},function(t,n,r){n.f=r(7)},function(t,n,r){"use strict";var e=r(164);t.exports=r(118)("Map",function(t){return function(){return t(this,arguments.length>0?arguments[0]:void 0)}},{get:function(t){var n=e.getEntry(this,t);return n&&n.v},set:function(t,n){return e.def(this,0===t?0:t,n)}},e,!0)},function(t,n,r){r(10)&&"g"!=/./g.flags&&r(11).f(RegExp.prototype,"flags",{configurable:!0,get:r(120)})},function(t,n,r){"use strict";var e=r(164);t.exports=r(118)("Set",function(t){return function(){return t(this,arguments.length>0?arguments[0]:void 0)}},{add:function(t){return e.def(this,t=0===t?0:t,t)}},e)},function(t,n,r){"use strict";var e,i=r(48)(0),o=r(28),u=r(65),c=r(172),f=r(166),a=r(6),s=u.getWeak,l=Object.isExtensible,h=f.ufstore,v={},p=function(t){return function(){return t(this,arguments.length>0?arguments[0]:void 0)}},d={get:function(t){if(a(t)){var n=s(t);return!0===n?h(this).get(t):n?n[this._i]:void 0}},set:function(t,n){return f.def(this,t,n)}},y=t.exports=r(118)("WeakMap",p,d,f,!0,!0);7!=(new y).set((Object.freeze||Object)(v),7).get(v)&&(e=f.getConstructor(p),c(e.prototype,d),u.NEED=!0,i(["delete","has","get","set"],function(t){var n=y.prototype,r=n[t];o(n,t,function(n,i){if(a(n)&&!l(n)){this._f||(this._f=new e);var o=this._f[t](n,i);return"set"==t?this:o}return r.call(this,n,i)})}))},,,,function(t,n){"use strict";function r(){var t=document.querySelector("#page-nav");if(t&&!document.querySelector("#page-nav .extend.prev")&&(t.innerHTML='<a class="extend 
prev disabled" rel="prev">&laquo; Prev</a>'+t.innerHTML),t&&!document.querySelector("#page-nav .extend.next")&&(t.innerHTML=t.innerHTML+'<a class="extend next disabled" rel="next">Next &raquo;</a>'),yiliaConfig&&yiliaConfig.open_in_new){document.querySelectorAll(".article-entry a:not(.article-more-a)").forEach(function(t){var n=t.getAttribute("target");n&&""!==n||t.setAttribute("target","_blank")})}if(yiliaConfig&&yiliaConfig.toc_hide_index){document.querySelectorAll(".toc-number").forEach(function(t){t.style.display="none"})}var n=document.querySelector("#js-aboutme");n&&0!==n.length&&(n.innerHTML=n.innerText)}t.exports={init:r}},function(t,n,r){"use strict";function e(t){return t&&t.__esModule?t:{default:t}}function i(t,n){var r=/\/|index.html/g;return t.replace(r,"")===n.replace(r,"")}function o(){for(var t=document.querySelectorAll(".js-header-menu li a"),n=window.location.pathname,r=0,e=t.length;r<e;r++){var o=t[r];i(n,o.getAttribute("href"))&&(0,h.default)(o,"active")}}function u(t){for(var n=t.offsetLeft,r=t.offsetParent;null!==r;)n+=r.offsetLeft,r=r.offsetParent;return n}function c(t){for(var n=t.offsetTop,r=t.offsetParent;null!==r;)n+=r.offsetTop,r=r.offsetParent;return n}function f(t,n,r,e,i){var o=u(t),f=c(t)-n;if(f-r<=i){var a=t.$newDom;a||(a=t.cloneNode(!0),(0,d.default)(t,a),t.$newDom=a,a.style.position="fixed",a.style.top=(r||f)+"px",a.style.left=o+"px",a.style.zIndex=e||2,a.style.width="100%",a.style.color="#fff"),a.style.visibility="visible",t.style.visibility="hidden"}else{t.style.visibility="visible";var s=t.$newDom;s&&(s.style.visibility="hidden")}}function a(){var t=document.querySelector(".js-overlay"),n=document.querySelector(".js-header-menu");f(t,document.body.scrollTop,-63,2,0),f(n,document.body.scrollTop,1,3,0)}function s(){document.querySelector("#container").addEventListener("scroll",function(t){a()}),window.addEventListener("scroll",function(t){a()}),a()}var 
l=r(156),h=e(l),v=r(157),p=(e(v),r(382)),d=e(p),y=r(128),g=e(y),b=r(190),m=e(b),x=r(129);(function(){g.default.versions.mobile&&window.screen.width<800&&(o(),s())})(),(0,x.addLoadEvent)(function(){m.default.init()}),t.exports={}},,,,function(t,n,r){(function(t){"use strict";function n(t,n,r){t[n]||Object[e](t,n,{writable:!0,configurable:!0,value:r})}if(r(381),r(391),r(198),t._babelPolyfill)throw new Error("only one instance of babel-polyfill is allowed");t._babelPolyfill=!0;var e="defineProperty";n(String.prototype,"padLeft","".padStart),n(String.prototype,"padRight","".padEnd),"pop,reverse,shift,keys,values,entries,indexOf,every,some,forEach,map,filter,find,findIndex,includes,join,slice,concat,push,splice,unshift,sort,lastIndexOf,reduce,reduceRight,copyWithin,fill".split(",").forEach(function(t){[][t]&&n(Array,t,Function.call.bind([][t]))})}).call(n,function(){return this}())},,,function(t,n,r){r(210),t.exports=r(52).RegExp.escape},,,,function(t,n,r){var e=r(6),i=r(138),o=r(7)("species");t.exports=function(t){var n;return i(t)&&(n=t.constructor,"function"!=typeof n||n!==Array&&!i(n.prototype)||(n=void 0),e(n)&&null===(n=n[o])&&(n=void 0)),void 0===n?Array:n}},function(t,n,r){var e=r(202);t.exports=function(t,n){return new(e(t))(n)}},function(t,n,r){"use strict";var e=r(2),i=r(50),o="number";t.exports=function(t){if("string"!==t&&t!==o&&"default"!==t)throw TypeError("Incorrect hint");return i(e(this),t!=o)}},function(t,n,r){var e=r(72),i=r(125),o=r(116);t.exports=function(t){var n=e(t),r=i.f;if(r)for(var u,c=r(t),f=o.f,a=0;c.length>a;)f.call(t,u=c[a++])&&n.push(u);return n}},function(t,n,r){var e=r(72),i=r(30);t.exports=function(t,n){for(var r,o=i(t),u=e(o),c=u.length,f=0;c>f;)if(o[r=u[f++]]===n)return r}},function(t,n,r){"use strict";var e=r(208),i=r(121),o=r(26);t.exports=function(){for(var t=o(this),n=arguments.length,r=Array(n),u=0,c=e._,f=!1;n>u;)(r[u]=arguments[u++])===c&&(f=!0);return function(){var e,o=this,u=arguments.length,a=0,s=0;if(!f&&!u)return 
i(t,r,o);if(e=r.slice(),f)for(;n>a;a++)e[a]===c&&(e[a]=arguments[s++]);for(;u>s;)e.push(arguments[s++]);return i(t,e,o)}}},function(t,n,r){t.exports=r(3)},function(t,n){t.exports=function(t,n){var r=n===Object(n)?function(t){return n[t]}:n;return function(n){return String(n).replace(t,r)}}},function(t,n,r){var e=r(1),i=r(209)(/[\\^$*+?.()|[\]{}]/g,"\\$&");e(e.S,"RegExp",{escape:function(t){return i(t)}})},function(t,n,r){var e=r(1);e(e.P,"Array",{copyWithin:r(160)}),r(78)("copyWithin")},function(t,n,r){"use strict";var e=r(1),i=r(48)(4);e(e.P+e.F*!r(47)([].every,!0),"Array",{every:function(t){return i(this,t,arguments[1])}})},function(t,n,r){var e=r(1);e(e.P,"Array",{fill:r(130)}),r(78)("fill")},function(t,n,r){"use strict";var e=r(1),i=r(48)(2);e(e.P+e.F*!r(47)([].filter,!0),"Array",{filter:function(t){return i(this,t,arguments[1])}})},function(t,n,r){"use strict";var e=r(1),i=r(48)(6),o="findIndex",u=!0;o in[]&&Array(1)[o](function(){u=!1}),e(e.P+e.F*u,"Array",{findIndex:function(t){return i(this,t,arguments.length>1?arguments[1]:void 0)}}),r(78)(o)},function(t,n,r){"use strict";var e=r(1),i=r(48)(5),o="find",u=!0;o in[]&&Array(1)[o](function(){u=!1}),e(e.P+e.F*u,"Array",{find:function(t){return i(this,t,arguments.length>1?arguments[1]:void 0)}}),r(78)(o)},function(t,n,r){"use strict";var e=r(1),i=r(48)(0),o=r(47)([].forEach,!0);e(e.P+e.F*!o,"Array",{forEach:function(t){return i(this,t,arguments[1])}})},function(t,n,r){"use strict";var e=r(53),i=r(1),o=r(17),u=r(169),c=r(137),f=r(16),a=r(131),s=r(154);i(i.S+i.F*!r(123)(function(t){Array.from(t)}),"Array",{from:function(t){var n,r,i,l,h=o(t),v="function"==typeof this?this:Array,p=arguments.length,d=p>1?arguments[1]:void 0,y=void 0!==d,g=0,b=s(h);if(y&&(d=e(d,p>2?arguments[2]:void 0,2)),void 0==b||v==Array&&c(b))for(n=f(h.length),r=new v(n);n>g;g++)a(r,g,y?d(h[g],g):h[g]);else for(l=b.call(h),r=new v;!(i=l.next()).done;g++)a(r,g,y?u(l,d,[i.value,g],!0):i.value);return r.length=g,r}})},function(t,n,r){"use 
strict";var e=r(1),i=r(117)(!1),o=[].indexOf,u=!!o&&1/[1].indexOf(1,-0)<0;e(e.P+e.F*(u||!r(47)(o)),"Array",{indexOf:function(t){return u?o.apply(this,arguments)||0:i(this,t,arguments[1])}})},function(t,n,r){var e=r(1);e(e.S,"Array",{isArray:r(138)})},function(t,n,r){"use strict";var e=r(1),i=r(30),o=[].join;e(e.P+e.F*(r(115)!=Object||!r(47)(o)),"Array",{join:function(t){return o.call(i(this),void 0===t?",":t)}})},function(t,n,r){"use strict";var e=r(1),i=r(30),o=r(67),u=r(16),c=[].lastIndexOf,f=!!c&&1/[1].lastIndexOf(1,-0)<0;e(e.P+e.F*(f||!r(47)(c)),"Array",{lastIndexOf:function(t){if(f)return c.apply(this,arguments)||0;var n=i(this),r=u(n.length),e=r-1;for(arguments.length>1&&(e=Math.min(e,o(arguments[1]))),e<0&&(e=r+e);e>=0;e--)if(e in n&&n[e]===t)return e||0;return-1}})},function(t,n,r){"use strict";var e=r(1),i=r(48)(1);e(e.P+e.F*!r(47)([].map,!0),"Array",{map:function(t){return i(this,t,arguments[1])}})},function(t,n,r){"use strict";var e=r(1),i=r(131);e(e.S+e.F*r(4)(function(){function t(){}return!(Array.of.call(t)instanceof t)}),"Array",{of:function(){for(var t=0,n=arguments.length,r=new("function"==typeof this?this:Array)(n);n>t;)i(r,t,arguments[t++]);return r.length=n,r}})},function(t,n,r){"use strict";var e=r(1),i=r(162);e(e.P+e.F*!r(47)([].reduceRight,!0),"Array",{reduceRight:function(t){return i(this,t,arguments.length,arguments[1],!0)}})},function(t,n,r){"use strict";var e=r(1),i=r(162);e(e.P+e.F*!r(47)([].reduce,!0),"Array",{reduce:function(t){return i(this,t,arguments.length,arguments[1],!1)}})},function(t,n,r){"use strict";var e=r(1),i=r(135),o=r(45),u=r(75),c=r(16),f=[].slice;e(e.P+e.F*r(4)(function(){i&&f.call(i)}),"Array",{slice:function(t,n){var r=c(this.length),e=o(this);if(n=void 0===n?r:n,"Array"==e)return f.call(this,t,n);for(var i=u(t,r),a=u(n,r),s=c(a-i),l=Array(s),h=0;h<s;h++)l[h]="String"==e?this.charAt(i+h):this[i+h];return l}})},function(t,n,r){"use strict";var 
e=r(1),i=r(48)(3);e(e.P+e.F*!r(47)([].some,!0),"Array",{some:function(t){return i(this,t,arguments[1])}})},function(t,n,r){"use strict";var e=r(1),i=r(26),o=r(17),u=r(4),c=[].sort,f=[1,2,3];e(e.P+e.F*(u(function(){f.sort(void 0)})||!u(function(){f.sort(null)})||!r(47)(c)),"Array",{sort:function(t){return void 0===t?c.call(o(this)):c.call(o(this),i(t))}})},function(t,n,r){r(74)("Array")},function(t,n,r){var e=r(1);e(e.S,"Date",{now:function(){return(new Date).getTime()}})},function(t,n,r){"use strict";var e=r(1),i=r(4),o=Date.prototype.getTime,u=function(t){return t>9?t:"0"+t};e(e.P+e.F*(i(function(){return"0385-07-25T07:06:39.999Z"!=new Date(-5e13-1).toISOString()})||!i(function(){new Date(NaN).toISOString()})),"Date",{toISOString:function(){
if(!isFinite(o.call(this)))throw RangeError("Invalid time value");var t=this,n=t.getUTCFullYear(),r=t.getUTCMilliseconds(),e=n<0?"-":n>9999?"+":"";return e+("00000"+Math.abs(n)).slice(e?-6:-4)+"-"+u(t.getUTCMonth()+1)+"-"+u(t.getUTCDate())+"T"+u(t.getUTCHours())+":"+u(t.getUTCMinutes())+":"+u(t.getUTCSeconds())+"."+(r>99?r:"0"+u(r))+"Z"}})},function(t,n,r){"use strict";var e=r(1),i=r(17),o=r(50);e(e.P+e.F*r(4)(function(){return null!==new Date(NaN).toJSON()||1!==Date.prototype.toJSON.call({toISOString:function(){return 1}})}),"Date",{toJSON:function(t){var n=i(this),r=o(n);return"number"!=typeof r||isFinite(r)?n.toISOString():null}})},function(t,n,r){var e=r(7)("toPrimitive"),i=Date.prototype;e in i||r(27)(i,e,r(204))},function(t,n,r){var e=Date.prototype,i="Invalid Date",o="toString",u=e[o],c=e.getTime;new Date(NaN)+""!=i&&r(28)(e,o,function(){var t=c.call(this);return t===t?u.call(this):i})},function(t,n,r){var e=r(1);e(e.P,"Function",{bind:r(163)})},function(t,n,r){"use strict";var e=r(6),i=r(32),o=r(7)("hasInstance"),u=Function.prototype;o in u||r(11).f(u,o,{value:function(t){if("function"!=typeof this||!e(t))return!1;if(!e(this.prototype))return t instanceof this;for(;t=i(t);)if(this.prototype===t)return!0;return!1}})},function(t,n,r){var e=r(11).f,i=r(66),o=r(24),u=Function.prototype,c="name",f=Object.isExtensible||function(){return!0};c in u||r(10)&&e(u,c,{configurable:!0,get:function(){try{var t=this,n=(""+t).match(/^\s*function ([^ (]*)/)[1];return o(t,c)||!f(t)||e(t,c,i(5,n)),n}catch(t){return""}}})},function(t,n,r){var e=r(1),i=r(171),o=Math.sqrt,u=Math.acosh;e(e.S+e.F*!(u&&710==Math.floor(u(Number.MAX_VALUE))&&u(1/0)==1/0),"Math",{acosh:function(t){return(t=+t)<1?NaN:t>94906265.62425156?Math.log(t)+Math.LN2:i(t-1+o(t-1)*o(t+1))}})},function(t,n,r){function e(t){return isFinite(t=+t)&&0!=t?t<0?-e(-t):Math.log(t+Math.sqrt(t*t+1)):t}var i=r(1),o=Math.asinh;i(i.S+i.F*!(o&&1/o(0)>0),"Math",{asinh:e})},function(t,n,r){var 
e=r(1),i=Math.atanh;e(e.S+e.F*!(i&&1/i(-0)<0),"Math",{atanh:function(t){return 0==(t=+t)?t:Math.log((1+t)/(1-t))/2}})},function(t,n,r){var e=r(1),i=r(142);e(e.S,"Math",{cbrt:function(t){return i(t=+t)*Math.pow(Math.abs(t),1/3)}})},function(t,n,r){var e=r(1);e(e.S,"Math",{clz32:function(t){return(t>>>=0)?31-Math.floor(Math.log(t+.5)*Math.LOG2E):32}})},function(t,n,r){var e=r(1),i=Math.exp;e(e.S,"Math",{cosh:function(t){return(i(t=+t)+i(-t))/2}})},function(t,n,r){var e=r(1),i=r(141);e(e.S+e.F*(i!=Math.expm1),"Math",{expm1:i})},function(t,n,r){var e=r(1),i=r(142),o=Math.pow,u=o(2,-52),c=o(2,-23),f=o(2,127)*(2-c),a=o(2,-126),s=function(t){return t+1/u-1/u};e(e.S,"Math",{fround:function(t){var n,r,e=Math.abs(t),o=i(t);return e<a?o*s(e/a/c)*a*c:(n=(1+c/u)*e,r=n-(n-e),r>f||r!=r?o*(1/0):o*r)}})},function(t,n,r){var e=r(1),i=Math.abs;e(e.S,"Math",{hypot:function(t,n){for(var r,e,o=0,u=0,c=arguments.length,f=0;u<c;)r=i(arguments[u++]),f<r?(e=f/r,o=o*e*e+1,f=r):r>0?(e=r/f,o+=e*e):o+=r;return f===1/0?1/0:f*Math.sqrt(o)}})},function(t,n,r){var e=r(1),i=Math.imul;e(e.S+e.F*r(4)(function(){return-5!=i(4294967295,5)||2!=i.length}),"Math",{imul:function(t,n){var r=65535,e=+t,i=+n,o=r&e,u=r&i;return 0|o*u+((r&e>>>16)*u+o*(r&i>>>16)<<16>>>0)}})},function(t,n,r){var e=r(1);e(e.S,"Math",{log10:function(t){return Math.log(t)/Math.LN10}})},function(t,n,r){var e=r(1);e(e.S,"Math",{log1p:r(171)})},function(t,n,r){var e=r(1);e(e.S,"Math",{log2:function(t){return Math.log(t)/Math.LN2}})},function(t,n,r){var e=r(1);e(e.S,"Math",{sign:r(142)})},function(t,n,r){var e=r(1),i=r(141),o=Math.exp;e(e.S+e.F*r(4)(function(){return-2e-17!=!Math.sinh(-2e-17)}),"Math",{sinh:function(t){return Math.abs(t=+t)<1?(i(t)-i(-t))/2:(o(t-1)-o(-t-1))*(Math.E/2)}})},function(t,n,r){var e=r(1),i=r(141),o=Math.exp;e(e.S,"Math",{tanh:function(t){var n=i(t=+t),r=i(-t);return n==1/0?1:r==1/0?-1:(n-r)/(o(t)+o(-t))}})},function(t,n,r){var 
e=r(1);e(e.S,"Math",{trunc:function(t){return(t>0?Math.floor:Math.ceil)(t)}})},function(t,n,r){"use strict";var e=r(3),i=r(24),o=r(45),u=r(136),c=r(50),f=r(4),a=r(71).f,s=r(31).f,l=r(11).f,h=r(82).trim,v="Number",p=e[v],d=p,y=p.prototype,g=o(r(70)(y))==v,b="trim"in String.prototype,m=function(t){var n=c(t,!1);if("string"==typeof n&&n.length>2){n=b?n.trim():h(n,3);var r,e,i,o=n.charCodeAt(0);if(43===o||45===o){if(88===(r=n.charCodeAt(2))||120===r)return NaN}else if(48===o){switch(n.charCodeAt(1)){case 66:case 98:e=2,i=49;break;case 79:case 111:e=8,i=55;break;default:return+n}for(var u,f=n.slice(2),a=0,s=f.length;a<s;a++)if((u=f.charCodeAt(a))<48||u>i)return NaN;return parseInt(f,e)}}return+n};if(!p(" 0o1")||!p("0b1")||p("+0x1")){p=function(t){var n=arguments.length<1?0:t,r=this;return r instanceof p&&(g?f(function(){y.valueOf.call(r)}):o(r)!=v)?u(new d(m(n)),r,p):m(n)};for(var x,w=r(10)?a(d):"MAX_VALUE,MIN_VALUE,NaN,NEGATIVE_INFINITY,POSITIVE_INFINITY,EPSILON,isFinite,isInteger,isNaN,isSafeInteger,MAX_SAFE_INTEGER,MIN_SAFE_INTEGER,parseFloat,parseInt,isInteger".split(","),S=0;w.length>S;S++)i(d,x=w[S])&&!i(p,x)&&l(p,x,s(d,x));p.prototype=y,y.constructor=p,r(28)(e,v,p)}},function(t,n,r){var e=r(1);e(e.S,"Number",{EPSILON:Math.pow(2,-52)})},function(t,n,r){var e=r(1),i=r(3).isFinite;e(e.S,"Number",{isFinite:function(t){return"number"==typeof t&&i(t)}})},function(t,n,r){var e=r(1);e(e.S,"Number",{isInteger:r(168)})},function(t,n,r){var e=r(1);e(e.S,"Number",{isNaN:function(t){return t!=t}})},function(t,n,r){var e=r(1),i=r(168),o=Math.abs;e(e.S,"Number",{isSafeInteger:function(t){return i(t)&&o(t)<=9007199254740991}})},function(t,n,r){var e=r(1);e(e.S,"Number",{MAX_SAFE_INTEGER:9007199254740991})},function(t,n,r){var e=r(1);e(e.S,"Number",{MIN_SAFE_INTEGER:-9007199254740991})},function(t,n,r){var e=r(1),i=r(178);e(e.S+e.F*(Number.parseFloat!=i),"Number",{parseFloat:i})},function(t,n,r){var 
e=r(1),i=r(179);e(e.S+e.F*(Number.parseInt!=i),"Number",{parseInt:i})},function(t,n,r){"use strict";var e=r(1),i=r(67),o=r(159),u=r(149),c=1..toFixed,f=Math.floor,a=[0,0,0,0,0,0],s="Number.toFixed: incorrect invocation!",l="0",h=function(t,n){for(var r=-1,e=n;++r<6;)e+=t*a[r],a[r]=e%1e7,e=f(e/1e7)},v=function(t){for(var n=6,r=0;--n>=0;)r+=a[n],a[n]=f(r/t),r=r%t*1e7},p=function(){for(var t=6,n="";--t>=0;)if(""!==n||0===t||0!==a[t]){var r=String(a[t]);n=""===n?r:n+u.call(l,7-r.length)+r}return n},d=function(t,n,r){return 0===n?r:n%2==1?d(t,n-1,r*t):d(t*t,n/2,r)},y=function(t){for(var n=0,r=t;r>=4096;)n+=12,r/=4096;for(;r>=2;)n+=1,r/=2;return n};e(e.P+e.F*(!!c&&("0.000"!==8e-5.toFixed(3)||"1"!==.9.toFixed(0)||"1.25"!==1.255.toFixed(2)||"1000000000000000128"!==(0xde0b6b3a7640080).toFixed(0))||!r(4)(function(){c.call({})})),"Number",{toFixed:function(t){var n,r,e,c,f=o(this,s),a=i(t),g="",b=l;if(a<0||a>20)throw RangeError(s);if(f!=f)return"NaN";if(f<=-1e21||f>=1e21)return String(f);if(f<0&&(g="-",f=-f),f>1e-21)if(n=y(f*d(2,69,1))-69,r=n<0?f*d(2,-n,1):f/d(2,n,1),r*=4503599627370496,(n=52-n)>0){for(h(0,r),e=a;e>=7;)h(1e7,0),e-=7;for(h(d(10,e,1),0),e=n-1;e>=23;)v(1<<23),e-=23;v(1<<e),h(1,1),v(2),b=p()}else h(0,r),h(1<<-n,0),b=p()+u.call(l,a);return a>0?(c=b.length,b=g+(c<=a?"0."+u.call(l,a-c)+b:b.slice(0,c-a)+"."+b.slice(c-a))):b=g+b,b}})},function(t,n,r){"use strict";var e=r(1),i=r(4),o=r(159),u=1..toPrecision;e(e.P+e.F*(i(function(){return"1"!==u.call(1,void 0)})||!i(function(){u.call({})})),"Number",{toPrecision:function(t){var n=o(this,"Number#toPrecision: incorrect invocation!");return void 0===t?u.call(n):u.call(n,t)}})},function(t,n,r){var e=r(1);e(e.S+e.F,"Object",{assign:r(172)})},function(t,n,r){var e=r(1);e(e.S,"Object",{create:r(70)})},function(t,n,r){var e=r(1);e(e.S+e.F*!r(10),"Object",{defineProperties:r(173)})},function(t,n,r){var e=r(1);e(e.S+e.F*!r(10),"Object",{defineProperty:r(11).f})},function(t,n,r){var 
e=r(6),i=r(65).onFreeze;r(49)("freeze",function(t){return function(n){return t&&e(n)?t(i(n)):n}})},function(t,n,r){var e=r(30),i=r(31).f;r(49)("getOwnPropertyDescriptor",function(){return function(t,n){return i(e(t),n)}})},function(t,n,r){r(49)("getOwnPropertyNames",function(){return r(174).f})},function(t,n,r){var e=r(17),i=r(32);r(49)("getPrototypeOf",function(){return function(t){return i(e(t))}})},function(t,n,r){var e=r(6);r(49)("isExtensible",function(t){return function(n){return!!e(n)&&(!t||t(n))}})},function(t,n,r){var e=r(6);r(49)("isFrozen",function(t){return function(n){return!e(n)||!!t&&t(n)}})},function(t,n,r){var e=r(6);r(49)("isSealed",function(t){return function(n){return!e(n)||!!t&&t(n)}})},function(t,n,r){var e=r(1);e(e.S,"Object",{is:r(180)})},function(t,n,r){var e=r(17),i=r(72);r(49)("keys",function(){return function(t){return i(e(t))}})},function(t,n,r){var e=r(6),i=r(65).onFreeze;r(49)("preventExtensions",function(t){return function(n){return t&&e(n)?t(i(n)):n}})},function(t,n,r){var e=r(6),i=r(65).onFreeze;r(49)("seal",function(t){return function(n){return t&&e(n)?t(i(n)):n}})},function(t,n,r){var e=r(1);e(e.S,"Object",{setPrototypeOf:r(144).set})},function(t,n,r){"use strict";var e=r(114),i={};i[r(7)("toStringTag")]="z",i+""!="[object z]"&&r(28)(Object.prototype,"toString",function(){return"[object "+e(this)+"]"},!0)},function(t,n,r){var e=r(1),i=r(178);e(e.G+e.F*(parseFloat!=i),{parseFloat:i})},function(t,n,r){var e=r(1),i=r(179);e(e.G+e.F*(parseInt!=i),{parseInt:i})},function(t,n,r){"use strict";var e,i,o,u=r(69),c=r(3),f=r(53),a=r(114),s=r(1),l=r(6),h=r(26),v=r(68),p=r(79),d=r(146),y=r(151).set,g=r(143)(),b="Promise",m=c.TypeError,x=c.process,w=c[b],x=c.process,S="process"==a(x),_=function(){},O=!!function(){try{var t=w.resolve(1),n=(t.constructor={})[r(7)("species")]=function(t){t(_,_)};return(S||"function"==typeof PromiseRejectionEvent)&&t.then(_)instanceof n}catch(t){}}(),E=function(t,n){return t===n||t===w&&n===o},P=function(t){var 
n;return!(!l(t)||"function"!=typeof(n=t.then))&&n},j=function(t){return E(w,t)?new F(t):new i(t)},F=i=function(t){var n,r;this.promise=new t(function(t,e){if(void 0!==n||void 0!==r)throw m("Bad Promise constructor");n=t,r=e}),this.resolve=h(n),this.reject=h(r)},M=function(t){try{t()}catch(t){return{error:t}}},A=function(t,n){if(!t._n){t._n=!0;var r=t._c;g(function(){for(var e=t._v,i=1==t._s,o=0;r.length>o;)!function(n){var r,o,u=i?n.ok:n.fail,c=n.resolve,f=n.reject,a=n.domain;try{u?(i||(2==t._h&&I(t),t._h=1),!0===u?r=e:(a&&a.enter(),r=u(e),a&&a.exit()),r===n.promise?f(m("Promise-chain cycle")):(o=P(r))?o.call(r,c,f):c(r)):f(e)}catch(t){f(t)}}(r[o++]);t._c=[],t._n=!1,n&&!t._h&&N(t)})}},N=function(t){y.call(c,function(){var n,r,e,i=t._v;if(T(t)&&(n=M(function(){S?x.emit("unhandledRejection",i,t):(r=c.onunhandledrejection)?r({promise:t,reason:i}):(e=c.console)&&e.error&&e.error("Unhandled promise rejection",i)}),t._h=S||T(t)?2:1),t._a=void 0,n)throw n.error})},T=function(t){if(1==t._h)return!1;for(var n,r=t._a||t._c,e=0;r.length>e;)if(n=r[e++],n.fail||!T(n.promise))return!1;return!0},I=function(t){y.call(c,function(){var n;S?x.emit("rejectionHandled",t):(n=c.onrejectionhandled)&&n({promise:t,reason:t._v})})},k=function(t){var n=this;n._d||(n._d=!0,n=n._w||n,n._v=t,n._s=2,n._a||(n._a=n._c.slice()),A(n,!0))},L=function(t){var n,r=this;if(!r._d){r._d=!0,r=r._w||r;try{if(r===t)throw m("Promise can't be resolved itself");(n=P(t))?g(function(){var e={_w:r,_d:!1};try{n.call(t,f(L,e,1),f(k,e,1))}catch(t){k.call(e,t)}}):(r._v=t,r._s=1,A(r,!1))}catch(t){k.call({_w:r,_d:!1},t)}}};O||(w=function(t){v(this,w,b,"_h"),h(t),e.call(this);try{t(f(L,this,1),f(k,this,1))}catch(t){k.call(this,t)}},e=function(t){this._c=[],this._a=void 0,this._s=0,this._d=!1,this._v=void 0,this._h=0,this._n=!1},e.prototype=r(73)(w.prototype,{then:function(t,n){var r=j(d(this,w));return r.ok="function"!=typeof t||t,r.fail="function"==typeof n&&n,r.domain=S?x.domain:void 
0,this._c.push(r),this._a&&this._a.push(r),this._s&&A(this,!1),r.promise},catch:function(t){return this.then(void 0,t)}}),F=function(){var t=new e;this.promise=t,this.resolve=f(L,t,1),this.reject=f(k,t,1)}),s(s.G+s.W+s.F*!O,{Promise:w}),r(81)(w,b),r(74)(b),o=r(52)[b],s(s.S+s.F*!O,b,{reject:function(t){var n=j(this);return(0,n.reject)(t),n.promise}}),s(s.S+s.F*(u||!O),b,{resolve:function(t){if(t instanceof w&&E(t.constructor,this))return t;var n=j(this);return(0,n.resolve)(t),n.promise}}),s(s.S+s.F*!(O&&r(123)(function(t){w.all(t).catch(_)})),b,{all:function(t){var n=this,r=j(n),e=r.resolve,i=r.reject,o=M(function(){var r=[],o=0,u=1;p(t,!1,function(t){var c=o++,f=!1;r.push(void 0),u++,n.resolve(t).then(function(t){f||(f=!0,r[c]=t,--u||e(r))},i)}),--u||e(r)});return o&&i(o.error),r.promise},race:function(t){var n=this,r=j(n),e=r.reject,i=M(function(){p(t,!1,function(t){n.resolve(t).then(r.resolve,e)})});return i&&e(i.error),r.promise}})},function(t,n,r){var e=r(1),i=r(26),o=r(2),u=(r(3).Reflect||{}).apply,c=Function.apply;e(e.S+e.F*!r(4)(function(){u(function(){})}),"Reflect",{apply:function(t,n,r){var e=i(t),f=o(r);return u?u(e,n,f):c.call(e,n,f)}})},function(t,n,r){var e=r(1),i=r(70),o=r(26),u=r(2),c=r(6),f=r(4),a=r(163),s=(r(3).Reflect||{}).construct,l=f(function(){function t(){}return!(s(function(){},[],t)instanceof t)}),h=!f(function(){s(function(){})});e(e.S+e.F*(l||h),"Reflect",{construct:function(t,n){o(t),u(n);var r=arguments.length<3?t:o(arguments[2]);if(h&&!l)return s(t,n,r);if(t==r){switch(n.length){case 0:return new t;case 1:return new t(n[0]);case 2:return new t(n[0],n[1]);case 3:return new t(n[0],n[1],n[2]);case 4:return new t(n[0],n[1],n[2],n[3])}var e=[null];return e.push.apply(e,n),new(a.apply(t,e))}var f=r.prototype,v=i(c(f)?f:Object.prototype),p=Function.apply.call(t,v,n);return c(p)?p:v}})},function(t,n,r){var 
e=r(11),i=r(1),o=r(2),u=r(50);i(i.S+i.F*r(4)(function(){Reflect.defineProperty(e.f({},1,{value:1}),1,{value:2})}),"Reflect",{defineProperty:function(t,n,r){o(t),n=u(n,!0),o(r);try{return e.f(t,n,r),!0}catch(t){return!1}}})},function(t,n,r){var e=r(1),i=r(31).f,o=r(2);e(e.S,"Reflect",{deleteProperty:function(t,n){var r=i(o(t),n);return!(r&&!r.configurable)&&delete t[n]}})},function(t,n,r){"use strict";var e=r(1),i=r(2),o=function(t){this._t=i(t),this._i=0;var n,r=this._k=[];for(n in t)r.push(n)};r(139)(o,"Object",function(){var t,n=this,r=n._k;do{if(n._i>=r.length)return{value:void 0,done:!0}}while(!((t=r[n._i++])in n._t));return{value:t,done:!1}}),e(e.S,"Reflect",{enumerate:function(t){return new o(t)}})},function(t,n,r){var e=r(31),i=r(1),o=r(2);i(i.S,"Reflect",{getOwnPropertyDescriptor:function(t,n){return e.f(o(t),n)}})},function(t,n,r){var e=r(1),i=r(32),o=r(2);e(e.S,"Reflect",{getPrototypeOf:function(t){return i(o(t))}})},function(t,n,r){function e(t,n){var r,c,s=arguments.length<3?t:arguments[2];return a(t)===s?t[n]:(r=i.f(t,n))?u(r,"value")?r.value:void 0!==r.get?r.get.call(s):void 0:f(c=o(t))?e(c,n,s):void 0}var i=r(31),o=r(32),u=r(24),c=r(1),f=r(6),a=r(2);c(c.S,"Reflect",{get:e})},function(t,n,r){var e=r(1);e(e.S,"Reflect",{has:function(t,n){return n in t}})},function(t,n,r){var e=r(1),i=r(2),o=Object.isExtensible;e(e.S,"Reflect",{isExtensible:function(t){return i(t),!o||o(t)}})},function(t,n,r){var e=r(1);e(e.S,"Reflect",{ownKeys:r(177)})},function(t,n,r){var e=r(1),i=r(2),o=Object.preventExtensions;e(e.S,"Reflect",{preventExtensions:function(t){i(t);try{return o&&o(t),!0}catch(t){return!1}}})},function(t,n,r){var e=r(1),i=r(144);i&&e(e.S,"Reflect",{setPrototypeOf:function(t,n){i.check(t,n);try{return i.set(t,n),!0}catch(t){return!1}}})},function(t,n,r){function e(t,n,r){var f,h,v=arguments.length<4?t:arguments[3],p=o.f(s(t),n);if(!p){if(l(h=u(t)))return e(h,n,r,v);p=a(0)}return 
c(p,"value")?!(!1===p.writable||!l(v)||(f=o.f(v,n)||a(0),f.value=r,i.f(v,n,f),0)):void 0!==p.set&&(p.set.call(v,r),!0)}var i=r(11),o=r(31),u=r(32),c=r(24),f=r(1),a=r(66),s=r(2),l=r(6);f(f.S,"Reflect",{set:e})},function(t,n,r){var e=r(3),i=r(136),o=r(11).f,u=r(71).f,c=r(122),f=r(120),a=e.RegExp,s=a,l=a.prototype,h=/a/g,v=/a/g,p=new a(h)!==h;if(r(10)&&(!p||r(4)(function(){return v[r(7)("match")]=!1,a(h)!=h||a(v)==v||"/a/i"!=a(h,"i")}))){a=function(t,n){var r=this instanceof a,e=c(t),o=void 0===n;return!r&&e&&t.constructor===a&&o?t:i(p?new s(e&&!o?t.source:t,n):s((e=t instanceof a)?t.source:t,e&&o?f.call(t):n),r?this:l,a)};for(var d=u(s),y=0;d.length>y;)!function(t){t in a||o(a,t,{configurable:!0,get:function(){return s[t]},set:function(n){s[t]=n}})}(d[y++]);l.constructor=a,a.prototype=l,r(28)(e,"RegExp",a)}r(74)("RegExp")},function(t,n,r){r(119)("match",1,function(t,n,r){return[function(r){"use strict";var e=t(this),i=void 0==r?void 0:r[n];return void 0!==i?i.call(r,e):new RegExp(r)[n](String(e))},r]})},function(t,n,r){r(119)("replace",2,function(t,n,r){return[function(e,i){"use strict";var o=t(this),u=void 0==e?void 0:e[n];return void 0!==u?u.call(e,o,i):r.call(String(o),e,i)},r]})},function(t,n,r){r(119)("search",1,function(t,n,r){return[function(r){"use strict";var e=t(this),i=void 0==r?void 0:r[n];return void 0!==i?i.call(r,e):new RegExp(r)[n](String(e))},r]})},function(t,n,r){r(119)("split",2,function(t,n,e){"use strict";var i=r(122),o=e,u=[].push,c="split",f="length",a="lastIndex";if("c"=="abbc"[c](/(b)*/)[1]||4!="test"[c](/(?:)/,-1)[f]||2!="ab"[c](/(?:ab)*/)[f]||4!="."[c](/(.?)(.?)/)[f]||"."[c](/()()/)[f]>1||""[c](/.?/)[f]){var s=void 0===/()??/.exec("")[1];e=function(t,n){var r=String(this);if(void 0===t&&0===n)return[];if(!i(t))return o.call(r,t,n);var e,c,l,h,v,p=[],d=(t.ignoreCase?"i":"")+(t.multiline?"m":"")+(t.unicode?"u":"")+(t.sticky?"y":""),y=0,g=void 0===n?4294967295:n>>>0,b=new RegExp(t.source,d+"g");for(s||(e=new 
RegExp("^"+b.source+"$(?!\\s)",d));(c=b.exec(r))&&!((l=c.index+c[0][f])>y&&(p.push(r.slice(y,c.index)),!s&&c[f]>1&&c[0].replace(e,function(){for(v=1;v<arguments[f]-2;v++)void 0===arguments[v]&&(c[v]=void 0)}),c[f]>1&&c.index<r[f]&&u.apply(p,c.slice(1)),h=c[0][f],y=l,p[f]>=g));)b[a]===c.index&&b[a]++;return y===r[f]?!h&&b.test("")||p.push(""):p.push(r.slice(y)),p[f]>g?p.slice(0,g):p}}else"0"[c](void 0,0)[f]&&(e=function(t,n){return void 0===t&&0===n?[]:o.call(this,t,n)});return[function(r,i){var o=t(this),u=void 0==r?void 0:r[n];return void 0!==u?u.call(r,o,i):e.call(String(o),r,i)},e]})},function(t,n,r){"use strict";r(184);var e=r(2),i=r(120),o=r(10),u="toString",c=/./[u],f=function(t){r(28)(RegExp.prototype,u,t,!0)};r(4)(function(){return"/a/b"!=c.call({source:"a",flags:"b"})})?f(function(){var t=e(this);return"/".concat(t.source,"/","flags"in t?t.flags:!o&&t instanceof RegExp?i.call(t):void 0)}):c.name!=u&&f(function(){return c.call(this)})},function(t,n,r){"use strict";r(29)("anchor",function(t){return function(n){return t(this,"a","name",n)}})},function(t,n,r){"use strict";r(29)("big",function(t){return function(){return t(this,"big","","")}})},function(t,n,r){"use strict";r(29)("blink",function(t){return function(){return t(this,"blink","","")}})},function(t,n,r){"use strict";r(29)("bold",function(t){return function(){return t(this,"b","","")}})},function(t,n,r){"use strict";var e=r(1),i=r(147)(!1);e(e.P,"String",{codePointAt:function(t){return i(this,t)}})},function(t,n,r){"use strict";var e=r(1),i=r(16),o=r(148),u="endsWith",c=""[u];e(e.P+e.F*r(134)(u),"String",{endsWith:function(t){var n=o(this,t,u),r=arguments.length>1?arguments[1]:void 0,e=i(n.length),f=void 0===r?e:Math.min(i(r),e),a=String(t);return c?c.call(n,a,f):n.slice(f-a.length,f)===a}})},function(t,n,r){"use strict";r(29)("fixed",function(t){return function(){return t(this,"tt","","")}})},function(t,n,r){"use strict";r(29)("fontcolor",function(t){return function(n){return 
t(this,"font","color",n)}})},function(t,n,r){"use strict";r(29)("fontsize",function(t){return function(n){return t(this,"font","size",n)}})},function(t,n,r){var e=r(1),i=r(75),o=String.fromCharCode,u=String.fromCodePoint;e(e.S+e.F*(!!u&&1!=u.length),"String",{fromCodePoint:function(t){for(var n,r=[],e=arguments.length,u=0;e>u;){if(n=+arguments[u++],i(n,1114111)!==n)throw RangeError(n+" is not a valid code point");r.push(n<65536?o(n):o(55296+((n-=65536)>>10),n%1024+56320))}return r.join("")}})},function(t,n,r){"use strict";var e=r(1),i=r(148),o="includes";e(e.P+e.F*r(134)(o),"String",{includes:function(t){return!!~i(this,t,o).indexOf(t,arguments.length>1?arguments[1]:void 0)}})},function(t,n,r){"use strict";r(29)("italics",function(t){return function(){return t(this,"i","","")}})},function(t,n,r){"use strict";var e=r(147)(!0);r(140)(String,"String",function(t){this._t=String(t),this._i=0},function(){var t,n=this._t,r=this._i;return r>=n.length?{value:void 0,done:!0}:(t=e(n,r),this._i+=t.length,{value:t,done:!1})})},function(t,n,r){"use strict";r(29)("link",function(t){return function(n){return t(this,"a","href",n)}})},function(t,n,r){var e=r(1),i=r(30),o=r(16);e(e.S,"String",{raw:function(t){for(var n=i(t.raw),r=o(n.length),e=arguments.length,u=[],c=0;r>c;)u.push(String(n[c++])),c<e&&u.push(String(arguments[c]));return u.join("")}})},function(t,n,r){var e=r(1);e(e.P,"String",{repeat:r(149)})},function(t,n,r){"use strict";r(29)("small",function(t){return function(){return t(this,"small","","")}})},function(t,n,r){"use strict";var e=r(1),i=r(16),o=r(148),u="startsWith",c=""[u];e(e.P+e.F*r(134)(u),"String",{startsWith:function(t){var n=o(this,t,u),r=i(Math.min(arguments.length>1?arguments[1]:void 0,n.length)),e=String(t);return c?c.call(n,e,r):n.slice(r,r+e.length)===e}})},function(t,n,r){"use strict";r(29)("strike",function(t){return function(){return t(this,"strike","","")}})},function(t,n,r){"use strict";r(29)("sub",function(t){return function(){return 
t(this,"sub","","")}})},function(t,n,r){"use strict";r(29)("sup",function(t){return function(){return t(this,"sup","","")}})},function(t,n,r){"use strict";r(82)("trim",function(t){return function(){return t(this,3)}})},function(t,n,r){"use strict";var e=r(3),i=r(24),o=r(10),u=r(1),c=r(28),f=r(65).KEY,a=r(4),s=r(126),l=r(81),h=r(76),v=r(7),p=r(182),d=r(153),y=r(206),g=r(205),b=r(138),m=r(2),x=r(30),w=r(50),S=r(66),_=r(70),O=r(174),E=r(31),P=r(11),j=r(72),F=E.f,M=P.f,A=O.f,N=e.Symbol,T=e.JSON,I=T&&T.stringify,k="prototype",L=v("_hidden"),R=v("toPrimitive"),C={}.propertyIsEnumerable,D=s("symbol-registry"),U=s("symbols"),W=s("op-symbols"),G=Object[k],B="function"==typeof N,V=e.QObject,z=!V||!V[k]||!V[k].findChild,q=o&&a(function(){return 7!=_(M({},"a",{get:function(){return M(this,"a",{value:7}).a}})).a})?function(t,n,r){var e=F(G,n);e&&delete G[n],M(t,n,r),e&&t!==G&&M(G,n,e)}:M,K=function(t){var n=U[t]=_(N[k]);return n._k=t,n},J=B&&"symbol"==typeof N.iterator?function(t){return"symbol"==typeof t}:function(t){return t instanceof N},Y=function(t,n,r){return t===G&&Y(W,n,r),m(t),n=w(n,!0),m(r),i(U,n)?(r.enumerable?(i(t,L)&&t[L][n]&&(t[L][n]=!1),r=_(r,{enumerable:S(0,!1)})):(i(t,L)||M(t,L,S(1,{})),t[L][n]=!0),q(t,n,r)):M(t,n,r)},H=function(t,n){m(t);for(var r,e=g(n=x(n)),i=0,o=e.length;o>i;)Y(t,r=e[i++],n[r]);return t},$=function(t,n){return void 0===n?_(t):H(_(t),n)},X=function(t){var n=C.call(this,t=w(t,!0));return!(this===G&&i(U,t)&&!i(W,t))&&(!(n||!i(this,t)||!i(U,t)||i(this,L)&&this[L][t])||n)},Q=function(t,n){if(t=x(t),n=w(n,!0),t!==G||!i(U,n)||i(W,n)){var r=F(t,n);return!r||!i(U,n)||i(t,L)&&t[L][n]||(r.enumerable=!0),r}},Z=function(t){for(var n,r=A(x(t)),e=[],o=0;r.length>o;)i(U,n=r[o++])||n==L||n==f||e.push(n);return e},tt=function(t){for(var n,r=t===G,e=A(r?W:x(t)),o=[],u=0;e.length>u;)!i(U,n=e[u++])||r&&!i(G,n)||o.push(U[n]);return o};B||(N=function(){if(this instanceof N)throw TypeError("Symbol is not a constructor!");var 
t=h(arguments.length>0?arguments[0]:void 0),n=function(r){this===G&&n.call(W,r),i(this,L)&&i(this[L],t)&&(this[L][t]=!1),q(this,t,S(1,r))};return o&&z&&q(G,t,{configurable:!0,set:n}),K(t)},c(N[k],"toString",function(){return this._k}),E.f=Q,P.f=Y,r(71).f=O.f=Z,r(116).f=X,r(125).f=tt,o&&!r(69)&&c(G,"propertyIsEnumerable",X,!0),p.f=function(t){return K(v(t))}),u(u.G+u.W+u.F*!B,{Symbol:N});for(var nt="hasInstance,isConcatSpreadable,iterator,match,replace,search,species,split,toPrimitive,toStringTag,unscopables".split(","),rt=0;nt.length>rt;)v(nt[rt++]);for(var nt=j(v.store),rt=0;nt.length>rt;)d(nt[rt++]);u(u.S+u.F*!B,"Symbol",{for:function(t){return i(D,t+="")?D[t]:D[t]=N(t)},keyFor:function(t){if(J(t))return y(D,t);throw TypeError(t+" is not a symbol!")},useSetter:function(){z=!0},useSimple:function(){z=!1}}),u(u.S+u.F*!B,"Object",{create:$,defineProperty:Y,defineProperties:H,getOwnPropertyDescriptor:Q,getOwnPropertyNames:Z,getOwnPropertySymbols:tt}),T&&u(u.S+u.F*(!B||a(function(){var t=N();return"[null]"!=I([t])||"{}"!=I({a:t})||"{}"!=I(Object(t))})),"JSON",{stringify:function(t){if(void 0!==t&&!J(t)){for(var n,r,e=[t],i=1;arguments.length>i;)e.push(arguments[i++]);return n=e[1],"function"==typeof n&&(r=n),!r&&b(n)||(n=function(t,n){if(r&&(n=r.call(this,t,n)),!J(n))return n}),e[1]=n,I.apply(T,e)}}}),N[k][R]||r(27)(N[k],R,N[k].valueOf),l(N,"Symbol"),l(Math,"Math",!0),l(e.JSON,"JSON",!0)},function(t,n,r){"use strict";var e=r(1),i=r(127),o=r(152),u=r(2),c=r(75),f=r(16),a=r(6),s=r(3).ArrayBuffer,l=r(146),h=o.ArrayBuffer,v=o.DataView,p=i.ABV&&s.isView,d=h.prototype.slice,y=i.VIEW,g="ArrayBuffer";e(e.G+e.W+e.F*(s!==h),{ArrayBuffer:h}),e(e.S+e.F*!i.CONSTR,g,{isView:function(t){return p&&p(t)||a(t)&&y in t}}),e(e.P+e.U+e.F*r(4)(function(){return!new h(2).slice(1,void 0).byteLength}),g,{slice:function(t,n){if(void 0!==d&&void 0===n)return d.call(u(this),t);for(var r=u(this).byteLength,e=c(t,r),i=c(void 0===n?r:n,r),o=new(l(this,h))(f(i-e)),a=new v(this),s=new 
v(o),p=0;e<i;)s.setUint8(p++,a.getUint8(e++));return o}}),r(74)(g)},function(t,n,r){var e=r(1);e(e.G+e.W+e.F*!r(127).ABV,{DataView:r(152).DataView})},function(t,n,r){r(55)("Float32",4,function(t){return function(n,r,e){return t(this,n,r,e)}})},function(t,n,r){r(55)("Float64",8,function(t){return function(n,r,e){return t(this,n,r,e)}})},function(t,n,r){r(55)("Int16",2,function(t){return function(n,r,e){return t(this,n,r,e)}})},function(t,n,r){r(55)("Int32",4,function(t){return function(n,r,e){return t(this,n,r,e)}})},function(t,n,r){r(55)("Int8",1,function(t){return function(n,r,e){return t(this,n,r,e)}})},function(t,n,r){r(55)("Uint16",2,function(t){return function(n,r,e){return t(this,n,r,e)}})},function(t,n,r){r(55)("Uint32",4,function(t){return function(n,r,e){return t(this,n,r,e)}})},function(t,n,r){r(55)("Uint8",1,function(t){return function(n,r,e){return t(this,n,r,e)}})},function(t,n,r){r(55)("Uint8",1,function(t){return function(n,r,e){return t(this,n,r,e)}},!0)},function(t,n,r){"use strict";var e=r(166);r(118)("WeakSet",function(t){return function(){return t(this,arguments.length>0?arguments[0]:void 0)}},{add:function(t){return e.def(this,t,!0)}},e,!1,!0)},function(t,n,r){"use strict";var e=r(1),i=r(117)(!0);e(e.P,"Array",{includes:function(t){return i(this,t,arguments.length>1?arguments[1]:void 0)}}),r(78)("includes")},function(t,n,r){var e=r(1),i=r(143)(),o=r(3).process,u="process"==r(45)(o);e(e.G,{asap:function(t){var n=u&&o.domain;i(n?n.bind(t):t)}})},function(t,n,r){var e=r(1),i=r(45);e(e.S,"Error",{isError:function(t){return"Error"===i(t)}})},function(t,n,r){var e=r(1);e(e.P+e.R,"Map",{toJSON:r(165)("Map")})},function(t,n,r){var e=r(1);e(e.S,"Math",{iaddh:function(t,n,r,e){var i=t>>>0,o=n>>>0,u=r>>>0;return o+(e>>>0)+((i&u|(i|u)&~(i+u>>>0))>>>31)|0}})},function(t,n,r){var e=r(1);e(e.S,"Math",{imulh:function(t,n){var r=65535,e=+t,i=+n,o=e&r,u=i&r,c=e>>16,f=i>>16,a=(c*u>>>0)+(o*u>>>16);return c*f+(a>>16)+((o*f>>>0)+(a&r)>>16)}})},function(t,n,r){var 
e=r(1);e(e.S,"Math",{isubh:function(t,n,r,e){var i=t>>>0,o=n>>>0,u=r>>>0;return o-(e>>>0)-((~i&u|~(i^u)&i-u>>>0)>>>31)|0}})},function(t,n,r){var e=r(1);e(e.S,"Math",{umulh:function(t,n){var r=65535,e=+t,i=+n,o=e&r,u=i&r,c=e>>>16,f=i>>>16,a=(c*u>>>0)+(o*u>>>16);return c*f+(a>>>16)+((o*f>>>0)+(a&r)>>>16)}})},function(t,n,r){"use strict";var e=r(1),i=r(17),o=r(26),u=r(11);r(10)&&e(e.P+r(124),"Object",{__defineGetter__:function(t,n){u.f(i(this),t,{get:o(n),enumerable:!0,configurable:!0})}})},function(t,n,r){"use strict";var e=r(1),i=r(17),o=r(26),u=r(11);r(10)&&e(e.P+r(124),"Object",{__defineSetter__:function(t,n){u.f(i(this),t,{set:o(n),enumerable:!0,configurable:!0})}})},function(t,n,r){var e=r(1),i=r(176)(!0);e(e.S,"Object",{entries:function(t){return i(t)}})},function(t,n,r){var e=r(1),i=r(177),o=r(30),u=r(31),c=r(131);e(e.S,"Object",{getOwnPropertyDescriptors:function(t){for(var n,r=o(t),e=u.f,f=i(r),a={},s=0;f.length>s;)c(a,n=f[s++],e(r,n));return a}})},function(t,n,r){"use strict";var e=r(1),i=r(17),o=r(50),u=r(32),c=r(31).f;r(10)&&e(e.P+r(124),"Object",{__lookupGetter__:function(t){var n,r=i(this),e=o(t,!0);do{if(n=c(r,e))return n.get}while(r=u(r))}})},function(t,n,r){"use strict";var e=r(1),i=r(17),o=r(50),u=r(32),c=r(31).f;r(10)&&e(e.P+r(124),"Object",{__lookupSetter__:function(t){var n,r=i(this),e=o(t,!0);do{if(n=c(r,e))return n.set}while(r=u(r))}})},function(t,n,r){var e=r(1),i=r(176)(!1);e(e.S,"Object",{values:function(t){return i(t)}})},function(t,n,r){"use strict";var e=r(1),i=r(3),o=r(52),u=r(143)(),c=r(7)("observable"),f=r(26),a=r(2),s=r(68),l=r(73),h=r(27),v=r(79),p=v.RETURN,d=function(t){return null==t?void 0:f(t)},y=function(t){var n=t._c;n&&(t._c=void 0,n())},g=function(t){return void 0===t._o},b=function(t){g(t)||(t._o=void 0,y(t))},m=function(t,n){a(t),this._c=void 0,this._o=t,t=new x(this);try{var r=n(t),e=r;null!=r&&("function"==typeof r.unsubscribe?r=function(){e.unsubscribe()}:f(r),this._c=r)}catch(n){return void 
t.error(n)}g(this)&&y(this)};m.prototype=l({},{unsubscribe:function(){b(this)}});var x=function(t){this._s=t};x.prototype=l({},{next:function(t){var n=this._s;if(!g(n)){var r=n._o;try{var e=d(r.next);if(e)return e.call(r,t)}catch(t){try{b(n)}finally{throw t}}}},error:function(t){var n=this._s;if(g(n))throw t;var r=n._o;n._o=void 0;try{var e=d(r.error);if(!e)throw t;t=e.call(r,t)}catch(t){try{y(n)}finally{throw t}}return y(n),t},complete:function(t){var n=this._s;if(!g(n)){var r=n._o;n._o=void 0;try{var e=d(r.complete);t=e?e.call(r,t):void 0}catch(t){try{y(n)}finally{throw t}}return y(n),t}}});var w=function(t){s(this,w,"Observable","_f")._f=f(t)};l(w.prototype,{subscribe:function(t){return new m(t,this._f)},forEach:function(t){var n=this;return new(o.Promise||i.Promise)(function(r,e){f(t);var i=n.subscribe({next:function(n){try{return t(n)}catch(t){e(t),i.unsubscribe()}},error:e,complete:r})})}}),l(w,{from:function(t){var n="function"==typeof this?this:w,r=d(a(t)[c]);if(r){var e=a(r.call(t));return e.constructor===n?e:new n(function(t){return e.subscribe(t)})}return new n(function(n){var r=!1;return u(function(){if(!r){try{if(v(t,!1,function(t){if(n.next(t),r)return p})===p)return}catch(t){if(r)throw t;return void n.error(t)}n.complete()}}),function(){r=!0}})},of:function(){for(var t=0,n=arguments.length,r=Array(n);t<n;)r[t]=arguments[t++];return new("function"==typeof this?this:w)(function(t){var n=!1;return u(function(){if(!n){for(var e=0;e<r.length;++e)if(t.next(r[e]),n)return;t.complete()}}),function(){n=!0}})}}),h(w.prototype,c,function(){return this}),e(e.G,{Observable:w}),r(74)("Observable")},function(t,n,r){var e=r(54),i=r(2),o=e.key,u=e.set;e.exp({defineMetadata:function(t,n,r,e){u(t,n,i(r),o(e))}})},function(t,n,r){var e=r(54),i=r(2),o=e.key,u=e.map,c=e.store;e.exp({deleteMetadata:function(t,n){var r=arguments.length<3?void 0:o(arguments[2]),e=u(i(n),r,!1);if(void 0===e||!e.delete(t))return!1;if(e.size)return!0;var f=c.get(n);return 
f.delete(r),!!f.size||c.delete(n)}})},function(t,n,r){var e=r(185),i=r(161),o=r(54),u=r(2),c=r(32),f=o.keys,a=o.key,s=function(t,n){var r=f(t,n),o=c(t);if(null===o)return r;var u=s(o,n);return u.length?r.length?i(new e(r.concat(u))):u:r};o.exp({getMetadataKeys:function(t){return s(u(t),arguments.length<2?void 0:a(arguments[1]))}})},function(t,n,r){var e=r(54),i=r(2),o=r(32),u=e.has,c=e.get,f=e.key,a=function(t,n,r){if(u(t,n,r))return c(t,n,r);var e=o(n);return null!==e?a(t,e,r):void 0};e.exp({getMetadata:function(t,n){return a(t,i(n),arguments.length<3?void 0:f(arguments[2]))}})},function(t,n,r){var e=r(54),i=r(2),o=e.keys,u=e.key;e.exp({getOwnMetadataKeys:function(t){
return o(i(t),arguments.length<2?void 0:u(arguments[1]))}})},function(t,n,r){var e=r(54),i=r(2),o=e.get,u=e.key;e.exp({getOwnMetadata:function(t,n){return o(t,i(n),arguments.length<3?void 0:u(arguments[2]))}})},function(t,n,r){var e=r(54),i=r(2),o=r(32),u=e.has,c=e.key,f=function(t,n,r){if(u(t,n,r))return!0;var e=o(n);return null!==e&&f(t,e,r)};e.exp({hasMetadata:function(t,n){return f(t,i(n),arguments.length<3?void 0:c(arguments[2]))}})},function(t,n,r){var e=r(54),i=r(2),o=e.has,u=e.key;e.exp({hasOwnMetadata:function(t,n){return o(t,i(n),arguments.length<3?void 0:u(arguments[2]))}})},function(t,n,r){var e=r(54),i=r(2),o=r(26),u=e.key,c=e.set;e.exp({metadata:function(t,n){return function(r,e){c(t,n,(void 0!==e?i:o)(r),u(e))}}})},function(t,n,r){var e=r(1);e(e.P+e.R,"Set",{toJSON:r(165)("Set")})},function(t,n,r){"use strict";var e=r(1),i=r(147)(!0);e(e.P,"String",{at:function(t){return i(this,t)}})},function(t,n,r){"use strict";var e=r(1),i=r(46),o=r(16),u=r(122),c=r(120),f=RegExp.prototype,a=function(t,n){this._r=t,this._s=n};r(139)(a,"RegExp String",function(){var t=this._r.exec(this._s);return{value:t,done:null===t}}),e(e.P,"String",{matchAll:function(t){if(i(this),!u(t))throw TypeError(t+" is not a regexp!");var n=String(this),r="flags"in f?String(t.flags):c.call(t),e=new RegExp(t.source,~r.indexOf("g")?r:"g"+r);return e.lastIndex=o(t.lastIndex),new a(e,n)}})},function(t,n,r){"use strict";var e=r(1),i=r(181);e(e.P,"String",{padEnd:function(t){return i(this,t,arguments.length>1?arguments[1]:void 0,!1)}})},function(t,n,r){"use strict";var e=r(1),i=r(181);e(e.P,"String",{padStart:function(t){return i(this,t,arguments.length>1?arguments[1]:void 0,!0)}})},function(t,n,r){"use strict";r(82)("trimLeft",function(t){return function(){return t(this,1)}},"trimStart")},function(t,n,r){"use strict";r(82)("trimRight",function(t){return function(){return t(this,2)}},"trimEnd")},function(t,n,r){r(153)("asyncIterator")},function(t,n,r){r(153)("observable")},function(t,n,r){var 
e=r(1);e(e.S,"System",{global:r(3)})},function(t,n,r){for(var e=r(155),i=r(28),o=r(3),u=r(27),c=r(80),f=r(7),a=f("iterator"),s=f("toStringTag"),l=c.Array,h=["NodeList","DOMTokenList","MediaList","StyleSheetList","CSSRuleList"],v=0;v<5;v++){var p,d=h[v],y=o[d],g=y&&y.prototype;if(g){g[a]||u(g,a,l),g[s]||u(g,s,d),c[d]=l;for(p in e)g[p]||i(g,p,e[p],!0)}}},function(t,n,r){var e=r(1),i=r(151);e(e.G+e.B,{setImmediate:i.set,clearImmediate:i.clear})},function(t,n,r){var e=r(3),i=r(1),o=r(121),u=r(207),c=e.navigator,f=!!c&&/MSIE .\./.test(c.userAgent),a=function(t){return f?function(n,r){return t(o(u,[].slice.call(arguments,2),"function"==typeof n?n:Function(n)),r)}:t};i(i.G+i.B+i.F*f,{setTimeout:a(e.setTimeout),setInterval:a(e.setInterval)})},function(t,n,r){r(330),r(269),r(271),r(270),r(273),r(275),r(280),r(274),r(272),r(282),r(281),r(277),r(278),r(276),r(268),r(279),r(283),r(284),r(236),r(238),r(237),r(286),r(285),r(256),r(266),r(267),r(257),r(258),r(259),r(260),r(261),r(262),r(263),r(264),r(265),r(239),r(240),r(241),r(242),r(243),r(244),r(245),r(246),r(247),r(248),r(249),r(250),r(251),r(252),r(253),r(254),r(255),r(317),r(322),r(329),r(320),r(312),r(313),r(318),r(323),r(325),r(308),r(309),r(310),r(311),r(314),r(315),r(316),r(319),r(321),r(324),r(326),r(327),r(328),r(231),r(233),r(232),r(235),r(234),r(220),r(218),r(224),r(221),r(227),r(229),r(217),r(223),r(214),r(228),r(212),r(226),r(225),r(219),r(222),r(211),r(213),r(216),r(215),r(230),r(155),r(302),r(307),r(184),r(303),r(304),r(305),r(306),r(287),r(183),r(185),r(186),r(342),r(331),r(332),r(337),r(340),r(341),r(335),r(338),r(336),r(339),r(333),r(334),r(288),r(289),r(290),r(291),r(292),r(295),r(293),r(294),r(296),r(297),r(298),r(299),r(301),r(300),r(343),r(369),r(372),r(371),r(373),r(374),r(370),r(375),r(376),r(354),r(357),r(353),r(351),r(352),r(355),r(356),r(346),r(368),r(377),r(345),r(347),r(349),r(348),r(350),r(359),r(360),r(362),r(361),r(364),r(363),r(365),r(366),r(367),r(344),r(358),r(380),r(379),r(378),t.exports=r(52
)},function(t,n){function r(t,n){if("string"==typeof n)return t.insertAdjacentHTML("afterend",n);var r=t.nextSibling;return r?t.parentNode.insertBefore(n,r):t.parentNode.appendChild(n)}t.exports=r},,,,,,,,,function(t,n,r){(function(n,r){!function(n){"use strict";function e(t,n,r,e){var i=n&&n.prototype instanceof o?n:o,u=Object.create(i.prototype),c=new p(e||[]);return u._invoke=s(t,r,c),u}function i(t,n,r){try{return{type:"normal",arg:t.call(n,r)}}catch(t){return{type:"throw",arg:t}}}function o(){}function u(){}function c(){}function f(t){["next","throw","return"].forEach(function(n){t[n]=function(t){return this._invoke(n,t)}})}function a(t){function n(r,e,o,u){var c=i(t[r],t,e);if("throw"!==c.type){var f=c.arg,a=f.value;return a&&"object"==typeof a&&m.call(a,"__await")?Promise.resolve(a.__await).then(function(t){n("next",t,o,u)},function(t){n("throw",t,o,u)}):Promise.resolve(a).then(function(t){f.value=t,o(f)},u)}u(c.arg)}function e(t,r){function e(){return new Promise(function(e,i){n(t,r,e,i)})}return o=o?o.then(e,e):e()}"object"==typeof r&&r.domain&&(n=r.domain.bind(n));var o;this._invoke=e}function s(t,n,r){var e=P;return function(o,u){if(e===F)throw new Error("Generator is already running");if(e===M){if("throw"===o)throw u;return y()}for(r.method=o,r.arg=u;;){var c=r.delegate;if(c){var f=l(c,r);if(f){if(f===A)continue;return f}}if("next"===r.method)r.sent=r._sent=r.arg;else if("throw"===r.method){if(e===P)throw e=M,r.arg;r.dispatchException(r.arg)}else"return"===r.method&&r.abrupt("return",r.arg);e=F;var a=i(t,n,r);if("normal"===a.type){if(e=r.done?M:j,a.arg===A)continue;return{value:a.arg,done:r.done}}"throw"===a.type&&(e=M,r.method="throw",r.arg=a.arg)}}}function l(t,n){var r=t.iterator[n.method];if(r===g){if(n.delegate=null,"throw"===n.method){if(t.iterator.return&&(n.method="return",n.arg=g,l(t,n),"throw"===n.method))return A;n.method="throw",n.arg=new TypeError("The iterator does not provide a 'throw' method")}return A}var 
e=i(r,t.iterator,n.arg);if("throw"===e.type)return n.method="throw",n.arg=e.arg,n.delegate=null,A;var o=e.arg;return o?o.done?(n[t.resultName]=o.value,n.next=t.nextLoc,"return"!==n.method&&(n.method="next",n.arg=g),n.delegate=null,A):o:(n.method="throw",n.arg=new TypeError("iterator result is not an object"),n.delegate=null,A)}function h(t){var n={tryLoc:t[0]};1 in t&&(n.catchLoc=t[1]),2 in t&&(n.finallyLoc=t[2],n.afterLoc=t[3]),this.tryEntries.push(n)}function v(t){var n=t.completion||{};n.type="normal",delete n.arg,t.completion=n}function p(t){this.tryEntries=[{tryLoc:"root"}],t.forEach(h,this),this.reset(!0)}function d(t){if(t){var n=t[w];if(n)return n.call(t);if("function"==typeof t.next)return t;if(!isNaN(t.length)){var r=-1,e=function n(){for(;++r<t.length;)if(m.call(t,r))return n.value=t[r],n.done=!1,n;return n.value=g,n.done=!0,n};return e.next=e}}return{next:y}}function y(){return{value:g,done:!0}}var g,b=Object.prototype,m=b.hasOwnProperty,x="function"==typeof Symbol?Symbol:{},w=x.iterator||"@@iterator",S=x.asyncIterator||"@@asyncIterator",_=x.toStringTag||"@@toStringTag",O="object"==typeof t,E=n.regeneratorRuntime;if(E)return void(O&&(t.exports=E));E=n.regeneratorRuntime=O?t.exports:{},E.wrap=e;var P="suspendedStart",j="suspendedYield",F="executing",M="completed",A={},N={};N[w]=function(){return this};var T=Object.getPrototypeOf,I=T&&T(T(d([])));I&&I!==b&&m.call(I,w)&&(N=I);var k=c.prototype=o.prototype=Object.create(N);u.prototype=k.constructor=c,c.constructor=u,c[_]=u.displayName="GeneratorFunction",E.isGeneratorFunction=function(t){var n="function"==typeof t&&t.constructor;return!!n&&(n===u||"GeneratorFunction"===(n.displayName||n.name))},E.mark=function(t){return Object.setPrototypeOf?Object.setPrototypeOf(t,c):(t.__proto__=c,_ in t||(t[_]="GeneratorFunction")),t.prototype=Object.create(k),t},E.awrap=function(t){return{__await:t}},f(a.prototype),a.prototype[S]=function(){return this},E.AsyncIterator=a,E.async=function(t,n,r,i){var o=new 
a(e(t,n,r,i));return E.isGeneratorFunction(n)?o:o.next().then(function(t){return t.done?t.value:o.next()})},f(k),k[_]="Generator",k.toString=function(){return"[object Generator]"},E.keys=function(t){var n=[];for(var r in t)n.push(r);return n.reverse(),function r(){for(;n.length;){var e=n.pop();if(e in t)return r.value=e,r.done=!1,r}return r.done=!0,r}},E.values=d,p.prototype={constructor:p,reset:function(t){if(this.prev=0,this.next=0,this.sent=this._sent=g,this.done=!1,this.delegate=null,this.method="next",this.arg=g,this.tryEntries.forEach(v),!t)for(var n in this)"t"===n.charAt(0)&&m.call(this,n)&&!isNaN(+n.slice(1))&&(this[n]=g)},stop:function(){this.done=!0;var t=this.tryEntries[0],n=t.completion;if("throw"===n.type)throw n.arg;return this.rval},dispatchException:function(t){function n(n,e){return o.type="throw",o.arg=t,r.next=n,e&&(r.method="next",r.arg=g),!!e}if(this.done)throw t;for(var r=this,e=this.tryEntries.length-1;e>=0;--e){var i=this.tryEntries[e],o=i.completion;if("root"===i.tryLoc)return n("end");if(i.tryLoc<=this.prev){var u=m.call(i,"catchLoc"),c=m.call(i,"finallyLoc");if(u&&c){if(this.prev<i.catchLoc)return n(i.catchLoc,!0);if(this.prev<i.finallyLoc)return n(i.finallyLoc)}else if(u){if(this.prev<i.catchLoc)return n(i.catchLoc,!0)}else{if(!c)throw new Error("try statement without catch or finally");if(this.prev<i.finallyLoc)return n(i.finallyLoc)}}}},abrupt:function(t,n){for(var r=this.tryEntries.length-1;r>=0;--r){var e=this.tryEntries[r];if(e.tryLoc<=this.prev&&m.call(e,"finallyLoc")&&this.prev<e.finallyLoc){var i=e;break}}i&&("break"===t||"continue"===t)&&i.tryLoc<=n&&n<=i.finallyLoc&&(i=null);var o=i?i.completion:{};return o.type=t,o.arg=n,i?(this.method="next",this.next=i.finallyLoc,A):this.complete(o)},complete:function(t,n){if("throw"===t.type)throw 
t.arg;return"break"===t.type||"continue"===t.type?this.next=t.arg:"return"===t.type?(this.rval=this.arg=t.arg,this.method="return",this.next="end"):"normal"===t.type&&n&&(this.next=n),A},finish:function(t){for(var n=this.tryEntries.length-1;n>=0;--n){var r=this.tryEntries[n];if(r.finallyLoc===t)return this.complete(r.completion,r.afterLoc),v(r),A}},catch:function(t){for(var n=this.tryEntries.length-1;n>=0;--n){var r=this.tryEntries[n];if(r.tryLoc===t){var e=r.completion;if("throw"===e.type){var i=e.arg;v(r)}return i}}throw new Error("illegal catch attempt")},delegateYield:function(t,n,r){return this.delegate={iterator:d(t),resultName:n,nextLoc:r},"next"===this.method&&(this.arg=g),A}}}("object"==typeof n?n:"object"==typeof window?window:"object"==typeof self?self:this)}).call(n,function(){return this}(),r(158))}])</script><script src="/./main.0cf68a.js"></script><script>!function(){!function(e){var t=document.createElement("script");document.getElementsByTagName("body")[0].appendChild(t),t.setAttribute("src",e)}("/slider.e37972.js")}()</script>


    
<div class="tools-col" q-class="show:isShow,hide:isShow|isFalse" q-on="click:stop(e)">
  <div class="tools-nav header-menu">
    
    
      
      
      
    
      
      
      
    
      
      
      
    
    

    <!-- Slider tab menu: each tab calls the theme's openSlider(), which flips
         one of the view-model flags (innerArchive / friends / aboutme) that the
         q-show panels below are bound to. Widths are emitted by the template. -->
    <ul style="width: 70%">
    
    
      
      <li style="width: 33.333333333333336%" q-on="click: openSlider(e, 'innerArchive')"><a href="javascript:void(0)" q-class="active:innerArchive">所有文章</a></li>
      
        
      
      <li style="width: 33.333333333333336%" q-on="click: openSlider(e, 'friends')"><a href="javascript:void(0)" q-class="active:friends">友链</a></li>
      
        
      
      <li style="width: 33.333333333333336%" q-on="click: openSlider(e, 'aboutme')"><a href="javascript:void(0)" q-class="active:aboutme">关于我</a></li>
      
        
    </ul>
  </div>
  <div class="tools-wrap">
    
    	<!-- "All posts" panel: client-side search over the JSON index produced by
    	     hexo-generator-json-content (see the jsonFail fallback below), plus an
    	     optional tag-cloud filter toggled by the checkbox switch. -->
    	<section class="tools-section tools-section-all" q-show="innerArchive">
        <div class="search-wrap">
          <!-- q-model two-way-binds the input to the "search" view-model field -->
          <input class="search-ipt" q-model="search" type="text" placeholder="find something…">
          <i class="icon-search icon" q-show="search|isEmptyStr"></i>
          <i class="icon-close icon" q-show="search|isNotEmptyStr" q-on="click:clearChose(e)"></i>
        </div>
        <div class="widget tagcloud search-tag">
          <p class="search-tag-wording">tag:</p>
          <label class="search-switch">
            <input type="checkbox" q-on="click:toggleTag(e)" q-attr="checked:showTags">
          </label>
          <!-- Tag cloud: one entry per blog tag, rendered at build time -->
          <ul class="article-tag-list" q-show="showTags">
             
              <li class="article-tag-list-item">
                <a href="javascript:void(0)" class="js-tag color4">Python笔记</a>
              </li>
             
              <li class="article-tag-list-item">
                <a href="javascript:void(0)" class="js-tag color3">Web安全笔记</a>
              </li>
            
            <div class="clearfix"></div>
          </ul>
        </div>
        <ul class="search-ul">
          <!-- Shown only when the JSON content index could not be loaded -->
          <p q-show="jsonFail" style="padding: 20px; font-size: 12px;">
            缺失模块。<br>1、请确保node版本大于6.2<br>2、在博客根目录（注意不是yilia根目录）执行以下命令：<br> npm i hexo-generator-json-content --save<br><br>
            3、在根目录_config.yml里添加配置：
<pre style="font-size: 12px;" q-show="jsonFail">
  jsonContent:
    meta: false
    pages: false
    posts:
      title: true
      date: true
      path: true
      text: false
      raw: false
      content: false
      slug: false
      updated: false
      comments: false
      link: false
      permalink: false
      excerpt: false
      categories: false
      tags: true
</pre>
          </p>
          <!-- One result row per matching post (q-repeat over "items");
               clicking a tag chip narrows the search via choseTag() -->
          <li class="search-li" q-repeat="items" q-show="isShow">
            <a q-attr="href:path|urlformat" class="search-title"><i class="icon-quo-left icon"></i><span q-text="title"></span></a>
            <p class="search-time">
              <i class="icon-calendar icon"></i>
              <span q-text="date|dateformat"></span>
            </p>
            <p class="search-tag">
              <i class="icon-price-tags icon"></i>
              <span q-repeat="tags" q-on="click:choseTag(e, name)" q-text="name|tagformat"></span>
            </p>
          </li>
        </ul>
    	</section>
    

    
    	<!-- "Friends" panel: static blogroll of external links. -->
    	<section class="tools-section tools-section-friends" q-show="friends">
  		
        <ul class="search-ul">
          
            <li class="search-li">
              <!-- rel="noopener noreferrer": target="_blank" would otherwise hand
                   the opened tab a window.opener reference back to this page
                   (reverse tabnabbing); noreferrer covers legacy browsers. -->
              <a href="https://blog.csdn.net/quekai01" target="_blank" rel="noopener noreferrer" class="search-title"><i class="icon-quo-left icon"></i>我的CSDN博客</a>
            </li>
          
        </ul>
  		
    	</section>
    

    
    	<!-- "About me" panel. The text carries HTML-escaped &lt;br&gt; entities;
    	     presumably the theme's script unescapes #js-aboutme before display —
    	     NOTE(review): confirm against slider.js before changing the entities. -->
    	<section class="tools-section tools-section-me" q-show="aboutme">
  	  	
  	  		<div class="aboutme-wrap" id="js-aboutme">很惭愧&lt;br&gt;&lt;br&gt;只做了一点微小的工作&lt;br&gt;谢谢大家</div>
  	  	
    	</section>
    
  </div>
  
</div>
    <!-- Root element of PhotoSwipe. Must have class pswp. -->
<!-- NOTE: this subtree is PhotoSwipe's stock boilerplate; every pswp__* class
     is an API hook looked up by photoswipe.js / PhotoSwipeUI_Default, so the
     structure must be kept verbatim. -->
<div class="pswp" tabindex="-1" role="dialog" aria-hidden="true">

    <!-- Background of PhotoSwipe. 
         It's a separate element as animating opacity is faster than rgba(). -->
    <div class="pswp__bg"></div>

    <!-- Slides wrapper with overflow:hidden. -->
    <div class="pswp__scroll-wrap">

        <!-- Container that holds slides. 
            PhotoSwipe keeps only 3 of them in the DOM to save memory.
            Don't modify these 3 pswp__item elements, data is added later on. -->
        <div class="pswp__container">
            <div class="pswp__item"></div>
            <div class="pswp__item"></div>
            <div class="pswp__item"></div>
        </div>

        <!-- Default (PhotoSwipeUI_Default) interface on top of sliding area. Can be changed. -->
        <div class="pswp__ui pswp__ui--hidden">

            <div class="pswp__top-bar">

                <!--  Controls are self-explanatory. Order can be changed. -->

                <div class="pswp__counter"></div>

                <button class="pswp__button pswp__button--close" title="Close (Esc)"></button>

                <!-- Share button hidden by the theme; PhotoSwipe still expects it to exist -->
                <button class="pswp__button pswp__button--share" style="display:none" title="Share"></button>

                <button class="pswp__button pswp__button--fs" title="Toggle fullscreen"></button>

                <button class="pswp__button pswp__button--zoom" title="Zoom in/out"></button>

                <!-- Preloader demo http://codepen.io/dimsemenov/pen/yyBWoR -->
                <!-- element will get class pswp__preloader--active when preloader is running -->
                <div class="pswp__preloader">
                    <div class="pswp__preloader__icn">
                      <div class="pswp__preloader__cut">
                        <div class="pswp__preloader__donut"></div>
                      </div>
                    </div>
                </div>
            </div>

            <div class="pswp__share-modal pswp__share-modal--hidden pswp__single-tap">
                <div class="pswp__share-tooltip"></div> 
            </div>

            <button class="pswp__button pswp__button--arrow--left" title="Previous (arrow left)">
            </button>

            <button class="pswp__button pswp__button--arrow--right" title="Next (arrow right)">
            </button>

            <div class="pswp__caption">
                <div class="pswp__caption__center"></div>
            </div>

        </div>

    </div>

</div>
  </div>
</body>
</html>