<!DOCTYPE html>
<html lang="zh-CN">
<head>
  <meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1, maximum-scale=2">
<meta name="theme-color" content="#222">
<meta name="generator" content="Hexo 4.0.0">
  <link rel="apple-touch-icon" sizes="180x180" href="/images/apple-touch-icon-next.png">
  <link rel="icon" type="image/png" sizes="32x32" href="/images/favicon-32x32-next.png">
  <link rel="icon" type="image/png" sizes="16x16" href="/images/favicon-16x16-next.png">
  <link rel="mask-icon" href="/images/logo.svg" color="#222">
  <link rel="alternate" href="/atom.xml" title="AiChery" type="application/atom+xml">

<link rel="stylesheet" href="/css/main.css">

<link rel="stylesheet" href="//fonts.googleapis.com/css?family=Lato:300,300italic,400,400italic,700,700italic&display=swap&subset=latin,latin-ext">
<link rel="stylesheet" href="/lib/font-awesome/css/font-awesome.min.css">


<script id="hexo-configurations">
  var NexT = window.NexT || {};
  var CONFIG = {
    root: '/',
    scheme: 'Gemini',
    version: '7.5.0',
    exturl: false,
    sidebar: {"position":"left","display":"post","offset":12,"onmobile":false},
    copycode: {"enable":false,"show_result":false,"style":null},
    back2top: {"enable":true,"sidebar":false,"scrollpercent":false},
    bookmark: {"enable":false,"color":"#222","save":"auto"},
    fancybox: false,
    mediumzoom: false,
    lazyload: false,
    pangu: false,
    algolia: {
      appID: '',
      apiKey: '',
      indexName: '',
      hits: {"per_page":10},
      labels: {"input_placeholder":"Search for Posts","hits_empty":"We didn't find any results for the search: ${query}","hits_stats":"${hits} results found in ${time} ms"}
    },
    localsearch: {"enable":false,"trigger":"auto","top_n_per_article":1,"unescape":false,"preload":false},
    path: '',
    motion: {"enable":true,"async":false,"transition":{"post_block":"fadeIn","post_header":"slideDownIn","post_body":"slideDownIn","coll_header":"slideLeftIn","sidebar":"slideUpIn"}},
    translation: {
      copy_button: '复制',
      copy_success: '复制成功',
      copy_failure: '复制失败'
    },
    sidebarPadding: 40
  };
</script>

  <meta name="description" content="Attention 机制在近几年来大火，在自然语言处理，信号处理，计算机视觉等领域取得了很大的进展。本文关注于计算机视觉领域，讲述 Attention 机制的原理，并以医学图像分析领域为依托，介绍了Attention机制的应用。">
<meta name="keywords" content="attention">
<meta property="og:type" content="article">
<meta property="og:title" content="计算机视觉中的Attention机制">
<meta property="og:url" content="http:&#x2F;&#x2F;yoursite.com&#x2F;2019&#x2F;11&#x2F;08&#x2F;%E8%AE%A1%E7%AE%97%E6%9C%BA%E8%A7%86%E8%A7%89%E4%B8%AD%E7%9A%84Attention%E6%9C%BA%E5%88%B6&#x2F;index.html">
<meta property="og:site_name" content="AiChery">
<meta property="og:description" content="Attention 机制在近几年来大火，在自然语言处理，信号处理，计算机视觉等领域取得了很大的进展。本文关注于计算机视觉领域，讲述 Attention 机制的原理，并以医学图像分析领域为依托，介绍了Attention机制的应用。">
<meta property="og:locale" content="zh-CN">
<meta property="og:image" content="https:&#x2F;&#x2F;blog-1253296122.cos.ap-chengdu.myqcloud.com&#x2F;attention_01.png">
<meta property="og:image" content="https:&#x2F;&#x2F;blog-1253296122.cos.ap-chengdu.myqcloud.com&#x2F;attention_02.png">
<meta property="og:image" content="https:&#x2F;&#x2F;blog-1253296122.cos.ap-chengdu.myqcloud.com&#x2F;attention_03.png">
<meta property="og:image" content="https:&#x2F;&#x2F;blog-1253296122.cos.ap-chengdu.myqcloud.com&#x2F;20160707204048899.gif">
<meta property="og:image" content="https:&#x2F;&#x2F;blog-1253296122.cos.ap-chengdu.myqcloud.com&#x2F;attention_04.png">
<meta property="og:updated_time" content="2019-11-09T14:33:01.507Z">
<meta name="twitter:card" content="summary">
<meta name="twitter:image" content="https:&#x2F;&#x2F;blog-1253296122.cos.ap-chengdu.myqcloud.com&#x2F;attention_01.png">

<link rel="canonical" href="http://yoursite.com/2019/11/08/%E8%AE%A1%E7%AE%97%E6%9C%BA%E8%A7%86%E8%A7%89%E4%B8%AD%E7%9A%84Attention%E6%9C%BA%E5%88%B6/">


<script id="page-configurations">
  // https://hexo.io/docs/variables.html
  CONFIG.page = {
    sidebar: "",
    isHome: false,
    isPost: true,
    isPage: false,
    isArchive: false
  };
</script>

  <title>Attention Mechanisms in Computer Vision | AiChery</title>
  






  <noscript>
  <style>
  .use-motion .brand,
  .use-motion .menu-item,
  .sidebar-inner,
  .use-motion .post-block,
  .use-motion .pagination,
  .use-motion .comments,
  .use-motion .post-header,
  .use-motion .post-body,
  .use-motion .collection-header { opacity: initial; }

  .use-motion .site-title,
  .use-motion .site-subtitle {
    opacity: initial;
    top: initial;
  }

  .use-motion .logo-line-before i { left: initial; }
  .use-motion .logo-line-after i { right: initial; }
  </style>
</noscript>

</head>

<body itemscope itemtype="http://schema.org/WebPage">
  <div class="container use-motion">
    <div class="headband"></div>

    <header class="header" itemscope itemtype="http://schema.org/WPHeader">
      <div class="header-inner"><div class="site-brand-container">
  <div class="site-meta">

    <div>
      <a href="/" class="brand" rel="start">
        <span class="logo-line-before"><i></i></span>
        <span class="site-title">AiChery</span>
        <span class="logo-line-after"><i></i></span>
      </a>
    </div>
        <p class="site-subtitle">Life is about waiting for the right moment to act.</p>
  </div>

  <div class="site-nav-toggle">
    <div class="toggle" aria-label="切换导航栏">
      <span class="toggle-line toggle-line-first"></span>
      <span class="toggle-line toggle-line-middle"></span>
      <span class="toggle-line toggle-line-last"></span>
    </div>
  </div>
</div>


<nav class="site-nav">
  
  <ul id="menu" class="menu">
        <li class="menu-item menu-item-home">

    <a href="/" rel="section"><i class="fa fa-fw fa-home"></i>首页</a>

  </li>
        <li class="menu-item menu-item-about">

    <a href="/about/" rel="section"><i class="fa fa-fw fa-user"></i>关于</a>

  </li>
        <li class="menu-item menu-item-tags">

    <a href="/tags/" rel="section"><i class="fa fa-fw fa-tags"></i>标签</a>

  </li>
        <li class="menu-item menu-item-categories">

    <a href="/categories/" rel="section"><i class="fa fa-fw fa-th"></i>分类</a>

  </li>
        <li class="menu-item menu-item-archives">

    <a href="/archives/" rel="section"><i class="fa fa-fw fa-archive"></i>归档</a>

  </li>
        <li class="menu-item menu-item-简历">

    <a href="/Resume/" rel="section"><i class="fa fa-fw fa-th"></i>简历</a>

  </li>
  </ul>

</nav>
</div>
    </header>

    
  <div class="back-to-top">
    <i class="fa fa-arrow-up"></i>
    <span>0%</span>
  </div>


    <main class="main">
      <div class="main-inner">
        <div class="content-wrap">
          

          <div class="content">
            

  <div class="posts-expand">
      
  
  
  <article itemscope itemtype="http://schema.org/Article" class="post-block " lang="zh-CN">
    <link itemprop="mainEntityOfPage" href="http://yoursite.com/2019/11/08/%E8%AE%A1%E7%AE%97%E6%9C%BA%E8%A7%86%E8%A7%89%E4%B8%AD%E7%9A%84Attention%E6%9C%BA%E5%88%B6/">

    <span hidden itemprop="author" itemscope itemtype="http://schema.org/Person">
      <meta itemprop="image" content="https://blog-1253296122.cos.ap-chengdu.myqcloud.com/avatar.jpg">
      <meta itemprop="name" content="Wei Cheney">
      <meta itemprop="description" content="谦谦君子，温润如玉">
    </span>

    <span hidden itemprop="publisher" itemscope itemtype="http://schema.org/Organization">
      <meta itemprop="name" content="AiChery">
    </span>
      <header class="post-header">
        <h1 class="post-title" itemprop="name headline">
          Attention Mechanisms in Computer Vision
        </h1>

        <div class="post-meta">
            <span class="post-meta-item">
              <span class="post-meta-item-icon">
                <i class="fa fa-calendar-o"></i>
              </span>
              <span class="post-meta-item-text">发表于</span>

              <time title="创建时间：2019-11-08 12:26:25" itemprop="dateCreated datePublished" datetime="2019-11-08T12:26:25+08:00">2019-11-08</time>
            </span>
              <span class="post-meta-item">
                <span class="post-meta-item-icon">
                  <i class="fa fa-calendar-check-o"></i>
                </span>
                <span class="post-meta-item-text">更新于</span>
                <time title="修改时间：2019-11-09 22:33:01" itemprop="dateModified" datetime="2019-11-09T22:33:01+08:00">2019-11-09</time>
              </span>
            <span class="post-meta-item">
              <span class="post-meta-item-icon">
                <i class="fa fa-folder-o"></i>
              </span>
              <span class="post-meta-item-text">分类于</span>
                <span itemprop="about" itemscope itemtype="http://schema.org/Thing">
                  <a href="/categories/%E8%AE%A1%E7%AE%97%E6%9C%BA%E8%A7%86%E8%A7%89/" itemprop="url" rel="index">
                    <span itemprop="name">计算机视觉</span>
                  </a>
                </span>
            </span>

          
            <span class="post-meta-item" title="阅读次数" id="busuanzi_container_page_pv" style="display: none;">
              <span class="post-meta-item-icon">
                <i class="fa fa-eye"></i>
              </span>
              <span class="post-meta-item-text">阅读次数：</span>
              <span id="busuanzi_value_page_pv"></span>
            </span>

        </div>
      </header>

    
    
    
    <div class="post-body" itemprop="articleBody">

      
        <p><img src="https://blog-1253296122.cos.ap-chengdu.myqcloud.com/attention_01.png" alt=""></p>
<blockquote>
<p>Attention mechanisms have become very popular in recent years, driving significant progress in natural language processing, signal processing, computer vision, and other fields. This post focuses on computer vision: it explains how attention mechanisms work and, using medical image analysis as the application area, introduces how they are used.</p>
</blockquote>
<a id="more"></a>
<p>When reading a chest X-ray, a radiologist typically first looks at the whole image to see whether anything is wrong, then concentrates on the local regions that might contain pathology, and finally examines the entire image again to reach a diagnostic conclusion.</p>
<p><img src="https://blog-1253296122.cos.ap-chengdu.myqcloud.com/attention_02.png" alt=""></p>
<p>The attention-guided CNN of Guan et al. [1] mirrors this workflow. It has two main branches, a global branch and a local branch, plus a fusion branch; all three are classification networks that predict whether a pathology is present in the image. Given an image, the global branch is first fine-tuned as a classification CNN on the full image. An attended region is then cropped from the global image and used to train the local branch for classification. Finally, the last pooling layers of the global and local branches are concatenated to fine-tune the fusion branch.</p>
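<p>As a rough sketch of the fusion step just described (not the authors' code; the pooled feature size of 2048 and the 14 pathology classes are assumptions, e.g. a ResNet-50 backbone on ChestX-ray14), the fusion branch simply classifies the concatenation of the two branches' pooled features:</p>
<pre><code>import torch
import torch.nn as nn

class FusionBranch(nn.Module):
    """Classify the concatenated pooled features of the global and local branches."""
    def __init__(self, global_dim=2048, local_dim=2048, num_classes=14):
        super().__init__()
        self.fc = nn.Linear(global_dim + local_dim, num_classes)

    def forward(self, pooled_global, pooled_local):
        # Concatenate the last pooling layers of the two branches, then classify.
        fused = torch.cat([pooled_global, pooled_local], dim=1)
        return self.fc(fused)

logits = FusionBranch()(torch.randn(4, 2048), torch.randn(4, 2048))  # shape (4, 14)
</code></pre>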
<p>The overall idea is straightforward; the interesting part is how the attended region is obtained:</p>
<p>Let $f_g^k(x,y)$ denote the $k$-th channel of the last convolutional layer, where $g$ indicates the global branch. First, take the absolute value of $f_g^k(x,y)$; then compute the maximum over the channels to obtain the attention heat map $H_g$:</p>
<script type="math/tex; mode=display">
H_g(x,y) = \max_k\left(\left|f_g^k(x,y)\right|\right), \quad k \in \{1,\dots,K\}. \tag{3}</script><p>The value of $H_g(x,y)$ reflects how important the activations at that spatial location are.</p>
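<p>A minimal NumPy sketch of Eq. (3) and of how a binary mask can be derived from it (the normalisation and the threshold value <code>tau</code> are illustrative assumptions, not the exact procedure from the paper):</p>
<pre><code>import numpy as np

def attention_heatmap(features):
    """features: (K, H, W) array holding the channels f_g^k(x, y)."""
    return np.abs(features).max(axis=0)       # H_g(x, y) = max_k |f_g^k(x, y)|

def hard_attention_mask(heatmap, tau=0.7):
    """Binarise the heat map; tau is a hypothetical threshold."""
    h = (heatmap - heatmap.min()) / (heatmap.max() - heatmap.min() + 1e-8)
    return (h >= tau).astype(np.float32)      # weights are either 0 or 1

features = np.random.randn(512, 7, 7)         # toy last-layer feature maps
mask = hard_attention_mask(attention_heatmap(features))
</code></pre>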
<p>The scheme above can be described as <strong>hard attention</strong>: the attention weights are either 0 or 1 (a location is either kept in the cropped region or discarded). The following sections introduce several <strong>soft attention</strong> mechanisms.</p>
<h2 id="Squeeze-And-Excitation-Networks"><a href="#Squeeze-And-Excitation-Networks" class="headerlink" title="Squeeze-And-Excitation-Networks"></a>Squeeze-And-Excitation-Networks</h2><p><img src="https://blog-1253296122.cos.ap-chengdu.myqcloud.com/attention_03.png" alt=""></p>
<p>For any transformation $\textbf{F}_{tr}$ mapping $\textbf{X}$ to $\textbf{U}$, a squeeze operation $\textbf{F}_{sq}$ first aggregates the global spatial information of each feature map; it is followed by an excitation operation $\textbf{F}_{ex}$, a self-gating operation that produces a channel-wise weight response, which is then multiplied channel-wise with the output of $\textbf{F}_{tr}$.</p>
<p>The squeeze operation is defined as:</p>
<script type="math/tex; mode=display">
z_c = \textbf{F}_{sq}(u_c) = \frac{1}{H\times W} \sum_{i=1}^H \sum_{j=1}^W u_c(i,j)</script><p>which is simply global average pooling. The excitation operation is defined as:</p>
<script type="math/tex; mode=display">
s = \textbf{F}_{ex}(z,W) = \sigma(g(z,W)) = \sigma(W_2\delta(W_1z))</script><p>This consists of two fully connected layers (equivalently, $1\times1$ convolutions): $W_1$ reduces the number of channels and $W_2$ restores it, with $\delta$ a ReLU and $\sigma$ a sigmoid. The resulting weights are multiplied with the output of $\textbf{F}_{tr}$, which can be viewed as a form of self-attention.</p>
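<p>A minimal PyTorch sketch of an SE block following the two operations above (the reduction ratio of 16 is the paper's default; everything else is a plain, illustrative implementation rather than the official code):</p>
<pre><code>import torch
import torch.nn as nn

class SEBlock(nn.Module):
    def __init__(self, channels, reduction=16):
        super().__init__()
        self.fc1 = nn.Linear(channels, channels // reduction)  # W1: reduce channels
        self.fc2 = nn.Linear(channels // reduction, channels)  # W2: restore channels
        self.relu = nn.ReLU(inplace=True)                      # delta
        self.sigmoid = nn.Sigmoid()                            # sigma

    def forward(self, u):                       # u = F_tr(x), shape (N, C, H, W)
        z = u.mean(dim=(2, 3))                  # squeeze: global average pooling -> (N, C)
        s = self.sigmoid(self.fc2(self.relu(self.fc1(z))))     # excitation -> (N, C)
        return u * s.view(u.size(0), -1, 1, 1)  # channel-wise rescaling of F_tr's output

x = torch.randn(2, 64, 32, 32)
print(SEBlock(64)(x).shape)                     # torch.Size([2, 64, 32, 32])
</code></pre>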
<h2 id="Stand-Alone-Self-Attention"><a href="#Stand-Alone-Self-Attention" class="headerlink" title="Stand-Alone Self-Attention"></a>Stand-Alone Self-Attention</h2><p>卷积是现代计算机视觉系统的基本单元。最近的方法主张 going beyond convolutions 以捕获 long-range dependencies。这些工作着重于 content-based interactions（ 例如 self-attention 和 non-local means ）增强卷积模型。随之而来的自然问题是，attention 是否可以作为视觉模型的 stand-alone primitive，而不是仅仅作为卷积之上的增强。在开发和测试 pure self-attention视觉模型时，作者验证了self-attention 确实可以是有效的独立层。使用应用于 ResNet 模型的一种形式的 self-attentional 替换 spatial convolutions 的所有实例的简单过程将产生一个 fully self-attentional 的模型，该模型在 ImageNet 分类表现出色，FLOPS减少了12％，参数减少了29％。在COCO对象检测中，pure self-attention 模型与 RetinaNe t的 mAP 相匹配，而FLOPS减少了39％，参数减少了34％。详细的 ablation 研究表明，self-attention 在后面的层中使用时尤其有影响。这些结果表明，stand-alone self-attention 是视觉从业者工具箱的重要补充。</p>
<p>CNNs have advanced rapidly, but capturing long-range interactions remains challenging. Attention mechanisms have recently been widely adopted to address this problem, for example Squeeze-and-Excitation, which is channel-based attention, and Non-local blocks, which are spatially-aware attention. These works use global attention layers as an add-on to existing networks. Because such global forms attend over all spatial positions of the input, their use is restricted to small inputs, which typically requires aggressive downsampling of the original image.</p>
<p><img src="https://blog-1253296122.cos.ap-chengdu.myqcloud.com/20160707204048899.gif" alt=""></p>
<p>The animation above comes from <a href="https://blog.csdn.net/v_july_v/article/details/51812459" target="_blank" rel="noopener">this blog post</a> and illustrates the convolution process in detail.</p>
<p>A standard convolution can be written as:</p>
<script type="math/tex; mode=display">
y_{ij} = \sum_{a,b\in \mathcal{N}_k(i,j)}W_{i-a,j-b}x_{ab}</script><p><img src="https://blog-1253296122.cos.ap-chengdu.myqcloud.com/attention_04.png" alt=""></p>
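<p>A plain NumPy sketch of the convolution formula above, for a single channel with "same" zero padding (illustrative only; the kernel flip makes the indexing match $W_{i-a,\,j-b}$ literally, whereas deep-learning libraries implement the unflipped cross-correlation variant):</p>
<pre><code>import numpy as np

def local_conv(x, W):
    """x: (H, W) input; W: (k, k) kernel with odd k."""
    k = W.shape[0]
    r = k // 2
    xp = np.pad(x, r)                                  # zero-pad the border
    y = np.zeros_like(x, dtype=float)
    for i in range(x.shape[0]):
        for j in range(x.shape[1]):
            patch = xp[i:i + k, j:j + k]               # the values x_ab over N_k(i, j)
            y[i, j] = (W[::-1, ::-1] * patch).sum()    # sum of W_{i-a, j-b} * x_ab
    return y

y = local_conv(np.random.randn(8, 8), np.random.randn(3, 3))
</code></pre>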
<p>To be continued…</p>
<h3 id="参考资料："><a href="#参考资料：" class="headerlink" title="参考资料："></a>参考资料：</h3><p>[1] Guan, Qingji, et al. “Diagnose like a radiologist: Attention guided convolutional neural network for thorax disease classification.” <em>arXiv preprint arXiv:1801.09927</em> (2018). <a href="https://arxiv.org/abs/1801.09927" target="_blank" rel="noopener">https://arxiv.org/abs/1801.09927</a></p>
<p>[2] <a href="https://blog.csdn.net/cskywit/article/details/79569788" target="_blank" rel="noopener">https://blog.csdn.net/cskywit/article/details/79569788</a></p>
<p>[3] Hu, Jie, Li Shen, and Gang Sun. “Squeeze-and-Excitation Networks.” <em>CVPR 2018: Computer Vision and Pattern Recognition</em> (2018), pp. 7132–7141.</p>
<p>[4] Ramachandran, Prajit, et al. “Stand-Alone Self-Attention in Vision Models.” <em>arXiv preprint arXiv:1906.05909</em> (2019).</p>
<p>[5] <a href="https://towardsdatascience.com/self-attention-in-computer-vision-2782727021f6" target="_blank" rel="noopener">Self-Attention In Computer Vision - towards data science</a> </p>
<p>[6] <a href="https://www.zhuanzhi.ai/document/a5b6041ba0744b14cddd1f7e45899176" target="_blank" rel="noopener">自注意力机制在计算机视觉中的应用【附PPT与视频资料】- 专知</a></p>
<p>[7] <a href="https://lilianweng.github.io/lil-log/2018/06/24/attention-attention.html" target="_blank" rel="noopener">Attention? Attention!</a></p>

    </div>

    
    
    
      
        <div class="reward-container">
  <div></div>
  <button disable="enable" onclick="var qr = document.getElementById(&quot;qr&quot;); qr.style.display = (qr.style.display === 'none') ? 'block' : 'none';">
    打赏
  </button>
  <div id="qr" style="display: none;">
      
      <div style="display: inline-block;">
        <img src="/images/wechatpay.png" alt="Wei Cheney 微信支付">
        <p>微信支付</p>
      </div>
      
      <div style="display: inline-block;">
        <img src="/images/alipay.png" alt="Wei Cheney 支付宝">
        <p>支付宝</p>
      </div>

  </div>
</div>


      <footer class="post-footer">
          <div class="post-tags">
              <a href="/tags/attention/" rel="tag"># attention</a>
          </div>

        

          <div class="post-nav">
            <div class="post-nav-next post-nav-item">
                <a href="/2019/11/07/%E7%89%B9%E5%BE%81%E9%80%89%E6%8B%A9/" rel="next" title="特征选择">
                  <i class="fa fa-chevron-left"></i> 特征选择
                </a>
            </div>

            <span class="post-nav-divider"></span>

            <div class="post-nav-prev post-nav-item">
                <a href="/2019/11/11/Non-local%20Neural%20Networks%E7%AC%94%E8%AE%B0/" rel="prev" title="Non-local Neural Networks笔记">
                  Non-local Neural Networks笔记 <i class="fa fa-chevron-right"></i>
                </a>
            </div>
          </div>
      </footer>
    
  </article>
  
  
  

  </div>


          </div>
          
    <div class="comments" id="gitalk-container"></div>

        </div>
          
  
  <div class="toggle sidebar-toggle">
    <span class="toggle-line toggle-line-first"></span>
    <span class="toggle-line toggle-line-middle"></span>
    <span class="toggle-line toggle-line-last"></span>
  </div>

  <aside class="sidebar">
    <div class="sidebar-inner">

      <ul class="sidebar-nav motion-element">
        <li class="sidebar-nav-toc">
          文章目录
        </li>
        <li class="sidebar-nav-overview">
          站点概览
        </li>
      </ul>

      <!--noindex-->
      <div class="post-toc-wrap sidebar-panel">
          <div class="post-toc motion-element"><ol class="nav"><li class="nav-item nav-level-2"><a class="nav-link" href="#Squeeze-And-Excitation-Networks"><span class="nav-number">1.</span> <span class="nav-text">Squeeze-And-Excitation-Networks</span></a></li><li class="nav-item nav-level-2"><a class="nav-link" href="#Stand-Alone-Self-Attention"><span class="nav-number">2.</span> <span class="nav-text">Stand-Alone Self-Attention</span></a><ol class="nav-child"><li class="nav-item nav-level-3"><a class="nav-link" href="#参考资料："><span class="nav-number">2.1.</span> <span class="nav-text">参考资料：</span></a></li></ol></li></ol></div>
      </div>
      <!--/noindex-->

      <div class="site-overview-wrap sidebar-panel">
        <div class="site-author motion-element" itemprop="author" itemscope itemtype="http://schema.org/Person">
  <img class="site-author-image" itemprop="image" alt="Wei Cheney"
    src="https://blog-1253296122.cos.ap-chengdu.myqcloud.com/avatar.jpg">
  <p class="site-author-name" itemprop="name">Wei Cheney</p>
  <div class="site-description" itemprop="description">谦谦君子，温润如玉</div>
</div>
<div class="site-state-wrap motion-element">
  <nav class="site-state">
      <div class="site-state-item site-state-posts">
          <a href="/archives/">
        
          <span class="site-state-item-count">17</span>
          <span class="site-state-item-name">日志</span>
        </a>
      </div>
      <div class="site-state-item site-state-categories">
            <a href="/categories/">
          
        <span class="site-state-item-count">3</span>
        <span class="site-state-item-name">分类</span></a>
      </div>
      <div class="site-state-item site-state-tags">
            <a href="/tags/">
          
        <span class="site-state-item-count">8</span>
        <span class="site-state-item-name">标签</span></a>
      </div>
  </nav>
</div>
  <div class="feed-link motion-element">
    <a href="/atom.xml" rel="alternate">
      <i class="fa fa-rss"></i>RSS
    </a>
  </div>
  <div class="links-of-author motion-element">
      <span class="links-of-author-item">
        <a href="https://github.com/chenypic" title="GitHub &amp;rarr; https:&#x2F;&#x2F;github.com&#x2F;chenypic" rel="noopener" target="_blank"><i class="fa fa-fw fa-github"></i>GitHub</a>
      </span>
      <span class="links-of-author-item">
        <a href="/mailto:cherycheny@gmail.com" title="E-Mail &amp;rarr; mailto:cherycheny@gmail.com" rel="noopener" target="_blank"><i class="fa fa-fw fa-envelope"></i>E-Mail</a>
      </span>
      <span class="links-of-author-item">
        <a href="https://weibo.com/2923640394" title="Weibo &amp;rarr; https:&#x2F;&#x2F;weibo.com&#x2F;2923640394" rel="noopener" target="_blank"><i class="fa fa-fw fa-weibo"></i>Weibo</a>
      </span>
      <span class="links-of-author-item">
        <a href="https://twitter.com/cherycheny" title="Twitter &amp;rarr; https:&#x2F;&#x2F;twitter.com&#x2F;cherycheny" rel="noopener" target="_blank"><i class="fa fa-fw fa-twitter"></i>Twitter</a>
      </span>
  </div>



      </div>

    </div>
  </aside>
  <div id="sidebar-dimmer"></div>


      </div>
    </main>

    <footer class="footer">
      <div class="footer-inner">
        

<div class="copyright">
  
  &copy; 
  <span itemprop="copyrightYear">2021</span>
  <span class="with-love">
    <i class="fa fa-user"></i>
  </span>
  <span class="author" itemprop="copyrightHolder">Wei Cheney</span>
</div>
  <div class="powered-by">由 <a href="https://hexo.io/" class="theme-link" rel="noopener" target="_blank">Hexo</a> 强力驱动 v4.0.0
  </div>
  <span class="post-meta-divider">|</span>
  <div class="theme-info">主题 – <a href="https://theme-next.org/" class="theme-link" rel="noopener" target="_blank">NexT.Gemini</a> v7.5.0
  </div>

        
<div class="busuanzi-count">
  <script async src="https://busuanzi.ibruce.info/busuanzi/2.3/busuanzi.pure.mini.js"></script>
    <span class="post-meta-item" id="busuanzi_container_site_uv" style="display: none;">
      <span class="post-meta-item-icon">
        <i class="fa fa-user"></i>
      </span>
      <span class="site-uv" title="总访客量">
        <span id="busuanzi_value_site_uv"></span>
      </span>
    </span>
    <span class="post-meta-divider">|</span>
    <span class="post-meta-item" id="busuanzi_container_site_pv" style="display: none;">
      <span class="post-meta-item-icon">
        <i class="fa fa-eye"></i>
      </span>
      <span class="site-pv" title="总访问量">
        <span id="busuanzi_value_site_pv"></span>
      </span>
    </span>
</div>












        
      </div>
    </footer>
  </div>

  
  <script src="/lib/anime.min.js"></script>
  <script src="/lib/velocity/velocity.min.js"></script>
  <script src="/lib/velocity/velocity.ui.min.js"></script>
<script src="/js/utils.js"></script><script src="/js/motion.js"></script>
<script src="/js/schemes/pisces.js"></script>
<script src="/js/next-boot.js"></script>



  
















  

  
      
<script type="text/x-mathjax-config">
    MathJax.Ajax.config.path['mhchem'] = '//cdn.jsdelivr.net/npm/mathjax-mhchem@3';

  MathJax.Hub.Config({
    tex2jax: {
      inlineMath: [ ['$', '$'], ['\\(', '\\)'] ],
      processEscapes: true,
      skipTags: ['script', 'noscript', 'style', 'textarea', 'pre', 'code']
    },
    TeX: {
        extensions: ['[mhchem]/mhchem.js'],
      equationNumbers: {
        autoNumber: 'AMS'
      }
    }
  });

  MathJax.Hub.Register.StartupHook('TeX Jax Ready', function() {
    MathJax.InputJax.TeX.prefilterHooks.Add(function(data) {
      if (data.display) {
        var next = data.script.nextSibling;
        while (next && next.nodeName.toLowerCase() === '#text') {
          next = next.nextSibling;
        }
        if (next && next.nodeName.toLowerCase() === 'br') {
          next.parentNode.removeChild(next);
        }
      }
    });
  });

  MathJax.Hub.Queue(function() {
    var all = MathJax.Hub.getAllJax(), i;
    for (i = 0; i < all.length; i += 1) {
      element = document.getElementById(all[i].inputID + '-Frame').parentNode;
      if (element.nodeName.toLowerCase() == 'li') {
        element = element.parentNode;
      }
      element.classList.add('has-jax');
    }
  });
</script>
<script>
  NexT.utils.getScript('//cdn.jsdelivr.net/npm/mathjax@2/MathJax.js?config=TeX-AMS-MML_HTMLorMML', () => {
    MathJax.Hub.Typeset();
  }, window.MathJax);
</script>

    

  

<link rel="stylesheet" href="//cdn.jsdelivr.net/npm/gitalk@1/dist/gitalk.min.css">

<script>
  NexT.utils.getScript('//cdn.jsdelivr.net/npm/gitalk@1/dist/gitalk.min.js', () => {
    var gitalk = new Gitalk({
      clientID: 'b518bd54c879c20706b4',
      clientSecret: 'f601dee7ed1d248ae971191226d140acbb9ca107',
      repo: 'PingLun',
      owner: 'chenypic',
      admin: ['chenypic'],
      id: '78dd37496a68710294f0174c569ca3c9',
        language: '',
      distractionFreeMode: 'true'
    });
    gitalk.render('gitalk-container');
  }, window.Gitalk);
</script>

</body>
</html>
