<!DOCTYPE html><html lang="zh-CN" data-theme="light"><head><meta charset="UTF-8"><meta http-equiv="X-UA-Compatible" content="IE=edge"><meta name="viewport" content="width=device-width, initial-scale=1.0,viewport-fit=cover"><title>光场资料收集（转） | Leephy,Lifei Ren</title><meta name="author" content="Lifei Ren"><meta name="copyright" content="Lifei Ren"><meta name="format-detection" content="telephone=no"><meta name="theme-color" content="#ffffff"><meta name="description" content="Links About Light Field 斯坦福大学光场数据库：老牌斯坦福大学计算机图形实验室(Computer Graphics Laboratory)提供的，该数据库所在网站还提供了光场的采集设备，相机标定以及可视化工具。 斯坦福大学计算机图形实验室的Marc Levoy教授做的动画仿真，利于理解。Flash applets on some technical aspects of ph">
<meta property="og:type" content="article">
<meta property="og:title" content="光场资料收集（转）">
<meta property="og:url" content="http://renlf.cn/2024/09/05/%E5%85%89%E5%9C%BA%E8%B5%84%E6%96%99%E6%94%B6%E9%9B%86/index.html">
<meta property="og:site_name" content="Leephy,Lifei Ren">
<meta property="og:description" content="Links About Light Field 斯坦福大学光场数据库：老牌斯坦福大学计算机图形实验室(Computer Graphics Laboratory)提供的，该数据库所在网站还提供了光场的采集设备，相机标定以及可视化工具。 斯坦福大学计算机图形实验室的Marc Levoy教授做的动画仿真，利于理解。Flash applets on some technical aspects of ph">
<meta property="og:locale" content="zh_CN">
<meta property="og:image" content="https://i.loli.net/2020/05/01/gkihqEjXxJ5UZ1C.jpg">
<meta property="article:published_time" content="2024-09-05T15:26:22.000Z">
<meta property="article:modified_time" content="2024-09-06T05:44:49.974Z">
<meta property="article:author" content="Lifei Ren">
<meta property="article:tag" content="光场">
<meta property="article:tag" content="计算成像">
<meta name="twitter:card" content="summary">
<meta name="twitter:image" content="https://i.loli.net/2020/05/01/gkihqEjXxJ5UZ1C.jpg"><link rel="shortcut icon" href="/img/favicon.png"><link rel="canonical" href="http://renlf.cn/2024/09/05/%E5%85%89%E5%9C%BA%E8%B5%84%E6%96%99%E6%94%B6%E9%9B%86/index.html"><link rel="preconnect" href="//cdn.jsdelivr.net"/><link rel="preconnect" href="//busuanzi.ibruce.info"/><link rel="stylesheet" href="/css/index.css?v=4.13.0"><link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/@fortawesome/fontawesome-free@6.5.1/css/all.min.css"><link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/node-snackbar@0.1.16/dist/snackbar.min.css" media="print" onload="this.media='all'"><link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/@fancyapps/ui@5.0.33/dist/fancybox/fancybox.min.css" media="print" onload="this.media='all'"><script>const GLOBAL_CONFIG = {
  root: '/',
  // Search providers are not enabled on this site.
  algolia: undefined,
  localSearch: undefined,
  // Simplified <-> Traditional Chinese toggle texts and options.
  translate: {"defaultEncoding":2,"translateDelay":0,"msgToTraditionalChinese":"繁","msgToSimplifiedChinese":"簡"},
  noticeOutdate: undefined,
  // Code-block rendering options for the highlight.js plugin.
  highlight: {"plugin":"highlight.js","highlightCopy":true,"highlightLang":true,"highlightHeightLimit":false},
  // Toast messages shown after a copy-to-clipboard attempt.
  copy: {
    success: '复制成功',
    error: '复制错误',
    noSupport: '浏览器不支持'
  },
  // Whether dates render as relative ("x 天前") instead of absolute.
  relativeDate: {
    homepage: false,
    post: false
  },
  // Unit suffix used when displaying the site's running time.
  runtime: '天',
  // Suffixes used for relative-date formatting.
  dateSuffix: {
    just: '刚刚',
    min: '分钟前',
    hour: '小时前',
    day: '天前',
    month: '个月前'
  },
  // Copyright notice appended to copied text once it exceeds limitCount characters.
  copyright: {"limitCount":50,"languages":{"author":"作者: Lifei Ren","link":"链接: ","source":"来源: Leephy,Lifei Ren","info":"著作权归作者所有。商业转载请联系作者获得授权，非商业转载请注明出处。"}},
  // Image lightbox implementation to load on demand.
  lightbox: 'fancybox',
  // Snackbar toast texts, colors, and screen position.
  Snackbar: {"chs_to_cht":"你已切换为繁体中文","cht_to_chs":"你已切换为简体中文","day_to_night":"你已切换为深色模式","night_to_day":"你已切换为浅色模式","bgLight":"#49b1f5","bgDark":"#1f1f1f","position":"bottom-left"},
  // Infinite-scroll library (CDN URL) and its "load more" button label.
  infinitegrid: {
    js: 'https://cdn.jsdelivr.net/npm/@egjs/infinitegrid@4.11.1/dist/infinitegrid.min.js',
    buttonText: '加载更多'
  },
  isPhotoFigcaption: true,
  islazyload: true,
  isAnchor: false,
  // Where the reading-progress percentage is displayed.
  percent: {
    toc: true,
    rightside: true,
  },
  // Follow the OS dark-mode preference automatically.
  autoDarkmode: true
}</script><script id="config-diff">var GLOBAL_CONFIG_SITE = {
  // Per-page settings for this post (overrides/extends GLOBAL_CONFIG).
  title: '光场资料收集（转）',
  isPost: true,
  isHome: false,
  isHighlightShrink: false,
  isToc: true,
  // Last-modified timestamp displayed in the post meta.
  postUpdate: '2024-09-06 13:44:49'
}</script><script>(win=>{
      // Tiny localStorage wrapper with a per-key TTL, exposed globally for
      // the theme's other scripts.
      win.saveToLocal = {
        // Store `value` under `key` for `ttl` days; ttl === 0 means
        // "do not persist anything".
        set: (key, value, ttl) => {
          if (ttl === 0) return
          const expiry = Date.now() + ttl * 86400000 // days -> milliseconds
          localStorage.setItem(key, JSON.stringify({ value, expiry }))
        },

        // Return the stored value, or undefined when the key is missing,
        // expired, or holds unreadable JSON (expired/corrupt entries are
        // removed as a side effect).
        get: key => {
          const itemStr = localStorage.getItem(key)
          if (!itemStr) return undefined

          let item
          try {
            item = JSON.parse(itemStr)
          } catch (e) {
            // A corrupt entry previously threw here and aborted the whole
            // bootstrap; discard it instead.
            localStorage.removeItem(key)
            return undefined
          }

          if (Date.now() > item.expiry) {
            localStorage.removeItem(key)
            return undefined
          }
          return item.value
        }
      }

      // Load an external script; resolves once loaded, rejects on error.
      // Optional `attr` map is applied as extra attributes on the <script> tag.
      win.getScript = (url, attr = {}) => new Promise((resolve, reject) => {
        const script = document.createElement('script')
        script.src = url
        script.async = true
        script.onerror = reject
        // Legacy onreadystatechange handling was dropped: it only ever fired
        // in old IE, which cannot parse this file's arrow functions anyway.
        script.onload = () => resolve()

        Object.keys(attr).forEach(key => {
          script.setAttribute(key, attr[key])
        })

        document.head.appendChild(script)
      })

      // Load an external stylesheet; resolves once loaded, rejects on error.
      // `id` (optional) is assigned to the <link> so it can be located later.
      win.getCSS = (url, id = false) => new Promise((resolve, reject) => {
        const link = document.createElement('link')
        link.rel = 'stylesheet'
        link.href = url
        if (id) link.id = id
        link.onerror = reject
        link.onload = () => resolve()
        document.head.appendChild(link)
      })

      // Keep the browser chrome color (<meta name="theme-color">) in sync
      // with the active theme.
      const setThemeColor = color => {
        const meta = document.querySelector('meta[name="theme-color"]')
        if (meta !== null) meta.setAttribute('content', color)
      }

      win.activateDarkMode = () => {
        document.documentElement.setAttribute('data-theme', 'dark')
        setThemeColor('#0d0d0d')
      }
      win.activateLightMode = () => {
        document.documentElement.setAttribute('data-theme', 'light')
        // Fixed: was 'ffffff' (missing '#') — an invalid CSS color value
        // that browsers silently ignore.
        setThemeColor('#ffffff')
      }

      // Theme selection: an explicit saved choice wins; otherwise follow the
      // OS preference, falling back to time of day when unsupported.
      const savedTheme = saveToLocal.get('theme')

      const darkQuery = window.matchMedia('(prefers-color-scheme: dark)')
      const isDarkMode = darkQuery.matches
      const isLightMode = window.matchMedia('(prefers-color-scheme: light)').matches
      const isNotSpecified = window.matchMedia('(prefers-color-scheme: no-preference)').matches
      const hasNoSupport = !isDarkMode && !isLightMode && !isNotSpecified

      if (savedTheme === undefined) {
        if (isLightMode) activateLightMode()
        else if (isDarkMode) activateDarkMode()
        else if (isNotSpecified || hasNoSupport) {
          // No usable OS preference: dark between 22:00 and 08:59.
          const hour = new Date().getHours()
          const isNight = hour <= 8 || hour >= 22
          isNight ? activateDarkMode() : activateLightMode()
        }
        // Track live OS preference changes while no explicit choice is saved.
        // NOTE: addListener is deprecated, but kept deliberately — Safari < 14
        // lacks MediaQueryList.addEventListener.
        darkQuery.addListener(e => {
          if (saveToLocal.get('theme') === undefined) {
            e.matches ? activateDarkMode() : activateLightMode()
          }
        })
      } else if (savedTheme === 'light') activateLightMode()
      else activateDarkMode()

      // Restore the saved sidebar visibility, if the user ever toggled it.
      const asideStatus = saveToLocal.get('aside-status')
      if (asideStatus !== undefined) {
        document.documentElement.classList.toggle('hide-aside', asideStatus === 'hide')
      }

      // Tag Apple devices so CSS can target WebKit-specific quirks.
      if (/iPad|iPhone|iPod|Macintosh/.test(navigator.userAgent)) {
        document.documentElement.classList.add('apple')
      }
    })(window)</script><link rel="stylesheet" href="/xxx.css"><meta name="generator" content="Hexo 6.3.0"><link href="https://cdn.bootcss.com/KaTeX/0.11.1/katex.min.css" rel="stylesheet" /></head><body><div id="loading-box"><div class="loading-left-bg"></div><div class="loading-right-bg"></div><div class="spinner-box"><div class="configure-border-1"><div class="configure-core"></div></div><div class="configure-border-2"><div class="configure-core"></div></div><div class="loading-word">加载中...</div></div></div><script>(()=>{
  // Full-screen preloader: covers the page until everything has loaded,
  // and again during pjax navigations.
  const box = document.getElementById('loading-box')
  const pageBody = document.body

  // Show the overlay and lock page scrolling while loading.
  const showLoader = () => {
    pageBody.style.overflow = 'hidden'
    box.classList.remove('loaded')
  }

  // Hide the overlay (via the .loaded class) and restore scrolling.
  const hideLoader = () => {
    pageBody.style.overflow = ''
    box.classList.add('loaded')
  }

  showLoader()
  window.addEventListener('load', hideLoader)

  // Pjax is enabled for this build, so replay the loader on pjax navigations.
  if (true) {
    document.addEventListener('pjax:send', showLoader)
    document.addEventListener('pjax:complete', hideLoader)
  }
})()</script><div id="sidebar"><div id="menu-mask"></div><div id="sidebar-menus"><div class="avatar-img is-center"><img src= "" data-lazy-src="/img/avatar.png" onerror="onerror=null;src='/img/friend_404.gif'" alt="avatar"/></div><div class="sidebar-site-data site-data is-center"><a href="/archives/"><div class="headline">文章</div><div class="length-num">4</div></a><a href="/tags/"><div class="headline">标签</div><div class="length-num">7</div></a><a href="/categories/"><div class="headline">分类</div><div class="length-num">3</div></a></div><hr class="custom-hr"/><div class="menus_items"><div class="menus_item"><a class="site-page" href="/"><i class="fa-fw fas fa-home"></i><span> Home</span></a></div><div class="menus_item"><a class="site-page" href="/archives/"><i class="fa-fw fas fa-archive"></i><span> Archives</span></a></div><div class="menus_item"><a class="site-page" href="/tags/"><i class="fa-fw fas fa-tags"></i><span> Tags</span></a></div><div class="menus_item"><a class="site-page" href="/categories/"><i class="fa-fw fas fa-folder-open"></i><span> Categories</span></a></div><div class="menus_item"><a class="site-page" href="/link/"><i class="fa-fw fas fa-link"></i><span> Link</span></a></div><div class="menus_item"><a class="site-page" href="/about/"><i class="fa-fw fas fa-heart"></i><span> About</span></a></div></div></div></div><div class="post" id="body-wrap"><header class="post-bg" id="page-header" style="background-image: url('https://i.loli.net/2020/05/01/gkihqEjXxJ5UZ1C.jpg')"><nav id="nav"><span id="blog-info"><a href="/" title="Leephy,Lifei Ren"><span class="site-name">Leephy,Lifei Ren</span></a></span><div id="menus"><div class="menus_items"><div class="menus_item"><a class="site-page" href="/"><i class="fa-fw fas fa-home"></i><span> Home</span></a></div><div class="menus_item"><a class="site-page" href="/archives/"><i class="fa-fw fas fa-archive"></i><span> Archives</span></a></div><div class="menus_item"><a class="site-page" href="/tags/"><i 
class="fa-fw fas fa-tags"></i><span> Tags</span></a></div><div class="menus_item"><a class="site-page" href="/categories/"><i class="fa-fw fas fa-folder-open"></i><span> Categories</span></a></div><div class="menus_item"><a class="site-page" href="/link/"><i class="fa-fw fas fa-link"></i><span> Link</span></a></div><div class="menus_item"><a class="site-page" href="/about/"><i class="fa-fw fas fa-heart"></i><span> About</span></a></div></div><div id="toggle-menu"><a class="site-page" href="javascript:void(0);"><i class="fas fa-bars fa-fw"></i></a></div></div></nav><div id="post-info"><h1 class="post-title">光场资料收集（转）</h1><div id="post-meta"><div class="meta-firstline"><span class="post-meta-date"><i class="far fa-calendar-alt fa-fw post-meta-icon"></i><span class="post-meta-label">发表于</span><time class="post-meta-date-created" datetime="2024-09-05T15:26:22.000Z" title="发表于 2024-09-05 23:26:22">2024-09-05</time><span class="post-meta-separator">|</span><i class="fas fa-history fa-fw post-meta-icon"></i><span class="post-meta-label">更新于</span><time class="post-meta-date-updated" datetime="2024-09-06T05:44:49.974Z" title="更新于 2024-09-06 13:44:49">2024-09-06</time></span><span class="post-meta-categories"><span class="post-meta-separator">|</span><i class="fas fa-inbox fa-fw post-meta-icon"></i><a class="post-meta-categories" href="/categories/%E8%B5%84%E6%96%99%E6%B1%87%E6%80%BB/">资料汇总</a></span></div><div class="meta-secondline"><span class="post-meta-separator">|</span><span class="post-meta-pv-cv" id="" data-flag-title="光场资料收集（转）"><i class="far fa-eye fa-fw post-meta-icon"></i><span class="post-meta-label">阅读量:</span><span id="busuanzi_value_page_pv"><i class="fa-solid fa-spinner fa-spin"></i></span></span></div></div></div></header><main class="layout" id="content-inner"><div id="post"><article class="post-content" id="article-container"><h2 id="Links-About-Light-Field"><a href="#Links-About-Light-Field" class="headerlink" title="Links About Light Field"></a>Links 
About Light Field</h2><ul>
<li><a href="http://lightfield.stanford.edu/lfs.html" target="_blank" rel="external">斯坦福大学光场数据库</a>：老牌斯坦福大学计算机图形实验室(Computer Graphics Laboratory)提供的，该数据库所在网站还提供了光场的采集设备，相机标定以及可视化工具。</li>
<li>斯坦福大学计算机图形实验室的<a href="http://graphics.stanford.edu/~levoy/" target="_blank" rel="external">Marc Levoy</a>教授做的动画仿真，利于理解。<a href="https://graphics.stanford.edu/courses/cs178/applets/applets.html" target="_blank" rel="external">Flash applets on some technical aspects of photography</a>，这里面详细地介绍了相机的各种参数变化对应的光路图的变化，强烈推荐。</li>
<li><a href="http://hci-lightfield.iwr.uni-heidelberg.de/" target="_blank" rel="external">HCI光场数据集</a>，千呼万唤始出来，有好长一段时间这个数据集突然消失了（可能是在维护数据）。如今以新的面貌重现天日，真的让人喜出望外。对于其数据集，HCI提供的<a href="https://github.com/lightfield-analysis/matlab-tools" target="_blank" rel="external">解码工具</a>；这是要建立与<a href="#Middlebury">Middlebury</a>齐名的数据集（包括评价排名）的节奏啊！（ps:日后整理，很感兴趣）</li>
<li><a href="http://cseweb.ucsd.edu/~ravir/" target="_blank" rel="external"><strong>Ravi Ramamoorthi</strong></a>教授主页，一位计算成像，计算机视觉领域的大神，他所在组发表了很多高质量的文章，详情可参考他的主页。</li>
<li><a href="http://cseweb.ucsd.edu/~viscomp/projects/LF/" target="_blank" rel="external">光场实验室网站</a>，研究光场领域，隶属于<strong>Ravi Ramamoorthi</strong>教授。这里将是研究光场领域深度图像获取，三维重建以及去除高光等领域研究者的福音。</li>
<li>光场相机的缔造者以及Lytro公司的创始人<a href="http://www.eecs.berkeley.edu/Faculty/Homepages/yirenng.html/" target="_blank" rel="external">Ren Ng</a>。看这里是其创建的<a href="https://illum.lytro.com" target="_blank" rel="external">Lytro公司</a>的主页；最初Lytro的最大卖点在于<strong>先拍照后对焦</strong>，可是买账的人并不多；购买者多数是摄影爱好者以及科研机构。最近Lytro公司开始进军VR以及AR领域，看了下其设备，怎一个大字了得（Ps: 话说Ng为何去教书了？）</li>
<li><a href="http://blog.lytro.com/" target="_blank" rel="external">Lytro论坛</a>，这里有关于Lytro公司以及光场相机最新的应用，时常关注不至于落后于时代，不至于被世界残忍的抛弃。</li>
<li>MATLAB<a href="http://www.mathworks.com/matlabcentral/fileexchange/49683-light-field-toolbox-v0-4" target="_blank" rel="external">光场工具包</a>，这个工具包从事光场研究的科研人员的福利，后文中我将详细的介绍这个工具包的使用方法。</li>
<li><a href="http://optics.miloush.net/lytro/Default.aspx" target="_blank" rel="external"><strong>LytroMeltdown</strong></a>，一位布拉格大学(Charles University)的学生拆解Lytro 1.0的资料，如果你想对光场相机的内部结构有更加深入的了解的话，这个网址有丰富的介绍。（目前仅有Lytro一代的拆解并没有ILLUM的拆解，原因是过于昂贵，作者买不起）</li>
<li>cocolib<a href="http://sourceforge.net/p/cocolib/home/Home/" target="_blank" rel="external">光场工具套件</a>，这个套件实际上是一个处理凸优化问题的库，既可以用命令行操作也可用Matlab界面操作。该库实现了目前几种常用的算法诸如： inverse problems，基于总变分最小化的图像分割以及矢量多标记交易成本函数；当然最重要的还有对于光场图像的分析函数套件（基于HCI发表的深度估计论文）。</li>
<li>著名的<a href="http://vision.middlebury.edu/stereo/" target="_blank" rel="external">Middlebury</a><span id="Middlebury">数据集</span>：来提供了Benchmark(左右视角的纹理图以及对应的GT深度图像)，各种算法的性能对比， 在双目深度估计领域属于最为权威的评估标准。</li>
<li>Deep learning: <a href="https://github.com/iro-cp/FCRN-DepthPrediction" target="_blank" rel="external">Depth Estimation</a>，目前基于双目，多目以及利用光场进行深度估计的算法已有很多；有些研究者利用当前比较火热的深度学习的策略对<strong>单幅图像</strong>进行深度图像的提取；我本来想跑跑人家代码，可是目前还没时间搞这些。</li>
<li><a href="http://cvlab.epfl.ch/" target="_blank" rel="external">Computer Vision Laboratory - CVLAB</a></li>
<li><a href="http://media.au.tsinghua.edu.cn/people.jsp" target="_blank" rel="external">宽带网数字媒体技术实验室</a>的<a href="http://www.liuyebin.com/" target="_blank" rel="external">Yebin Liu 刘烨斌主页</a></li>
<li>深度图+原始彩色图像转化成多视角动态gif，<a href="http://wigglemaker.ugocapeto.com/" target="_blank" rel="external">戳这里</a>，这算是深度图像的一个小小的应用。</li>
</ul>
<h2 id="Light-Field-Resources"><a href="#Light-Field-Resources" class="headerlink" title="Light Field Resources"></a><a href="https://github.com/Vincentqyw/light-field-resources/blob/master/README.md" target="_blank" rel="external">Light Field Resources</a></h2><p>This is a (work in progress) repo for collecting links to data sets, source code, and other resources related to research on light fields for computer vision. For further information and interaction within the light field community, have a look at:</p>
<ul>
<li><a href="https://plus.google.com/communities/114934462920613225440" target="_blank" rel="external">Google Community for the Matlab Light Field Toolbox</a></li>
<li><a href="http://lightfield-forum.com/en/" target="_blank" rel="external">Light Field Forum</a></li>
<li><a href="https://groups.google.com/forum/#!forum/lightfieldvision" target="_blank" rel="external">Mailing List / General Light Field Vision Google Group</a></li>
</ul>
<h2 id="Background-Information-General-Light-Field-Information"><a href="#Background-Information-General-Light-Field-Information" class="headerlink" title="Background Information / General Light Field Information"></a>Background Information / General Light Field Information</h2><ul>
<li><a href="https://en.wikipedia.org/wiki/Light_field" target="_blank" rel="external">Wikipedia</a></li>
<li><a href="http://plenoptic.info/" target="_blank" rel="external">plenoptic.info</a> provides some nice visualizations on how micro lens based plenoptic cameras work</li>
<li><a href="http://www.tgeorgiev.net/" target="_blank" rel="external">Todor Georgievs Website</a> insights into plenoptic cameras. No longer updated (?)</li>
<li><a href="http://lightfield-analysis.net/benchmark/paper/survey_cvprw_lf4cv_2017.pdf" target="_blank" rel="external">A Taxonomy and Evaluation of Dense Light Field Depth Estimation Algorithms</a> paper with an in depth overview of depth estimation approaches for 4D light fields</li>
<li><a href="https://web.stanford.edu/class/ee367/reading/levoy-lfphoto-ieee06.pdf" target="_blank" rel="external">Light Fields and Computational Imaging</a> early survey of the theory and practice of light field imaging </li>
<li><em>please add more :)</em></li>
</ul>
<h2 id="Other-Light-Field-Datasets"><a href="#Other-Light-Field-Datasets" class="headerlink" title="Other Light Field Datasets"></a>Other Light Field Datasets</h2><ul>
<li><a href="http://lightfield.stanford.edu/lfs.html" target="_blank" rel="external">The (New) Stanford Light Field Archive</a></li>
<li><a href="http://lightfields.stanford.edu/index.html" target="_blank" rel="external">Stanford Lytro Light Field Archive</a></li>
<li><a href="http://web.media.mit.edu/~gordonw/SyntheticLightFields/index.php" target="_blank" rel="external">MIT Synthetic Light Field Archive</a></li>
<li><a href="http://lightfield-analysis.net/" target="_blank" rel="external">4D Light Field Dataset (CVIA Konstanz &amp; HCI Heidelberg)</a></li>
<li><a href="http://lightfieldgroup.iwr.uni-heidelberg.de/?page_id=713" target="_blank" rel="external">HCI 4D Light Field Dataset</a></li>
<li><a href="https://www.irisa.fr/temics/demos/lightField/index.html" target="_blank" rel="external">Lytro first generation dataset</a></li>
<li><a href="http://mmspg.epfl.ch/EPFL-light-field-image-dataset" target="_blank" rel="external">EPFL Light-Field Image Dataset</a></li>
<li><a href="https://www.disneyresearch.com/project/lightfields/" target="_blank" rel="external">Disney High Spatio-Angular Resolution Light Fields</a></li>
<li><a href="https://www.eecis.udel.edu/~nianyi/LFSD.htm" target="_blank" rel="external">Light field Saliency Dataset (LFSD)</a></li>
<li><a href="https://github.com/aghasemi/lcav31" target="_blank" rel="external">LCAV-31 - A Dataset for Light Field Object Recognition</a></li>
<li><a href="http://cseweb.ucsd.edu/~viscomp/projects/LF/papers/ECCV16/LF_dataset.zip" target="_blank" rel="external">A 4D Light-Field Dataset for Material Recognition</a></li>
<li><a href="http://cseweb.ucsd.edu/~viscomp/projects/LF/papers/ICCV15/dataset.zip" target="_blank" rel="external">Data for: Occlusion-aware depth estimation using light-field cameras</a></li>
<li><a href="https://vision.in.tum.de/data/datasets/ddff12scene" target="_blank" rel="external">DDFF 12-Scene 4.5D Lightfield-Depth Benchmark</a></li>
<li><em>please add more :)</em></li>
</ul>
<h2 id="Tools"><a href="#Tools" class="headerlink" title="Tools"></a>Tools</h2><ul>
<li><a href="http://dgd.vision/Tools/LFToolbox/" target="_blank" rel="external">Matlab Light Field Toolbox</a></li>
<li><a href="http://cocolib.net/index.php/examples/lightfields" target="_blank" rel="external">cocolib light field suite</a></li>
<li><a href="https://sites.google.com/site/yunsubok/lf_geo_calib" target="_blank" rel="external">Geometric light field camera calibration toolbox</a></li>
<li><a href="https://github.com/lightfield-analysis/blender-addon" target="_blank" rel="external">Blender addon to create synthetic light field data sets</a></li>
<li><em>please add more :)</em></li>
</ul>
<h2 id="Algorithm-Source-Code"><a href="#Algorithm-Source-Code" class="headerlink" title="Algorithm Source Code"></a>Algorithm Source Code</h2><ul>
<li><a href="https://sites.google.com/site/hgjeoncv/home/depthfromlf_cvpr15" target="_blank" rel="external">Accurate Depth Map Estimation from a Lenslet Light Field Camera</a> (*LF)</li>
<li><a href="http://cseweb.ucsd.edu/~viscomp/projects/LF/papers/ICCV15/occCode.zip" target="_blank" rel="external">Occlusion-aware depth estimation using light-field cameras</a> (*LF_OCC)</li>
<li><a href="http://www.ee.nthu.edu.tw/chaotsung/rprf/index.html" target="_blank" rel="external">Empirical Bayesian Light-Field Stereo Matching by Robust Pseudo Random Field Modeling</a> (RPRF)</li>
<li><a href="https://github.com/shuozh/Spinning-Parallelogram-Operator" target="_blank" rel="external">Robust Depth Estimation for Light Field via Spinning Parallelogram Operator</a> (SPO)</li>
<li><a href="https://github.com/chshin10/epinet" target="_blank" rel="external">EPINET: A Fully-Convolutional Neural Network using Epipolar Geometry for Depth from Light Field Images</a></li>
<li><a href="https://github.com/phuang17/DeepMVS" target="_blank" rel="external">DeepMVS: Learning Multi-View Stereopsis</a></li>
<li><a href="https://github.com/hosseinjavidnia/Depth-MultiCamera" target="_blank" rel="external">Total variation-based dense depth from multicamera array</a></li>
<li><a href="https://github.com/Vincentqyw/Depth-Estimation-Light-Field/tree/master/LF_DC" target="_blank" rel="external">Depth from Combining Defocus and Correspondence Using Light-Field Cameras</a></li>
<li><a href="https://github.com/Vincentqyw/Depth-Estimation-Light-Field/tree/master/CAE" target="_blank" rel="external">Robust Light Field Depth Estimation for Noisy Scene with Occlusion</a></li>
<li><a href="https://github.com/renlifei1994/LF_DEPTH_SHADING" target="_blank" rel="external">Depth from Shading, Defocus, and Correspondence Using Light-Field Angular Coherence</a></li>
<li><em>please add more :)</em></li>
</ul>
<p><em>Where applicable, the short name in parentheses denotes the acronym used on the <a href="http://lightfield-analysis.net" target="_blank" rel="external">4D light field benchmark</a>.</em></p>
<h2 id="Workshops-amp-Tutorials"><a href="#Workshops-amp-Tutorials" class="headerlink" title="Workshops &amp; Tutorials"></a>Workshops &amp; Tutorials</h2><ul>
<li><a href="https://www.eecis.udel.edu/~yu/LF4CV/" target="_blank" rel="external">1st Workshop on Light Fields for Computer Vision @ ECCV 2014</a></li>
<li><a href="http://lightfield-analysis.net/LF4CV/" target="_blank" rel="external">2nd Workshop on Light Fields for Computer Vision @ CVPR 2017</a></li>
<li><em>please add more :)</em></li>
</ul>
<h2 id="People-Labs"><a href="#People-Labs" class="headerlink" title="People / Labs"></a>People / Labs</h2><ul>
<li><a href="https://www.cvia.uni-konstanz.de/" target="_blank" rel="external">CVIA, Computer Vision and Image Analysis, Uni Konstanz, Germany</a></li>
<li><a href="http://www.computationalimaging.org/" target="_blank" rel="external">SCI, Stanford Computational Imaging, Stanford University, USA</a></li>
<li><a href="http://lightfieldgroup.iwr.uni-heidelberg.de/?page_id=453" target="_blank" rel="external">HCI, Heidelberg Collaboratory for Image Processing, Heidelberg University, Germany</a></li>
<li><a href="https://shuozh.github.io/" target="_blank" rel="external">Home Page for Shuo Zhang, Beijingjiaotong</a></li>
<li><a href="https://www.cvia.uni-konstanz.de/code-and-datasets/" target="_blank" rel="external">Computer Vision and Image Analysis, Konstanz University </a></li>
<li><a href="https://sites.google.com/site/hgjeoncv/home" target="_blank" rel="external">Hae-Gon Jeon, Carnegie Mellon University </a></li>
<li><a href="http://marine.acfr.usyd.edu.au/research/plenoptic-imaging/" target="_blank" rel="external">Plenoptic Imaging, ACFR Marine,Sydney University </a></li>
<li><a href="http://chenlab.ece.cornell.edu/projects/MobileCamArray/#Data%20Code" target="_blank" rel="external">Advanced Multimedia Processing Lab -- Projects -- The Self-Reconfigurable Camera Array</a></li>
<li><a href="https://tcwang0509.github.io/" target="_blank" rel="external">Ting-Chun Wang's Homepage</a></li>
<li><a href="https://people.eecs.berkeley.edu/~pratul/" target="_blank" rel="external">Pratul Srinivasan， EECS Department at UC Berkeley</a></li>
<li><em>please add more :)</em></li>
</ul>
<h2 id="Deep-Learning-Related"><a href="#Deep-Learning-Related" class="headerlink" title="Deep Learning Related"></a>Deep Learning Related</h2><ul>
<li><a href="https://cn.mathworks.com/help/nnet/examples.html?s_cid=doc_flyout#bvljehw" target="_blank" rel="external">Deep Learning in Matlab</a></li>
<li><em>please add more :)</em></li>
</ul>
<h2 id="Others"><a href="#Others" class="headerlink" title="Others"></a>Others</h2><ul>
<li><a href="https://unsplash.com/" target="_blank" rel="external">Free Images</a></li>
<li><a href="http://i.mouto.org/#kodak" target="_blank" rel="external">mouto</a></li>
<li><a href="http://lab.mouto.org/" target="_blank" rel="external">Lab</a></li>
<li><a href="https://blog.metheno.net/" target="_blank" rel="external">Metheno</a></li>
<li><a href="http://x.mouto.org/wb/" target="_blank" rel="external">You Know</a></li>
</ul>
<h2 id="Convert-Latex-to-images"><a href="#Convert-Latex-to-images" class="headerlink" title="Convert Latex to images"></a>Convert Latex to images</h2><ul>
<li><a href="http://www.latex2png.com/" target="_blank" rel="external">Latex2png</a></li>
<li><a href="https://private.codecogs.com/latex/eqneditor.php" target="_blank" rel="external">Latex2html</a></li>
<li><a href="http://www.tlhiv.org/ltxpreview/" target="_blank" rel="external">Latex2svg</a></li>
</ul>
<p style="text-align:right">转自<a target="_blank" rel="noopener" href="https://www.vincentqin.tech/collections/">https://www.vincentqin.tech/collections/</a>，有补充</p>
<p>&nbsp;</p>
</article><div class="post-copyright"><div class="post-copyright__author"><span class="post-copyright-meta"><i class="fas fa-circle-user fa-fw"></i>文章作者: </span><span class="post-copyright-info"><a href="http://renlf.cn">Lifei Ren</a></span></div><div class="post-copyright__type"><span class="post-copyright-meta"><i class="fas fa-square-arrow-up-right fa-fw"></i>文章链接: </span><span class="post-copyright-info"><a href="http://renlf.cn/2024/09/05/%E5%85%89%E5%9C%BA%E8%B5%84%E6%96%99%E6%94%B6%E9%9B%86/">http://renlf.cn/2024/09/05/%E5%85%89%E5%9C%BA%E8%B5%84%E6%96%99%E6%94%B6%E9%9B%86/</a></span></div><div class="post-copyright__notice"><span class="post-copyright-meta"><i class="fas fa-circle-exclamation fa-fw"></i>版权声明: </span><span class="post-copyright-info">本博客所有文章除特别声明外，均采用 <a href="https://creativecommons.org/licenses/by-nc-sa/4.0/" target="_blank">CC BY-NC-SA 4.0</a> 许可协议。转载请注明来自 <a href="http://renlf.cn" target="_blank">Leephy,Lifei Ren</a>！</span></div></div><div class="tag_share"><div class="post-meta__tag-list"><a class="post-meta__tags" href="/tags/%E5%85%89%E5%9C%BA/">光场</a><a class="post-meta__tags" href="/tags/%E8%AE%A1%E7%AE%97%E6%88%90%E5%83%8F/">计算成像</a></div><div class="post_share"><div class="social-share" data-image="https://i.loli.net/2020/05/01/gkihqEjXxJ5UZ1C.jpg" data-sites="facebook,twitter,wechat,weibo,qq"></div><link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/butterfly-extsrc@1.1.3/sharejs/dist/css/share.min.css" media="print" onload="this.media='all'"><script src="https://cdn.jsdelivr.net/npm/butterfly-extsrc@1.1.3/sharejs/dist/js/social-share.min.js" defer></script></div></div><div class="post-reward"><div class="reward-button"><i class="fas fa-qrcode"></i>赞助</div><div class="reward-main"><ul class="reward-all"><li class="reward-item"><a href="/img/wechat.jpg" target="_blank"><img class="post-qr-code-img" src= "" data-lazy-src="/img/wechat.jpg" alt="wechat"/></a><div class="post-qr-code-desc">wechat</div></li><li 
class="reward-item"><a href="/img/alipay.jpg" target="_blank"><img class="post-qr-code-img" src= "" data-lazy-src="/img/alipay.jpg" alt="alipay"/></a><div class="post-qr-code-desc">alipay</div></li></ul></div></div><nav class="pagination-post" id="pagination"><div class="prev-post pull-left"><a href="/2024/09/05/%E7%BB%93%E6%9E%84%E5%85%89%E7%A0%94%E7%A9%B6%E7%9B%B8%E5%85%B3%E8%B5%84%E6%96%99/" title="结构光研究相关资料"><img class="cover" src= "" data-lazy-src="https://s3.bmp.ovh/imgs/2024/09/05/5fc051e2f13a81a9.jpg" onerror="onerror=null;src='/img/404.jpg'" alt="cover of previous post"><div class="pagination-info"><div class="label">上一篇</div><div class="prev_info">结构光研究相关资料</div></div></a></div><div class="next-post pull-right"><a href="/2024/09/05/machine-vision/" title="机器视觉系统设计计算简要流程"><img class="cover" src= "" data-lazy-src="https://s3.bmp.ovh/imgs/2024/09/05/df81c4eb6602f0f9.png" onerror="onerror=null;src='/img/404.jpg'" alt="cover of next post"><div class="pagination-info"><div class="label">下一篇</div><div class="next_info">机器视觉系统设计计算简要流程</div></div></a></div></nav><hr class="custom-hr"/><div id="post-comment"><div class="comment-head"><div class="comment-headline"><i class="fas fa-comments fa-fw"></i><span> 评论</span></div></div><div class="comment-wrap"><div><div id="lv-container" data-id="city" data-uid="MTAyMC80MDEyOS8xNjY1Ng=="></div></div></div></div></div><div class="aside-content" id="aside-content"><div class="card-widget card-info"><div class="is-center"><div class="avatar-img"><img src= "" data-lazy-src="/img/avatar.png" onerror="this.onerror=null;this.src='/img/friend_404.gif'" alt="avatar"/></div><div class="author-info__name">Lifei Ren</div><div class="author-info__description">Technology, literature, art and other content sharing</div></div><div class="card-info-data site-data is-center"><a href="/archives/"><div class="headline">文章</div><div class="length-num">4</div></a><a href="/tags/"><div class="headline">标签</div><div 
class="length-num">7</div></a><a href="/categories/"><div class="headline">分类</div><div class="length-num">3</div></a></div><a id="card-info-btn" target="_blank" rel="noopener" href="https://github.com/renlifei1994"><i class="fab fa-github"></i><span>Follow Me</span></a><div class="card-info-social-icons is-center"><a class="social-icon" href="https://github.com/renlifei1994" target="_blank" title="Github"><i class="fab fa-github" style="color: #24292e;"></i></a><a class="social-icon" href="mailto:leephy.ren@polyu.edu.hk" target="_blank" title="Email"><i class="fas fa-envelope" style="color: #4a7dbe;"></i></a></div></div><div class="card-widget card-announcement"><div class="item-headline"><i class="fas fa-bullhorn fa-shake"></i><span>公告</span></div><div class="announcement_content">本博客于2024年9月5日升级上线，欢迎大家访问。</div></div><div class="sticky_layout"><div class="card-widget" id="card-toc"><div class="item-headline"><i class="fas fa-stream"></i><span>目录</span><span class="toc-percentage"></span></div><div class="toc-content"><ol class="toc"><li class="toc-item toc-level-2"><a class="toc-link" href="#Links-About-Light-Field"><span class="toc-number">1.</span> <span class="toc-text">Links About Light Field</span></a></li><li class="toc-item toc-level-2"><a class="toc-link" href="#Light-Field-Resources"><span class="toc-number">2.</span> <span class="toc-text">Light Field Resources</span></a></li><li class="toc-item toc-level-2"><a class="toc-link" href="#Background-Information-General-Light-Field-Information"><span class="toc-number">3.</span> <span class="toc-text">Background Information &#x2F; General Light Field Information</span></a></li><li class="toc-item toc-level-2"><a class="toc-link" href="#Other-Light-Field-Datasets"><span class="toc-number">4.</span> <span class="toc-text">Other Light Field Datasets</span></a></li><li class="toc-item toc-level-2"><a class="toc-link" href="#Tools"><span class="toc-number">5.</span> <span class="toc-text">Tools</span></a></li><li 
class="toc-item toc-level-2"><a class="toc-link" href="#Algorithm-Source-Code"><span class="toc-number">6.</span> <span class="toc-text">Algorithm Source Code</span></a></li><li class="toc-item toc-level-2"><a class="toc-link" href="#Workshops-amp-Tutorials"><span class="toc-number">7.</span> <span class="toc-text">Workshops &amp; Tutorials</span></a></li><li class="toc-item toc-level-2"><a class="toc-link" href="#People-Labs"><span class="toc-number">8.</span> <span class="toc-text">People &#x2F; Labs</span></a></li><li class="toc-item toc-level-2"><a class="toc-link" href="#Deep-Learning-Related"><span class="toc-number">9.</span> <span class="toc-text">Deep Learning Related</span></a></li><li class="toc-item toc-level-2"><a class="toc-link" href="#Others"><span class="toc-number">10.</span> <span class="toc-text">Others</span></a></li><li class="toc-item toc-level-2"><a class="toc-link" href="#Convert-Latex-to-images"><span class="toc-number">11.</span> <span class="toc-text">Convert Latex to images</span></a></li></ol></div></div><div class="card-widget card-recent-post"><div class="item-headline"><i class="fas fa-history"></i><span>最新文章</span></div><div class="aside-list"><div class="aside-list-item"><a class="thumbnail" href="/2024/09/05/N%E6%AD%A5%E7%9B%B8%E7%A7%BB%E7%AE%97%E6%B3%95%E7%9A%84%E6%8E%A8%E5%AF%BC/" title="N步相移算法的推导"><img src= "" data-lazy-src="https://s3.bmp.ovh/imgs/2024/09/05/66839ea5f1df1c3f.jpg" onerror="this.onerror=null;this.src='/img/404.jpg'" alt="N步相移算法的推导"/></a><div class="content"><a class="title" href="/2024/09/05/N%E6%AD%A5%E7%9B%B8%E7%A7%BB%E7%AE%97%E6%B3%95%E7%9A%84%E6%8E%A8%E5%AF%BC/" title="N步相移算法的推导">N步相移算法的推导</a><time datetime="2024-09-05T15:35:07.000Z" title="发表于 2024-09-05 23:35:07">2024-09-05</time></div></div><div class="aside-list-item"><a class="thumbnail" href="/2024/09/05/%E7%BB%93%E6%9E%84%E5%85%89%E7%A0%94%E7%A9%B6%E7%9B%B8%E5%85%B3%E8%B5%84%E6%96%99/" title="结构光研究相关资料"><img src= "" 
data-lazy-src="https://s3.bmp.ovh/imgs/2024/09/05/5fc051e2f13a81a9.jpg" onerror="this.onerror=null;this.src='/img/404.jpg'" alt="结构光研究相关资料"/></a><div class="content"><a class="title" href="/2024/09/05/%E7%BB%93%E6%9E%84%E5%85%89%E7%A0%94%E7%A9%B6%E7%9B%B8%E5%85%B3%E8%B5%84%E6%96%99/" title="结构光研究相关资料">结构光研究相关资料</a><time datetime="2024-09-05T15:31:09.000Z" title="发表于 2024-09-05 23:31:09">2024-09-05</time></div></div><div class="aside-list-item"><a class="thumbnail" href="/2024/09/05/%E5%85%89%E5%9C%BA%E8%B5%84%E6%96%99%E6%94%B6%E9%9B%86/" title="光场资料收集（转）"><img src= "" data-lazy-src="https://i.loli.net/2020/05/01/gkihqEjXxJ5UZ1C.jpg" onerror="this.onerror=null;this.src='/img/404.jpg'" alt="光场资料收集（转）"/></a><div class="content"><a class="title" href="/2024/09/05/%E5%85%89%E5%9C%BA%E8%B5%84%E6%96%99%E6%94%B6%E9%9B%86/" title="光场资料收集（转）">光场资料收集（转）</a><time datetime="2024-09-05T15:26:22.000Z" title="发表于 2024-09-05 23:26:22">2024-09-05</time></div></div><div class="aside-list-item"><a class="thumbnail" href="/2024/09/05/machine-vision/" title="机器视觉系统设计计算简要流程"><img src= "" data-lazy-src="https://s3.bmp.ovh/imgs/2024/09/05/df81c4eb6602f0f9.png" onerror="this.onerror=null;this.src='/img/404.jpg'" alt="机器视觉系统设计计算简要流程"/></a><div class="content"><a class="title" href="/2024/09/05/machine-vision/" title="机器视觉系统设计计算简要流程">机器视觉系统设计计算简要流程</a><time datetime="2024-09-05T14:34:22.000Z" title="发表于 2024-09-05 22:34:22">2024-09-05</time></div></div></div></div></div></div></main><footer id="footer"><div id="footer-wrap"><div class="copyright">&copy;2020 - 2024 By Lifei Ren</div><div class="framework-info"><span>框架 </span><a target="_blank" rel="noopener" href="https://hexo.io">Hexo</a><span class="footer-separator">|</span><span>主题 </span><a target="_blank" rel="noopener" href="https://github.com/jerryc127/hexo-theme-butterfly">Butterfly</a></div></div></footer></div><div id="rightside"><div id="rightside-config-hide"><button id="readmode" type="button" title="阅读模式"><i class="fas 
fa-book-open"></i></button><button id="translateLink" type="button" title="简繁转换">簡</button><button id="darkmode" type="button" title="浅色和深色模式转换"><i class="fas fa-adjust"></i></button><button id="hide-aside-btn" type="button" title="单栏和双栏切换"><i class="fas fa-arrows-alt-h"></i></button></div><div id="rightside-config-show"><button id="rightside-config" type="button" title="设置"><i class="fas fa-cog fa-spin"></i></button><button class="close" id="mobile-toc-button" type="button" title="目录"><i class="fas fa-list-ul"></i></button><a id="to_comment" href="#post-comment" title="直达评论"><i class="fas fa-comments"></i></a><button id="go-up" type="button" title="回到顶部"><span class="scroll-percent"></span><i class="fas fa-arrow-up"></i></button></div></div><div><script src="/js/utils.js?v=4.13.0"></script><script src="/js/main.js?v=4.13.0"></script><script src="/js/tw_cn.js?v=4.13.0"></script><script src="https://cdn.jsdelivr.net/npm/@fancyapps/ui@5.0.33/dist/fancybox/fancybox.umd.min.js"></script><script src="https://cdn.jsdelivr.net/npm/instant.page@5.2.0/instantpage.min.js" type="module"></script><script src="https://cdn.jsdelivr.net/npm/vanilla-lazyload@17.8.8/dist/lazyload.iife.min.js"></script><script src="https://cdn.jsdelivr.net/npm/node-snackbar@0.1.16/dist/snackbar.min.js"></script><script>function panguFn () {
  // Auto-insert spacing between CJK and half-width characters on the page.
  // If pangu.js is already present, run it directly; otherwise load it from
  // the CDN first (getScript — presumably the Promise-based script loader
  // from the theme's utils.js above; TODO confirm).
  if (typeof pangu === 'object') pangu.autoSpacingPage()
  else {
    getScript('https://cdn.jsdelivr.net/npm/pangu@4.0.7/dist/browser/pangu.min.js')
      .then(() => {
        pangu.autoSpacingPage()
      })
  }
}

// Entry point for pangu spacing, called on DOMContentLoaded and after each
// pjax navigation. The theme's "posts only" config flag was compiled to a
// literal `false` in this generated file, so the post-only branch
// (`GLOBAL_CONFIG_SITE.isPost && panguFn()`) was unreachable dead code:
// spacing always runs on every page. Keep only the reachable path.
function panguInit () {
  panguFn()
}

document.addEventListener('DOMContentLoaded', panguInit)</script><div class="js-pjax"><script>if (!window.MathJax) {
  // First page load: define the MathJax v3 configuration object before the
  // library itself is injected (MathJax reads window.MathJax at startup).
  window.MathJax = {
    tex: {
      inlineMath: [['$', '$'], ['\\(', '\\)']],
      tags: 'ams'
    },
    chtml: {
      scale: 1.1
    },
    options: {
      renderActions: {
        // Custom render action: convert pre-rendered
        // <script type="math/tex"> nodes into MathItems so MathJax typesets
        // them in place.
        findScript: [10, doc => {
          for (const node of document.querySelectorAll('script[type^="math/tex"]')) {
            // A "; mode=display" suffix in the type attribute marks display math.
            const display = !!node.type.match(/; *mode=display/)
            const math = new doc.options.MathItem(node.textContent, doc.inputJax[0], display)
            // Swap the script node for an empty text node that anchors the math.
            const text = document.createTextNode('')
            node.parentNode.replaceChild(text, node)
            math.start = {node: text, delim: '', n: 0}
            math.end = {node: text, delim: '', n: 0}
            doc.math.push(math)
          }
        }, '']
      }
    }
  }
  
  // Inject the MathJax bundle asynchronously from the CDN.
  const script = document.createElement('script')
  script.src = 'https://cdn.jsdelivr.net/npm/mathjax@3.2.2/es5/tex-mml-chtml.min.js'
  script.id = 'MathJax-script'
  script.async = true
  document.head.appendChild(script)
} else {
  // Subsequent pjax navigation: MathJax is already loaded — reset state and
  // re-typeset the freshly swapped-in content.
  MathJax.startup.document.state(0)
  MathJax.texReset()
  MathJax.typesetPromise()
}</script><link rel="stylesheet" type="text/css" href="https://cdn.jsdelivr.net/npm/katex@0.16.9/dist/katex.min.css"><script src="https://cdn.jsdelivr.net/npm/katex@0.16.9/dist/contrib/copy-tex.min.js"></script><script>(() => {
  // Wrap each KaTeX display-math span in a container div (theme helper
  // btf.wrap) so wide equations can be styled/scrolled independently.
  const displayMath = document.querySelectorAll('#article-container span.katex-display')
  for (const item of displayMath) {
    btf.wrap(item, 'div', { class: 'katex-wrap'})
  }
})()</script><script>(()=>{
  // Initialise the Livere (来必力) comment system.
  const loadLivere = () => {
    // Already loaded (e.g. after a pjax navigation): just re-initialise.
    if (typeof LivereTower === 'object') window.LivereTower.init()
    else {
      // Official Livere embed snippet: inject the SDK script once, before
      // the first existing <script> element.
      (function(d, s) {
          var j, e = d.getElementsByTagName(s)[0];
          if (typeof LivereTower === 'function') { return; }
          j = d.createElement(s);
          j.src = 'https://cdn-city.livere.com/js/embed.dist.js';
          j.async = true;
          e.parentNode.insertBefore(j, e);
      })(document, 'script');
    }
  }

  // The theme compiled its comment-system flags to constants here:
  // `'Livere' === 'Livere' || !false` was always true and the inner
  // `if (false)` lazy-load branch was dead, so the only reachable behavior
  // is an immediate load — call it directly.
  loadLivere()
})()</script></div><canvas class="fireworks" mobile="false"></canvas><script src="https://cdn.jsdelivr.net/npm/butterfly-extsrc@1.1.3/dist/fireworks.min.js"></script><script id="canvas_nest" defer="defer" color="0,0,255" opacity="0.7" zIndex="-1" count="99" mobile="false" src="https://cdn.jsdelivr.net/npm/butterfly-extsrc@1.1.3/dist/canvas-nest.min.js"></script><script src="https://cdn.jsdelivr.net/npm/butterfly-extsrc@1.1.3/dist/activate-power-mode.min.js"></script><script>// Configure the activate-power-mode typing effect (library loaded above)
// and trigger it on every input event.
// NOTE: removed the stray `<script src="xxxx"></script>` placeholder — its
// unresolved relative URL caused a guaranteed-404 request on every page
// load and could never execute anything.
POWERMODE.colorful = true;
POWERMODE.shake = true;
POWERMODE.mobile = false;
document.body.addEventListener('input', POWERMODE);
</script><script src="https://cdn.jsdelivr.net/npm/pjax@0.2.8/pjax.min.js"></script><script>let pjaxSelectors = ["meta[property=\"og:image\"]","meta[property=\"og:title\"]","meta[property=\"og:url\"]","head > title","#config-diff","#body-wrap","#rightside-config-hide","#rightside-config-show",".js-pjax"]

// Intercept same-tab internal link clicks and swap only the fragments listed
// in pjaxSelectors instead of doing a full page reload.
// NOTE(review): `pjax` must stay a global `var` — the pjax:error handler
// further down calls pjax.loadUrl().
var pjax = new Pjax({
  elements: 'a:not([target="_blank"])',
  selectors: pjaxSelectors,
  cacheBust: false,
  analytics: false,
  scrollRestoration: false
})

// Teardown hook: runs when a pjax navigation starts, before the new page
// content replaces the old — destroy page-scoped widgets and listeners.
document.addEventListener('pjax:send', function () {

  // removeEventListener
  btf.removeGlobalFnEvent('pjax')
  btf.removeGlobalFnEvent('themeChange')

  // Collapse the right-side tool panel if it was expanded.
  document.getElementById('rightside').classList.remove('rightside-show')
  
  // Destroy any non-fixed APlayer music players from the old page
  // (fixed players persist across navigations).
  if (window.aplayers) {
    for (let i = 0; i < window.aplayers.length; i++) {
      if (!window.aplayers[i].options.fixed) {
        window.aplayers[i].destroy()
      }
    }
  }

  // Stop the typed.js typing animation if one is running.
  typeof typed === 'object' && typed.destroy()

  //reset readmode
  const $bodyClassList = document.body.classList
  $bodyClassList.contains('read-mode') && $bodyClassList.remove('read-mode')

  // Tear down DisqusJS if it was initialised on the old page.
  typeof disqusjs === 'object' && disqusjs.destroy()
})

// Re-initialisation hook: runs after pjax has swapped in the new page
// fragments — rebind theme features that depend on the new DOM.
document.addEventListener('pjax:complete', function () {
  window.refreshFn()

  // Scripts inserted via innerHTML do not execute: clone every
  // script[data-pjax] node (attributes + text) and re-insert it so the
  // browser actually runs it.
  document.querySelectorAll('script[data-pjax]').forEach(item => {
    const newScript = document.createElement('script')
    const content = item.text || item.textContent || item.innerHTML || ""
    Array.from(item.attributes).forEach(attr => newScript.setAttribute(attr.name, attr.value))
    newScript.appendChild(document.createTextNode(content))
    item.parentNode.replaceChild(newScript, item)
  })

  // Re-scan the new DOM for lazy-loaded images.
  GLOBAL_CONFIG.islazyload && window.lazyLoadInstance.update()

  // Re-apply CJK/Latin spacing on the new content.
  typeof panguInit === 'function' && panguInit()

  // google analytics
  typeof gtag === 'function' && gtag('config', '', {'page_path': window.location.pathname});

  // baidu analytics
  typeof _hmt === 'object' && _hmt.push(['_trackPageview',window.location.pathname]);

  // Re-create music players if the new page contains any .aplayer elements.
  typeof loadMeting === 'function' && document.getElementsByClassName('aplayer').length && loadMeting()

  // prismjs
  typeof Prism === 'object' && Prism.highlightAll()
})

// Failed pjax fetches for missing pages are routed to the themed 404 page;
// every other error status is left to pjax's default handling.
document.addEventListener('pjax:error', e => {
  if (e.request.status !== 404) return
  pjax.loadUrl('/404.html')
})</script><script async data-pjax src="//busuanzi.ibruce.info/busuanzi/2.3/busuanzi.pure.mini.js"></script></div></body></html>