<!DOCTYPE html><html lang="zh-CN" data-theme="light"><head><script>(()=>{const e=navigator.serviceWorker;e?e.register("/sw.js").then((async e=>{console.log("SWPP 注册成功");try{await e.periodicSync.register("update",{minInterval:864e5})}catch(e){console.log("Periodic Sync 注册失败",e)}})).catch((e=>console.error("SWPP 注册失败",e))):console.warn("当前浏览器不支持 SW")})()</script><meta charset="UTF-8"><meta http-equiv="X-UA-Compatible" content="IE=edge"><meta name="viewport" content="width=device-width,initial-scale=1,viewport-fit=cover"><title>Spikformer脉冲神经网络学习 | LiuShen's Blog</title><meta name="author" content="LiuShen"><meta name="copyright" content="LiuShen"><meta name="format-detection" content="telephone=no"><meta name="theme-color" content="#ffffff"><meta name="robots" content="index, follow"><meta name="description" content="近期我们进行了人工智能实训，我们小组选择的是脉冲神经网络，不同于原先的神经网络，这个网络采用的是脉冲信号，目前脉冲神经网络的效果并不是很好，但是因为是一个全新的神经网络架构，并且基于生物启发的计算方式，使得它们在处理稀疏和非结构化数据时具有独特的优势。"><meta property="og:type" content="article"><meta property="og:title" content="Spikformer脉冲神经网络学习"><meta property="og:url" content="https://blog.liushen.fun/posts/67189760/"><meta property="og:site_name" content="LiuShen's Blog"><meta property="og:description" content="近期我们进行了人工智能实训，我们小组选择的是脉冲神经网络，不同于原先的神经网络，这个网络采用的是脉冲信号，目前脉冲神经网络的效果并不是很好，但是因为是一个全新的神经网络架构，并且基于生物启发的计算方式，使得它们在处理稀疏和非结构化数据时具有独特的优势。"><meta property="og:locale" content="zh_CN"><meta property="og:image" content="https://p.liiiu.cn/i/2024/07/06/6688d4b63b50f.webp"><meta property="article:published_time" content="2024-07-06T05:33:21.000Z"><meta property="article:modified_time" content="2024-07-06T05:33:21.000Z"><meta property="article:author" content="LiuShen"><meta property="article:tag" content="学习笔记"><meta property="article:tag" content="机器学习"><meta property="article:tag" content="神经网络"><meta name="twitter:card" content="summary"><meta name="twitter:image" content="https://p.liiiu.cn/i/2024/07/06/6688d4b63b50f.webp"><link rel="shortcut icon" href="/favicon.ico"><link 
rel="canonical" href="https://blog.liushen.fun/posts/67189760/"><link rel="preconnect" href="https://jsd.liiiu.cn"><meta name="baidu-site-verification" content="codeva-i2bF9bXDJs"><meta name="sogou_site_verification" content="1rIjtK6RhE"><meta name="360-site-verification" content="b9a9f96ae38a1932a326f8ba255cc74b"><meta name="msvalidate.01" content="F7C246820EB04DD140494C98F93A97C7"><meta name="google-site-verification" content="VH3ZE6m42MZDqpTY2tD-XoxDt9iUj2HoiC3W2neB7F4"><link rel="manifest" href="/manifest.json"><link rel="apple-touch-icon" sizes="180x180" href="/apple-touch-icon.png"><link rel="icon" type="image/png" sizes="32x32" href="/config/img/pwa/favicon-32x32.png"><link rel="icon" type="image/png" sizes="16x16" href="/config/img/pwa/favicon-16x16.png"><link rel="mask-icon" href="/config/img/pwa/favicon-mask.svg" color="#5bbad5"><link rel="stylesheet" href="/css/index.css?v=5.0.0"><link rel="stylesheet" href="/css/custom.css?v=5.0.0"><link rel="stylesheet" href="https://jsd.liiiu.cn/npm/@fortawesome/fontawesome-free@6.7.2/css/all.min.css"><link rel="stylesheet" href="https://jsd.liiiu.cn/npm/@fancyapps/ui@5.0.36/dist/fancybox/fancybox.min.css" media="print" onload='this.media="all"'><script>(()=>{const e={set:(e,t,o)=>{if(!o)return;const n=Date.now()+864e5*o;localStorage.setItem(e,JSON.stringify({value:t,expiry:n}))},get:e=>{const t=localStorage.getItem(e);if(!t)return;const{value:o,expiry:n}=JSON.parse(t);if(!(Date.now()>n))return o;localStorage.removeItem(e)}};window.btf={saveToLocal:e,getScript:(e,t={})=>new Promise(((o,n)=>{const a=document.createElement("script");a.src=e,a.async=!0,Object.entries(t).forEach((([e,t])=>a.setAttribute(e,t))),a.onload=a.onreadystatechange=()=>{a.readyState&&!/loaded|complete/.test(a.readyState)||o()},a.onerror=n,document.head.appendChild(a)})),getCSS:(e,t)=>new Promise(((o,n)=>{const 
a=document.createElement("link");a.rel="stylesheet",a.href=e,t&&(a.id=t),a.onload=a.onreadystatechange=()=>{a.readyState&&!/loaded|complete/.test(a.readyState)||o()},a.onerror=n,document.head.appendChild(a)})),addGlobalFn:(e,t,o=!1,n=window)=>{const a=n.globalFn||{};a[e]=a[e]||{},o&&a[e][o]||(a[e][o||Object.keys(a[e]).length]=t,n.globalFn=a)}};const t=()=>{document.documentElement.setAttribute("data-theme","dark"),null!==document.querySelector('meta[name="theme-color"]')&&document.querySelector('meta[name="theme-color"]').setAttribute("content","#0d0d0d")},o=()=>{document.documentElement.setAttribute("data-theme","light"),null!==document.querySelector('meta[name="theme-color"]')&&document.querySelector('meta[name="theme-color"]').setAttribute("content","#ffffff")};btf.activateDarkMode=t,btf.activateLightMode=o;const n=e.get("theme"),a=(new Date).getHours();void 0===n?a<=6||a>=18?t():o():"light"===n?o():t();const r=e.get("aside-status");void 0!==r&&document.documentElement.classList.toggle("hide-aside","hide"===r);/iPad|iPhone|iPod|Macintosh/.test(navigator.userAgent)&&document.documentElement.classList.add("apple")})()</script><script>const GLOBAL_CONFIG={root:"/",algolia:{appId:"7IX3UBC6JW",apiKey:"4ac2846352e499675081f1277fb961c1",indexName:"My Blog",hitsPerPage:6,languages:{input_placeholder:"搜索全站文章",hits_empty:"未找到符合您查询的内容：${query}",hits_stats:"找到 ${hits} 条结果，耗时 ${time} 毫秒"}},localSearch:void 
0,translate:{defaultEncoding:2,translateDelay:0,msgToTraditionalChinese:"繁",msgToSimplifiedChinese:"簡"},noticeOutdate:{limitDay:365,position:"top",messagePrev:"本篇文章从发布到现在已经隔了",messageNext:"天了，里面的内容可能过期了，你要自己甄别一下哟👉👈"},highlight:{plugin:"highlight.js",highlightCopy:!0,highlightLang:!0,highlightHeightLimit:400,highlightFullpage:!1,highlightMacStyle:!0},copy:{success:"😋复制啦！请注意版权信息呀！",error:"😪呜呜，复制失败了！",noSupport:"🤐浏览器不支持呢，你用的啥玩意？"},relativeDate:{homepage:!1,post:!1},runtime:"天",dateSuffix:{just:"刚刚",min:"分钟前",hour:"小时前",day:"天前",month:"个月前"},copyright:{limitCount:1e3,languages:{author:"作者: LiuShen",link:"链接: ",source:"来源: LiuShen's Blog",info:"著作权归作者所有。商业转载请联系作者获得授权，非商业转载请注明出处。"}},lightbox:"fancybox",Snackbar:{chs_to_cht:"已切换为繁体中文",cht_to_chs:"已切换为简体中文",day_to_night:"已切换为深色模式",night_to_day:"已切换为浅色模式",bgLight:"rgba(255, 255, 255, 0.509)",bgDark:"rgba(48, 48, 48, 0.509)",position:"top-right"},infinitegrid:{js:"https://jsd.liiiu.cn/npm/@egjs/infinitegrid@4.12.0/dist/infinitegrid.min.js",buttonText:"加载更多"},isPhotoFigcaption:!0,islazyload:!0,isAnchor:!1,percent:{toc:!0,rightside:!1},autoDarkmode:!1}</script><script id="config-diff">var GLOBAL_CONFIG_SITE={title:"Spikformer脉冲神经网络学习",isPost:!0,isHome:!1,isHighlightShrink:void 0,isToc:!0,postUpdate:"2024-07-06 13:33:21"}</script><link rel="stylesheet" href="https://jsd.liiiu.cn/gh/willow-god/Sharding-fonts/Yozai-Medium/result.min.css"><link rel="stylesheet" href="/config/memos/memos.css"><script defer src="/sw-dom.js"></script><link rel="stylesheet" href="https://jsd.liiiu.cn/npm/swiper@11.1.14/swiper-bundle.min.css" media="print" onload='this.media="all"'><link rel="stylesheet" href="/config/swiper/swiperstyle.css" media="print" onload='this.media="all"'><meta name="generator" content="Hexo 7.3.0"><link rel="alternate" href="/atom.xml" title="LiuShen's Blog" type="application/atom+xml"></head><body><div class="float-box right top"></div><div id="loading-box"><div class="loading-left-bg"></div><div 
class="loading-right-bg"></div><img class="load-image" src="" data-lazy-src="/config/img/preloader.gif" alt=""></div><script async>(()=>{const e=document.getElementById("loading-box"),d=document.body,t=()=>{d.style.overflow="",e.classList.add("loaded")},n=()=>{d.style.overflow="hidden",e.classList.remove("loaded")};n();let o=!1;window.addEventListener("load",(()=>{o||(t(),o=!0)})),setTimeout((()=>{o||(t(),o=!0)}),5e3),window.addEventListener("load",(()=>{t()})),setTimeout((function(){t()}),3e3),document.getElementById("loading-box").addEventListener("click",(()=>{t()})),document.addEventListener("pjax:send",(()=>{n()})),document.addEventListener("pjax:complete",(()=>{t()}))})()</script><div id="web_bg"></div><div id="sidebar"><div id="menu-mask"></div><div id="sidebar-menus"><div class="add-menu-container"><div id="travellings"><a class="site-page" href="https://www.travellings.cn/go.html" title="友链接力-随机开往" target="_blank" rel="noopener nofollow"><i class="fa-solid fa-bus fa-fw"></i></a></div><div id="ten-years"><a class="site-page" href="https://foreverblog.cn/go.html" title="友链接力-十年之约" target="_blank" rel="noopener nofollow"><i class="fa-brands fa-nfc-symbol fa-fw"></i></a></div></div><div class="is-center" id="sidebar-avatar"><div class="avatar-img is-center"><img class="mood-icon" src="" data-lazy-src="https://p.liiiu.cn/i/2025/03/14/67d301461a24a.webp" alt="🤤" onerror='this.onerror=null,this.src="/img/friend_404.gif"'><img src="" data-lazy-src="https://p.liiiu.cn/i/2025/03/13/67d2fc82d329c.webp" onerror='this.onerror=null,this.src="/img/friend_404.gif"' alt="avatar"></div><div class="author-info__name">LiuShen</div></div><div class="site-data is-center"><a href="/archives/"><div class="headline">文章</div><div class="length-num">72</div></a><a href="/tags/"><div class="headline">标签</div><div class="length-num">98</div></a><a href="/categories/"><div class="headline">分类</div><div class="length-num">3</div></a></div><a class="button--animated" id="card-info-btn" 
target="_blank" rel="noopener" href="https://github.com/willow-god"><i class="fab fa-github"></i><span>Follow Me 🛫</span></a><div class="menus_items visible"><div class="menus_item"><span class="site-page group hide"><i class="fa-fw fas fa-home"></i> <span>导航</span><i class="fas fa-chevron-down"></i></span><ul class="menus_item_child"><li><a class="site-page child" target="_blank" rel="noopener" href="https://www.liushen.fun/"><i class="fa-fw fa-solid fa-house-flag"></i> <span>个人主页</span></a></li><li><a class="site-page child" target="_blank" rel="noopener" href="https://xc.liushen.fun/"><i class="fa-fw fa fa-camera-retro"></i> <span>个人相册</span></a></li></ul></div><div class="menus_item"><span class="site-page group hide"><i class="fa-fw fa fa-graduation-cap"></i> <span>整理</span><i class="fas fa-chevron-down"></i></span><ul class="menus_item_child"><li><a class="site-page child" href="/archives/"><i class="fa-fw fas fa-archive"></i> <span>时光卷轴</span></a></li><li><a class="site-page child" href="/tags/"><i class="fa-fw fas fa-tags"></i> <span>文章标签</span></a></li><li><a class="site-page child" href="/categories/"><i class="fa-fw fas fa-folder-open"></i> <span>文章分类</span></a></li><li><a class="site-page child" href="/charts/"><i class="fa-fw fa-solid fa-chart-pie"></i> <span>文章通览</span></a></li></ul></div><div class="menus_item"><span class="site-page group hide"><i class="fa-fw fa-solid fa-user-group"></i> <span>友人</span><i class="fas fa-chevron-down"></i></span><ul class="menus_item_child"><li><a class="site-page child" href="/link/"><i class="fa-fw fas fa-link"></i> <span>友链展示</span></a></li><li><a class="site-page child" href="/addlink/"><i class="fa-fw fa fa-at"></i> <span>友链申请</span></a></li><li><a class="site-page child" href="/fcircle/"><i class="fa-fw fa-solid fa-circle-nodes"></i> <span>朋友动态</span></a></li></ul></div><div class="menus_item"><span class="site-page group hide"><i class="fa-fw fa fa-paper-plane"></i> <span>留言</span><i class="fas 
fa-chevron-down"></i></span><ul class="menus_item_child"><li><a class="site-page child" href="/comment/"><i class="fa-fw fa-solid fa-chalkboard"></i> <span>留言白板</span></a></li><li><a class="site-page child" target="_blank" rel="noopener" href="https://mm.liushen.fun/"><i class="fa-fw fa-solid fa-pen-nib"></i> <span>提笔摘星</span></a></li></ul></div><div class="menus_item"><span class="site-page group hide"><i class="fa-fw fa-solid fa-stethoscope"></i> <span>分享</span><i class="fas fa-chevron-down"></i></span><ul class="menus_item_child"><li><a class="site-page child" target="_blank" rel="noopener" href="https://gist.liushen.fun/"><i class="fa-fw fa-solid fa-code"></i> <span>代码片段</span></a></li><li><a class="site-page child" href="/subscribe/"><i class="fa-fw fa-solid fa-rss"></i> <span>订阅本站</span></a></li><li><a class="site-page child" target="_blank" rel="noopener" href="https://pan.liushen.fun/"><i class="fa-fw fa-solid fa-laptop-file"></i> <span>清羽云盘</span></a></li></ul></div><div class="menus_item"><span class="site-page group hide"><i class="fa-fw fa fa-list"></i> <span>关于</span><i class="fas fa-chevron-down"></i></span><ul class="menus_item_child"><li><a class="site-page child" href="/about/"><i class="fa-fw fa fa-address-card"></i> <span>站长资料</span></a></li><li><a class="site-page child" href="/shuoshuo/"><i class="fa-fw fa fa-commenting"></i> <span>日常说说</span></a></li><li><a class="site-page child" href="/devices/"><i class="fa-fw fa-solid fa-tachograph-digital"></i> <span>我的设备</span></a></li></ul></div></div></div></div><div class="post" id="body-wrap"><header class="post-bg fixed" id="page-header" style="background-image:url(https://p.liiiu.cn/i/2024/07/06/6688d4b63b50f.webp)"><nav id="nav"><div class="more-app-btn"><i class="fa-solid fa-fingerprint fa-fw"></i><div class="more-app-list-groups"><div class="more-app-list-groups-container"><div class="more-app-list-group"><div class="more-app-list-title">🦄常用网站</div><div class="more-app-list"><a 
class="more-app-list-item" href="https://www.liushen.fun/" rel="nofollow noopener" target="_blank"><i class="more-app-item-icon fa-solid fa-house"></i><span class="more-app-item-text">个人主页</span></a><a class="more-app-list-item" href="https://blog.liushen.fun/" rel="nofollow noopener" target="_blank"><i class="more-app-item-icon fa-solid fa-blog"></i><span class="more-app-item-text">个人博客</span></a><a class="more-app-list-item" href="https://xc.liushen.fun/" rel="nofollow noopener" target="_blank"><i class="more-app-item-icon fa-solid fa-image"></i><span class="more-app-item-text">个人相册</span></a><a class="more-app-list-item" href="https://github.com/willow-god" rel="nofollow noopener" target="_blank"><i class="more-app-item-icon fa-brands fa-github"></i><span class="more-app-item-text">Github</span></a></div></div><div class="more-app-list-group"><div class="more-app-list-title">😎周边站点</div><div class="more-app-list"><a class="more-app-list-item" href="https://gist.liushen.fun/" rel="nofollow noopener" target="_blank"><i class="more-app-item-icon fa-solid fa-code"></i><span class="more-app-item-text">代码片段</span></a><a class="more-app-list-item" href="https://mm.liushen.fun/" rel="nofollow noopener" target="_blank"><i class="more-app-item-icon fa-solid fa-pen-nib"></i><span class="more-app-item-text">提笔摘星</span></a><a class="more-app-list-item" href="https://pan.liushen.fun/" rel="nofollow noopener" target="_blank"><i class="more-app-item-icon fa-solid fa-cloud"></i><span class="more-app-item-text">清羽云盘</span></a><a class="more-app-list-item" href="https://um.liushen.fun/" rel="nofollow noopener" target="_blank"><i class="more-app-item-icon fa-solid fa-people-group"></i><span class="more-app-item-text">访客统计</span></a><a class="more-app-list-item" href="https://img.liiiu.cn/" rel="nofollow noopener" target="_blank"><i class="more-app-item-icon fa-solid fa-images"></i><span class="more-app-item-text">清羽图床</span></a><a class="more-app-list-item" 
href="https://chat.liushen.fun/" rel="nofollow noopener" target="_blank"><i class="more-app-item-icon fa-solid fa-robot"></i><span class="more-app-item-text">清羽AI</span></a><a class="more-app-list-item" href="https://bsz.liiiu.cn/" rel="nofollow noopener" target="_blank"><i class="more-app-item-icon fa-solid fa-chart-pie"></i><span class="more-app-item-text">不蒜计数</span></a><a class="more-app-list-item" href="https://status.liushen.fun/" rel="nofollow noopener" target="_blank"><i class="more-app-item-icon fa-solid fa-server"></i><span class="more-app-item-text">服务检测</span></a></div></div><div class="more-app-list-group"><div class="more-app-list-title">🚁实用工具</div><div class="more-app-list"><a class="more-app-list-item" href="https://tmail.qyliu.top/" rel="nofollow noopener" target="_blank"><i class="more-app-item-icon fa-solid fa-envelope"></i><span class="more-app-item-text">临时邮箱</span></a><a class="more-app-list-item" href="https://hot.liushen.fun/" rel="nofollow noopener" target="_blank"><i class="more-app-item-icon fa-solid fa-fire"></i><span class="more-app-item-text">每日热榜</span></a><a class="more-app-list-item" href="https://cover.qyliu.top/" rel="nofollow noopener" target="_blank"><i class="more-app-item-icon fa-solid fa-eye"></i><span class="more-app-item-text">封面设计</span></a><a class="more-app-list-item" href="https://mini-cover.qyliu.top/" rel="nofollow noopener" target="_blank"><i class="more-app-item-icon fa-solid fa-image"></i><span class="more-app-item-text">迷你封面</span></a><a class="more-app-list-item" href="https://icon.qyliu.top/" rel="nofollow noopener" target="_blank"><i class="more-app-item-icon fa-solid fa-icons"></i><span class="more-app-item-text">万能图标</span></a><a class="more-app-list-item" href="https://draw.qyliu.top/" rel="nofollow noopener" target="_blank"><i class="more-app-item-icon fa-solid fa-paintbrush"></i><span class="more-app-item-text">灵感白板</span></a><a class="more-app-list-item" href="https://mindmap.qyliu.top/" rel="nofollow 
noopener" target="_blank"><i class="more-app-item-icon fa-solid fa-network-wired"></i><span class="more-app-item-text">思维导图</span></a><a class="more-app-list-item" href="https://share.liushen.fun/#/" rel="nofollow noopener" target="_blank"><i class="more-app-item-icon fa-solid fa-share-nodes"></i><span class="more-app-item-text">内容中转</span></a></div></div></div></div></div><span id="blog-info"><a class="nav-site-title" href="/" title="LiuShen's Blog"><span class="site-name">LiuShen</span></a></span><div id="menus"></div><div class="menus_items visible"><div class="menus_item"><span class="site-page group hide"><i class="fa-fw fas fa-home"></i> <span>导航</span><i class="fas fa-chevron-down"></i></span><ul class="menus_item_child"><li><a class="site-page child" target="_blank" rel="noopener" href="https://www.liushen.fun/"><i class="fa-fw fa-solid fa-house-flag"></i> <span>个人主页</span></a></li><li><a class="site-page child" target="_blank" rel="noopener" href="https://xc.liushen.fun/"><i class="fa-fw fa fa-camera-retro"></i> <span>个人相册</span></a></li></ul></div><div class="menus_item"><span class="site-page group hide"><i class="fa-fw fa fa-graduation-cap"></i> <span>整理</span><i class="fas fa-chevron-down"></i></span><ul class="menus_item_child"><li><a class="site-page child" href="/archives/"><i class="fa-fw fas fa-archive"></i> <span>时光卷轴</span></a></li><li><a class="site-page child" href="/tags/"><i class="fa-fw fas fa-tags"></i> <span>文章标签</span></a></li><li><a class="site-page child" href="/categories/"><i class="fa-fw fas fa-folder-open"></i> <span>文章分类</span></a></li><li><a class="site-page child" href="/charts/"><i class="fa-fw fa-solid fa-chart-pie"></i> <span>文章通览</span></a></li></ul></div><div class="menus_item"><span class="site-page group hide"><i class="fa-fw fa-solid fa-user-group"></i> <span>友人</span><i class="fas fa-chevron-down"></i></span><ul class="menus_item_child"><li><a class="site-page child" href="/link/"><i class="fa-fw fas fa-link"></i> 
<span>友链展示</span></a></li><li><a class="site-page child" href="/addlink/"><i class="fa-fw fa fa-at"></i> <span>友链申请</span></a></li><li><a class="site-page child" href="/fcircle/"><i class="fa-fw fa-solid fa-circle-nodes"></i> <span>朋友动态</span></a></li></ul></div><div class="menus_item"><span class="site-page group hide"><i class="fa-fw fa fa-paper-plane"></i> <span>留言</span><i class="fas fa-chevron-down"></i></span><ul class="menus_item_child"><li><a class="site-page child" href="/comment/"><i class="fa-fw fa-solid fa-chalkboard"></i> <span>留言白板</span></a></li><li><a class="site-page child" target="_blank" rel="noopener" href="https://mm.liushen.fun/"><i class="fa-fw fa-solid fa-pen-nib"></i> <span>提笔摘星</span></a></li></ul></div><div class="menus_item"><span class="site-page group hide"><i class="fa-fw fa-solid fa-stethoscope"></i> <span>分享</span><i class="fas fa-chevron-down"></i></span><ul class="menus_item_child"><li><a class="site-page child" target="_blank" rel="noopener" href="https://gist.liushen.fun/"><i class="fa-fw fa-solid fa-code"></i> <span>代码片段</span></a></li><li><a class="site-page child" href="/subscribe/"><i class="fa-fw fa-solid fa-rss"></i> <span>订阅本站</span></a></li><li><a class="site-page child" target="_blank" rel="noopener" href="https://pan.liushen.fun/"><i class="fa-fw fa-solid fa-laptop-file"></i> <span>清羽云盘</span></a></li></ul></div><div class="menus_item"><span class="site-page group hide"><i class="fa-fw fa fa-list"></i> <span>关于</span><i class="fas fa-chevron-down"></i></span><ul class="menus_item_child"><li><a class="site-page child" href="/about/"><i class="fa-fw fa fa-address-card"></i> <span>站长资料</span></a></li><li><a class="site-page child" href="/shuoshuo/"><i class="fa-fw fa fa-commenting"></i> <span>日常说说</span></a></li><li><a class="site-page child" href="/devices/"><i class="fa-fw fa-solid fa-tachograph-digital"></i> <span>我的设备</span></a></li></ul></div></div><center id="name-container"><a id="page-name" 
href="javascript:btf.scrollToDest(0, 500)">PAGE_NAME</a></center><div id="nav-right"><div id="travellings"><a class="site-page" href="https://www.travellings.cn/go.html" title="友链接力-随机开往" target="_blank" rel="noopener nofollow"><i class="fa-solid fa-bus fa-fw"></i></a></div><div id="ten-years"><a class="site-page" href="https://foreverblog.cn/go.html" title="友链接力-十年之约" target="_blank" rel="noopener nofollow"><i class="fa-brands fa-nfc-symbol fa-fw"></i></a></div><div id="random"><a class="site-page" href="javascript:randomPost()" title="随机前往一个文章"><i class="fa-solid fa-shuffle fa-fw"></i></a></div><div id="search-button"><span class="site-page social-icon search"><i class="fas fa-search fa-fw"></i></span></div><div id="toggle-menu"><span class="site-page" href="javascript:void(0);" title="展开菜单"><i class="fas fa-indent fa-fw"></i></span></div></div></nav><div id="post-info"><h1 class="post-title">Spikformer脉冲神经网络学习</h1><div id="post-meta"><div class="meta-firstline"><span class="post-meta-date"><i class="far fa-calendar-alt fa-fw post-meta-icon"></i><span class="post-meta-label">发表于</span><time class="post-meta-date-created" datetime="2024-07-06T05:33:21.000Z" title="发表于 2024-07-06 13:33:21">2024-07-06</time><span class="post-meta-separator">|</span><i class="fas fa-history fa-fw post-meta-icon"></i><span class="post-meta-label">更新于</span><time class="post-meta-date-updated" datetime="2024-07-06T05:33:21.000Z" title="更新于 2024-07-06 13:33:21">2024-07-06</time></span><span class="post-meta-categories"><span class="post-meta-separator">|</span><i class="fas fa-inbox fa-fw post-meta-icon"></i><a class="post-meta-categories" href="/categories/learning/">学习资料</a></span></div><div class="meta-secondline"><span class="post-meta-separator">|</span><span class="post-meta-wordcount"><i class="far fa-file-word fa-fw post-meta-icon"></i><span class="post-meta-label">总字数:</span><span class="word-count">8.3k</span><span class="post-meta-separator">|</span><i class="far fa-clock 
fa-fw post-meta-icon"></i><span class="post-meta-label">阅读时长:</span><span>26分钟</span></span><span class="post-meta-separator">|</span><span class="post-meta-pv-cv" data-flag-title=""><i class="far fa-eye fa-fw post-meta-icon"></i><span class="post-meta-label">浏览量:</span><span id="busuanzi_page_pv"><i class="fa-solid fa-spinner fa-spin"></i></span></span><span class="post-meta-separator">|</span><span class="post-meta-commentcount"><i class="far fa-comments fa-fw post-meta-icon"></i><span class="post-meta-label">评论数:</span><a href="/posts/67189760/#post-comment"><span id="ArtalkCount"><i class="fa-solid fa-spinner fa-spin"></i></span></a></span></div></div></div><section class="main-hero-waves-area waves-area"><svg class="waves-svg" xmlns="http://www.w3.org/2000/svg" xlink="http://www.w3.org/1999/xlink" viewBox="0 24 150 28" preserveAspectRatio="none" shape-rendering="auto"><defs><path id="gentle-wave" d="M-160 44c30 0 58-18 88-18s58 18 88 18 58-18 88-18 58 18 88 18v44h-352Z"></path></defs><g class="parallax"><use href="#gentle-wave" x="48" y="0"></use><use href="#gentle-wave" x="48" y="3"></use><use href="#gentle-wave" x="48" y="5"></use><use href="#gentle-wave" x="48" y="7"></use></g></svg></section></header><main class="layout" id="content-inner"><div id="post"><article class="post-content" id="article-container"><div class="ai-summary"><div class="ai-head"><div class="ai-head-left"><div class="ai-circle ai-circle-1"></div><div class="ai-circle ai-circle-2"></div><div class="ai-circle ai-circle-3"></div></div><div class="ai-head-right"><a class="ai-about-ai" href="/posts/40702a0d/">关于AI</a></div></div><div class="ai-explanation" style="display:block" 
data-summary="这里是清羽AI，这篇文章主要介绍了Spikformer脉冲神经网络的学习与研究。Spikformer是一种结合了脉冲神经网络（SNNs）和Transformer架构的混合模型，旨在利用两者的优势来实现更高效的计算和更优异的性能。文章首先回顾了SNNs的基本原理，包括LIF模型和STDP规则，以及它们在处理时序数据时的独特优势。接着，文章探讨了Transformer模型的自注意力机制及其在计算机视觉任务中的应用。Spikformer的创新点在于引入了脉冲自注意力机制（SSA），通过脉冲形式的查询、键和值进行计算，避免了浮点数运算和softmax归一化，从而降低了计算开销和能耗。文章还详细描述了Spikformer的架构设计，包括脉冲补丁切割模块、Spikformer编码器块和线性分类头，以及如何在PyTorch环境下实现Spikformer模型。此外，文章还对比了Spikformer与Baseline模型和SDTv2模型的训练过程和参数配置，展示了Spikformer在不同数据集上的性能表现。整体而言，这篇文章为读者提供了对Spikformer模型深入的理解，以及如何在实践中应用该模型进行图像分类任务。">清羽AI正在绞尽脑汁想思路ING···</div><div class="ai-title"><div class="ai-title-left"><i class="fa-brands fa-slack"></i><div class="ai-title-text">清羽のAI摘要</div></div><div class="ai-tag" id="ai-tag">GLM-4-Flash</div></div></div><h2 id="碎碎念"><a href="#碎碎念" class="headerlink" title="碎碎念"></a>碎碎念</h2><p>这篇文章鸽的比较久，也不是说懒（其实就是懒），主要是这两天实在太忙了（试图找借口），比如实习，每天都在办公室坐着（坐着摸鱼），老师也会不断地布置新任务以小组方式实现（其实根本没啥任务），而这个脉冲神经网络是我们六月底进行的研究，在研究的过程中感觉比较有意思（又能水一篇了）。所以还是决定将其分享到这里，可能稍微有些地方有问题（菜就多练），欢迎大家指正！</p><p><strong>注意：</strong>本篇文章可能稍微有些枯燥乏味，因为是以实验报告的内容，可能没有一些华丽的辞藻，更多是关于相关领域的一些术词，可能仅供专业人士研究。</p><h2 id="背景"><a href="#背景" class="headerlink" title="背景"></a>背景</h2><p>在当前的深度学习领域，脉冲神经网络（Spiking Neural Networks, SNNs）和Transformer架构的融合正成为一种新的研究趋势。这些模型在各种任务中显示出显著的性能提升。本文的研究对象Spikformer即是一种结合了SNNs和Transformer架构的混合模型，旨在利用两者的优势来实现更高效的计算和更优异的性能</p><h3 id="脉冲神经网络"><a href="#脉冲神经网络" class="headerlink" title="脉冲神经网络"></a>脉冲神经网络</h3><p>脉冲神经网络（SNNs）是模仿生物神经系统的一类神经网络，与传统的人工神经网络（ANNs）不同，SNNs通过离散的脉冲信号进行信息传递。每个神经元在接收到其他神经元的脉冲信号时，积累电压，一旦电压达到一定阈值，神经元就会发出一个脉冲信号。这种基于事件驱动的计算方式，使得SNNs在理论上能够实现更高效的计算和更低的能耗。</p><p>SNNs的主要优点在于其生物启发的计算方式，这使得它们在处理稀疏和非结构化数据时具有独特的优势。然而，SNNs也面临着一些挑战，如训练的复杂性和硬件实现的难度。近年来，随着脉冲神经网络训练算法的发展，如脉冲时间依赖可塑性（STDP）和基于梯度的优化方法，使得SNNs在实际应用中变得更加可行。</p><h3 id="Transformer模型"><a href="#Transformer模型" class="headerlink" title="Transformer模型"></a>Transformer模型</h3><p>Transformer 是一种基于自注意力机制的模型，最早应用于自然语言处理任务，如机器翻译。其核心是自注意力机制（Self-Attention），能够有效捕捉输入序列中各元素之间的依赖关系。近年来，Transformer 
在计算机视觉任务中也取得了显著成果，如图像分类、目标检测和语义分割等。自注意力机制通过计算查询（Query）、键（Key）和值（Value）之间的点积来加权特征，从而捕捉全局依赖关系。</p><p>尽管 Transformer 在视觉任务中的表现优异，但其计算复杂度较高，尤其是在处理大规模图像时。这促使研究者们探索改进自注意力机制的方法，例如使用卷积层进行特征提取、简化自注意力计算等。</p><h3 id="Spikformer"><a href="#Spikformer" class="headerlink" title="Spikformer"></a>Spikformer</h3><p>脉冲神经网络（SNNs）是第三代神经网络，与传统的人工神经网络（ANNs）相比，SNNs 模仿生物神经元的活动方式，通过脉冲序列来传递信息。SNNs 中的神经元会在接收到足够的电信号后发出脉冲（spike），这种方式不仅生物学上更具可解释性，而且在硬件实现上能显著降低能量消耗。SNNs 的低功耗和事件驱动特性使其在需要高能效的应用场景中表现出色，如嵌入式系统和物联网设备。</p><p>SNNs 的研究主要集中在两个方面：一种是将传统的 ANN 转换为 SNN（ANN-to-SNN conversion），另一种是直接训练 SNN（direct training）。转换方法通过替换 ANN 中的激活函数为脉冲神经元实现高效的性能移植，但需要较多的时间步长来逼近 ANN 的输出，从而导致较高的延迟。而直接训练方法通过时间步长展开 SNN 并采用反向传播算法训练模型，尽管在事件触发机制上存在非连续性，但可以通过替代梯度法进行训练。</p><p>本文中的Spikformer 是将 SNN 与 Transformer 结合的一种新型架构，旨在结合两者的优势，既保留 SNN 的低功耗特性，又利用 Transformer 的全局特征捕捉能力。Spikformer 引入了脉冲自注意力机制（Spiking Self Attention, SSA），其创新点在于：</p><ol><li>脉冲自注意力机制（SSA）：传统的自注意力机制需要进行浮点数运算并通过 softmax 函数归一化权重，而 SSA 使用脉冲形式的查询、键和值（仅包含 0 和 1），避免了乘法运算，使得计算更加高效，能耗更低。</li><li>无 softmax 的脉冲自注意力：由于脉冲形式的查询和键计算出的注意力图天然非负，因此不需要 softmax 进行归一化，从而减少了计算开销。</li><li>架构设计：Spikformer 的设计考虑了 SNN 的计算特性，利用逻辑与操作和加法进行注意力计算，进一步降低了计算复杂度和能耗。</li></ol><h2 id="原理"><a href="#原理" class="headerlink" title="原理"></a>原理</h2><h3 id="脉冲神经网络-1"><a href="#脉冲神经网络-1" class="headerlink" title="脉冲神经网络"></a>脉冲神经网络</h3><p>脉冲神经网络（Spiking Neural Networks, SNNs）的核心原理在于模拟生物神经元通过电脉冲进行信息传递的过程。在生物神经系统中，神经元通过膜电位的变化来响应外界刺激，当膜电位达到一定阈值时，神经元会发放一个动作电位，即脉冲，随后膜电位迅速复位。SNN正是基于这一机制进行建模的。</p><p>在SNN中，一个基本的神经元模型是Leaky Integrate-and-Fire（LIF）模型。该模型描述了神经元膜电位随时间的变化情况，其动态可以用以下微分方程表示：</p><p><img src="" data-lazy-src="https://p.liiiu.cn/i/2024/07/06/6688d76d8c6e9.webp" alt="微分方程"></p><p>其中：</p><ol><li><p>Vm(t)代表神经元在时间t的膜电位</p></li><li><p>Vrest是静息膜电位</p></li><li><p>t是膜时间常数</p></li><li><p>I(t) 是输入电流</p></li></ol><p>当膜电位Vm(t) 达到一个预设的发放阈值Vthresh时，神经元发放一个脉冲，并将膜电位重置到一个较低的值，如静息膜电位。</p><p>LIF模型中的“leak”效应模拟了膜电位自然衰减的过程，即使没有新的输入，膜电位也会逐渐向静息状态衰减。这种leak效应是必要的，因为它防止了膜电位的无限积累。</p><p>在SNN中，突触的权重更新通常遵循Spike-Time-Dependent 
Plasticity（STDP）规则。STDP是一种基于脉冲时间差异的突触可塑性机制，其基本思想是：如果前突触神经元的脉冲紧接着后突触神经元的脉冲之后到达，那么突触权重会增强（Long-Term Potentiation, LTP）；反之，如果后突触神经元的脉冲先于前突触神经元的脉冲到达，突触权重会减弱（Long-Term Depression, LTD）。这种权重更新机制可以用以下公式描述：</p><p><img src="" data-lazy-src="https://p.liiiu.cn/i/2024/07/06/6688d7c9d048b.webp" alt="权重更新机制"></p><p>其中：</p><ol><li><p>A+和A-是LTP和LTD的强度参数</p></li><li><p>tpre和tpost分别是突触前和突触后脉冲的时间</p></li><li><p>t+和t-是LTP和LTD的时间窗口参数</p></li></ol><p>STDP规则的实现通常依赖于突触后神经元脉冲发放时的短期变化，这可以通过引入两个状态变量来实现，分别对应突触前和突触后的活动痕迹。当突触前或突触后神经元发放脉冲时，相应的痕迹变量会更新，从而影响突触权重的调整。</p><p>SNN的另一个关键特性是其对时间的编码能力。神经元发放脉冲的时间点和频率都可以携带信息。这种时间编码机制使得SNN能够处理时序数据，并且在某些任务中展现出比传统人工神经网络（ANN）更优越的性能。</p><ol><li><p>时间编码通过脉冲发放的时间间隔来表示信号的强度。例如，较短的时间间隔表示较高的信号强度。</p></li><li><p>频率编码通过脉冲发放的频率来表示信号的强度。例如，较高的脉冲频率表示较高的信号强度。</p></li><li><p>相位编码通过脉冲相对于某一参考时间点的相位来表示信号的强度。相位编码在处理周期性信号时特别有效。</p></li></ol><p>脉冲神经网络的结构可以是前馈网络、反馈网络或递归网络。不同的结构适用于不同的任务，前馈网络是最简单的网络结构，信息从输入层传递到输出层，中间可以有多个隐藏层。每一层的神经元只接收前一层的脉冲输入，递归网络包含环路，神经元不仅可以接收来自前一层的输入，还可以接收来自本层或前几层的反馈输入。递归网络适用于处理时间序列数据，因为它们能够保留之前的状态信息，混合网络结合了前馈和递归网络的特点，能够同时处理静态和动态数据。它们通常在生物神经网络的建模中使用。</p><p>与传统的人工神经网络相比，脉冲神经网络具有更高的能量效率和生物合理性。SNNs 通过脉冲的离散发放和事件驱动的计算方式，显著减少了计算和存储的开销。这使得 SNNs 特别适用于低功耗设备和实时应用，SNNs 的事件驱动计算方式意味着只有在脉冲发放时才进行计算，避免了大量的无效计算。每个脉冲的发放都是一个事件，触发相关神经元的状态更新，由于 SNNs 只在脉冲事件发生时进行计算，这种计算方式极大地降低了功耗。生物神经元也是通过类似的方式实现高效的能量利用，从而支持复杂的认知功能。</p><h3 id="注意力机制-Transformer"><a href="#注意力机制-Transformer" class="headerlink" title="注意力机制&amp;Transformer"></a>注意力机制&amp;Transformer</h3><p>Transformer架构中的注意力机制是其核心组件之一，对其性能和能力的提升至关重要。Transformer模型主要由编码器和解码器组成，而注意力机制在这两个模块中都扮演了关键角色。</p><p>在Transformer中，自注意力机制（Self-Attention）是用于捕捉序列中元素之间关系的主要方法。与传统的序列处理模型不同，自注意力机制能够并行处理整个输入序列，并且可以灵活地建模长距离依赖关系。</p><p>计算过程：</p><ol><li><p>给定输入序列X={x1,x2,x3,……}，首先通过线性变换将每个输入xn映射到查询（Query）、键（Key）和值（Value）向量：</p><p><img src="" 
data-lazy-src="https://p.liiiu.cn/i/2024/07/06/6688d87390ed3.webp"></p></li><li><p>计算注意力得分：接下来，计算查询向量和键向量的点积，得到注意力得分矩阵E，E=QK^T，这种点积操作可以高效地计算序列中每个元素对其他所有元素的相关性。</p></li><li><p>归一化得分：为了使得得分更具可比性，通常使用softmax函数将其归一化，得到注意力权重矩阵A，这一步确保了每个查询向量的注意力权重之和为1。</p></li><li><p>最后，将注意力权重矩阵A应用于值向量V，得到输出表示，从而捕捉到序列中元素之间的全局依赖关系。</p></li></ol><p>Transformer中的自注意力机制进一步扩展为多头注意力机制（Multi-Head Attention）。多头注意力机制的基本思想是使用多个独立的注意力头，每个头在不同的子空间中学习序列元素之间的关系。</p><ol><li><p>对于每个输入序列，通过不同的线性变换得到h组查询、键和值向量：</p><p><img src="" data-lazy-src="https://p.liiiu.cn/i/2024/07/06/6688d8e8441cf.webp"></p></li><li><p>独立计算注意力：每个注意力头独立计算自注意力，得到一组输出：</p><p><img src="" data-lazy-src="https://p.liiiu.cn/i/2024/07/06/6688d8fff2782.webp"></p></li><li><p>连接和线性变换：将所有注意力头的输出连接起来，形成一个新的表示，随后通过线性变换得到最终输出：</p><p><img src="" data-lazy-src="https://p.liiiu.cn/i/2024/07/06/6688d9171068a.webp"></p><p>其中，W0是用于整合不同头输出的权重矩阵。</p></li></ol><p>在编码器中，自注意力机制用于每一层的输入表示，捕捉输入序列内部的依赖关系。编码器中的每一层包括两个子层：多头自注意力机制和前馈神经网络（Feed-Forward Neural Network）。</p><p>多头自注意力子层：通过多个注意力头，并行处理输入序列的每个元素，捕捉到序列中不同位置的关系。</p><p>前馈神经网络子层：对注意力机制的输出进行进一步的非线性变换，以增加模型的表达能力。</p><p>每个子层之后都加了残差连接（Residual Connection）和层归一化（Layer Normalization），确保了梯度的稳定传递，并加速了训练收敛。</p><p>解码器中除了自注意力机制外，还引入了编码器-解码器注意力机制（Encoder-Decoder Attention），以便在生成每个输出时参考编码器的表示。</p><p>自注意力子层：与编码器类似，用于捕捉解码器内部生成序列的依赖关系。</p><p>编码器-解码器注意力子层：将解码器的查询向量与编码器的键和值向量进行注意力计算，结合编码器的全局信息生成当前输出。</p><h3 id="Spikformer-1"><a href="#Spikformer-1" class="headerlink" title="Spikformer"></a>Spikformer</h3><p>Spikformer结合了脉冲神经网络（SNN）和Transformer架构，旨在利用两者的优势来提升处理复杂时序数据的能力。其整体架构由三个主要部分组成：脉冲补丁切割模块（SPS）、Spikformer编码器块和线性分类头。</p><p>首先，输入图像经过脉冲补丁切割模块（SPS）处理。该模块包括一个卷积层（Conv2D），用于提取输入图像的初始特征。然后，通过批归一化（Batch Normalization，BN）对特征进行归一化处理，批归一化在SNN中替代了常用的层归一化，因为层归一化不适用于SNN。接下来，最大池化（Max Pooling，MP）用于对特征进行下采样，最终生成脉冲特征。这一步的输出是一个脉冲序列，包含时序信息的脉冲特征。</p><p>进入Spikformer编码器块后，首先对脉冲序列进行脉冲位置嵌入（Spiking Position Embedding），以保留时序信息。嵌入后的脉冲序列被输入到脉冲自注意力机制（Spiking Self 
Attention，SSA）中。自注意力机制的核心是计算输入序列中各元素间的相关性，从而在全局范围内捕捉数据特征。SSA在Spikformer中专门为脉冲序列设计，包含线性变换层和批归一化层，通过矩阵点积（Matrix Dot-Product）计算序列间的相似度，并通过缩放（Scale）和软最大（Softmax）函数对相似度进行归一化。然后，将归一化后的相似度与输入序列相乘，生成新的脉冲序列表示。</p><p>脉冲自注意力机制的输出通过多层感知器（Multi-Layer Perceptron，MLP）进行进一步处理。MLP由多个线性层和激活函数组成，能够对数据进行非线性变换，从而增强模型的表达能力。在Spikformer中，MLP模块同样采用批归一化来稳定训练过程。最终，经过多个Spikformer编码器块堆叠，得到高维的脉冲序列表示。</p><p>在模型的最后阶段，经过编码器块处理后的脉冲序列被送入线性分类头。线性分类头将高维脉冲序列映射到分类标签空间，生成最终的分类结果。这一步的输出可以用于各种任务，如图像分类或时序数据分析。</p><p><img src="" data-lazy-src="https://p.liiiu.cn/i/2024/07/06/6688d965d3dd8.webp" alt="SkipFormer"></p><p>在SNN的基本原理中，脉冲神经元的膜电位随时间变化的动态过程可以用Leaky Integrate-and-Fire（LIF）模型描述，其微分方程如下：</p><p><img src="" data-lazy-src="https://p.liiiu.cn/i/2024/07/06/6688d9879261a.webp"></p><p>在Spikformer中，脉冲自注意力机制（SSA）是关键组件之一。自注意力机制通过以下步骤计算输入序列的注意力权重：</p><ul><li><p>线性变换：将输入脉冲序列映射到查询（Query），键（Key）和值（Value）表示。</p></li><li><p>矩阵点积：计算查询和键之间的相似度，公式为：</p><p><img src="" data-lazy-src="https://p.liiiu.cn/i/2024/07/06/6688d9aa1382c.webp"></p></li><li><p>其中，Q是查询矩阵，K是键矩阵，V是值矩阵，dk是键的维度。通过矩阵点积计算得到的相似度矩阵经过缩放和软最大处理，生成归一化的注意力权重</p><p>加权求和：将归一化的注意力权重与值矩阵相乘，生成新的脉冲序列表示。</p></li></ul><h2 id="尝试实现"><a href="#尝试实现" class="headerlink" title="尝试实现"></a>尝试实现</h2><h3 id="环境"><a href="#环境" class="headerlink" title="环境"></a>环境</h3><ol><li><p>硬件</p><p>本地训练（Baseline模型训练）</p><ul><li><p>CPU: 13th Gen Intel(R) Core(TM) i5-13490F</p></li><li><p>GPU: NVIDIA GeForce RTX 4070 Ti SUPER</p></li></ul><p>阿里云（Spikformer与SDTv2训练）</p><ul><li><p>CPU: Intel(R) Xeon(R) Platinum 8168 Processor</p></li><li><p>GPU: NVIDIA A10</p></li></ul><p>AutoDL（Spikformer与SDTv2训练）</p><ul><li><p>CPU: Intel(R) Xeon(R) Platinum 8352V</p></li><li><p>GPU: 2 * NVIDIA GeForce RTX 4090</p></li></ul></li><li><p>软件</p><p>高性能并行计算平台: NVIDIA CUDA</p><ul><li>CUDA提供了对NVIDIA GPU的直接访问，允许我们利用GPU的并行计算能力来加速深度学习模型的训练过程。</li></ul><p>机器学习框架: PyTorch</p><ul><li>PyTorch是一个开源的深度学习框架，提供了灵活且高效的张量计算和动态计算图支持，方便我们构建和训练复杂的深度学习模型。</li></ul><p>图片分类框架（Baseline模型选用）: 
mmpretrain</p><ul><li>mmpretrain是一个专注于图像分类任务的框架，提供了许多预训练模型和训练工具，使得Baseline模型的训练更加高效和便捷。</li></ul></li></ol><h3 id="数据集"><a href="#数据集" class="headerlink" title="数据集"></a>数据集</h3><p><strong>CIFAR-10数据集</strong></p><p>CIFAR-10数据集是一个常用于图像分类任务的标准数据集。它由60,000张32×32的RGB彩色图片组成，这些图片分属于10个不同的类别。每个类别包含6,000张图片，类别包括飞机、汽车、鸟、猫、鹿、狗、青蛙、马、船和卡车。数据集进一步划分为训练集和测试集：</p><ul><li><p>训练集：50,000张图片，每个类别5,000张</p></li><li><p>测试集：10,000张图片，每个类别1,000张</p></li></ul><p>CIFAR-10数据集因其规模适中和分类任务的多样性，成为评估图像分类算法性能的一个重要基准。</p><p><strong>ILSVRC2012 (ImageNet)数据集</strong></p><p>ILSVRC2012数据集，通常称为ImageNet数据集，是一个大规模的图像数据集，用于图像分类和物体检测任务。该数据集包含超过1,300,000张RGB彩色图片，分为1,000个类别。每个类别包含约1,300张图片。数据集同样分为训练集和测试集：</p><ul><li><p>训练集：1,300,000张图片，每个类别约1,300张</p></li><li><p>测试集：50,000张图片，每个类别50张</p></li></ul><h3 id="Baseline训练"><a href="#Baseline训练" class="headerlink" title="Baseline训练"></a>Baseline训练</h3><p>在正式复现论文代码前，我们实现了基于残差模型的Baseline模型作为基准参考。该基准模型的详细配置和训练策略如下：</p><p>在模型选型方面，我们选择了ResNet-101作为主干网络。ResNet-101是一种深度残差网络，通过引入残差连接，有效缓解了深层网络中的梯度消失问题，从而使得网络能够在更深的层次上进行训练和优化。在分类颈部分，我们采用了GlobalAveragePooling层。GlobalAveragePooling层将每个通道的特征图的平均值作为输出，从而减少参数数量，降低过拟合的风险，并提高模型的泛化能力。分类头方面，我们使用了LinearClsHead。LinearClsHead是一个简单的全连接层，用于将全局平均池化后的特征映射到最终的类别预测上。</p><p>为了扩增数据量，提高模型的泛化能力，我们在训练过程中采用了多种数据增强技术。首先，我们使用了RandomResizedCrop，该技术通过随机裁剪并调整图片大小，有助于模型学习不同的尺度和视角的特征。其次，我们引入了RandomFlip，通过随机水平翻转图片，增加训练数据的多样性。此外，ColorJitter技术用于随机改变图片的亮度、对比度、饱和度和色调，使得模型对颜色变化更加鲁棒。为了进一步增加训练数据的多样性，我们使用了Mixup技术，通过将两张图片及其标签按一定比例混合，提供了一种正则化效果。最后，我们还采用了CutMix技术，将一张图片的部分区域替换为另一张图片的相应区域，从而进一步增加训练数据的多样性和模型的鲁棒性。</p><p>在优化器和学习率策略方面，我们选择了Adam优化器。Adam优化器结合了动量和自适应学习率的方法，能够更快地收敛并且对学习率的选择不那么敏感。初始学习率设为0.001。在学习率策略上，我们使用了MultiStepLR。在训练的过程中，学习率在特定的训练轮数后进行衰减。具体地，学习率在第30、60、90轮时降低10倍，这样的策略有助于模型在初期快速收敛，同时在后期进行微调以达到更好的性能。</p><figure class="highlight python"><table><tbody><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span 
class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br><span class="line">13</span><br><span class="line">14</span><br><span class="line">15</span><br><span class="line">16</span><br><span class="line">17</span><br><span class="line">18</span><br><span class="line">19</span><br><span class="line">20</span><br></pre></td><td class="code"><pre><span class="line">model = <span class="built_in">dict</span>(</span><br><span class="line">    <span class="built_in">type</span>=<span class="string">'ImageClassifier'</span>,</span><br><span class="line">    backbone=<span class="built_in">dict</span>(</span><br><span class="line">        <span class="built_in">type</span>=<span class="string">'ResNet'</span>,</span><br><span class="line">        depth=<span class="number">101</span>,</span><br><span class="line">        num_stages=<span class="number">4</span>,</span><br><span class="line">        out_indices=(<span class="number">3</span>, ),</span><br><span class="line">        style=<span class="string">'pytorch'</span>),</span><br><span class="line">    neck=<span class="built_in">dict</span>(<span class="built_in">type</span>=<span class="string">'GlobalAveragePooling'</span>),</span><br><span class="line">    head=<span class="built_in">dict</span>(</span><br><span class="line">        <span class="built_in">type</span>=<span class="string">'LinearClsHead'</span>,</span><br><span class="line">        num_classes=<span class="number">100</span>,</span><br><span class="line">        in_channels=<span class="number">2048</span>,</span><br><span class="line">        loss=<span class="built_in">dict</span>(<span class="built_in">type</span>=<span class="string">'CrossEntropyLoss'</span>, loss_weight=<span class="number">1.0</span>),</span><br><span class="line">    ),</span><br><span class="line">    train_cfg=<span 
class="built_in">dict</span>(augments=[</span><br><span class="line">        <span class="built_in">dict</span>(<span class="built_in">type</span>=<span class="string">'Mixup'</span>, alpha=<span class="number">0.8</span>),</span><br><span class="line">        <span class="built_in">dict</span>(<span class="built_in">type</span>=<span class="string">'CutMix'</span>, alpha=<span class="number">1.0</span>),</span><br><span class="line">    ])</span><br><span class="line">)</span><br></pre></td></tr></tbody></table></figure><h3 id="Spikformer模型复现"><a href="#Spikformer模型复现" class="headerlink" title="Spikformer模型复现"></a>Spikformer模型复现</h3><p>在复现论文提供的Spikformer源代码时，我们选择了以下参数配置：</p><p>首先，输入图片的尺寸设定为224像素乘以224像素。这一尺寸与许多图像分类任务中的标准输入尺寸一致，有助于确保模型的通用性。Embedding尺寸被设定为512维，这意味着输入图片在经过初步处理后将被映射到一个512维的特征空间中。</p><p>在Spikformer的结构中，我们使用了SPS Patch尺寸为16像素乘以16像素。这意味着输入图片将被划分为若干个16x16的补丁，每个补丁将被单独处理。Spikformer的层数设定为8层，这为模型提供了足够的深度，以便能够捕捉到图像中的复杂特征。每一层中的注意力头数目为8个，这允许模型能够在多个子空间中并行地关注不同的特征，从而提升模型的表现能力。</p><p>MLP隐藏层的比例被设定为4倍，即2048维。这意味着在多层感知机（MLP）部分，隐藏层的维度是输入维度的四倍，这样的设计有助于增加模型的非线性表示能力，从而增强模型的分类性能。分类头部分，我们选择了一个线性分类头，用于将特征映射到最终的类别预测上。</p><p>在优化器的选择上，我们使用了AdamW优化器。AdamW是一种结合了Adam优化器和权重衰减技术的优化器，能够有效防止过拟合，并且在许多任务中表现优异。初始学习率设定为0.0005，训练的前20轮作为预热轮，学习率从0.000001逐渐增大到0.0005，之后使用余弦学习率调度进行调整。这样的学习率策略有助于模型在训练初期稳定收敛，并在后期通过逐渐减小学习率来达到更好的性能表现。</p><p>整个训练过程共进行了300轮。在数据增强技术方面，我们选用了与之前基准模型训练相同的技术，包括RandomResizedCrop、RandomFlip、ColorJitter、Mixup和CutMix等。这些数据增强技术能够有效扩充训练数据，提高模型的泛化能力。</p><p>通过上述参数配置和训练策略，我们成功复现了论文中的Spikformer模型，并通过详细的训练和评估过程，确保模型能够达到预期的性能表现。</p><figure class="highlight python"><table><tbody><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br></pre></td><td class="code"><pre><span 
class="line"><span class="meta">@register_model</span></span><br><span class="line"><span class="keyword">def</span> <span class="title function_">spikformer</span>(<span class="params">pretrained=<span class="literal">False</span>, **kwargs</span>):</span><br><span class="line">    model = Spikformer(</span><br><span class="line">        img_size_h=<span class="number">224</span>, img_size_w=<span class="number">224</span>,</span><br><span class="line">        patch_size=<span class="number">16</span>, embed_dims=<span class="number">512</span>, num_heads=<span class="number">8</span>, mlp_ratios=<span class="number">4</span>,</span><br><span class="line">        in_channels=<span class="number">3</span>, num_classes=<span class="number">100</span>, qkv_bias=<span class="literal">False</span>,</span><br><span class="line">        norm_layer=partial(nn.LayerNorm, eps=<span class="number">1e-6</span>), depths=<span class="number">8</span>, sr_ratios=<span class="number">1</span>,</span><br><span class="line">        **kwargs</span><br><span class="line">    )</span><br><span class="line">    model.default_cfg = _cfg()</span><br><span class="line">    <span class="keyword">return</span> model</span><br></pre></td></tr></tbody></table></figure><h3 id="SDTv2模型"><a href="#SDTv2模型" class="headerlink" title="SDTv2模型"></a>SDTv2模型</h3><p>运行SDTv2模型源代码时，我们选择了以下参数配置，以确保模型能够在不同的任务中表现出色：</p><p>首先，输入图片的尺寸设定为224像素乘以224像素。这一尺寸与许多标准图像分类任务的输入尺寸一致，确保模型能够处理常见的数据格式。在Embedding尺寸方面，SDTv2模型采用了多种不同的维度：128维、256维、512维和640维。不同的Embedding尺寸允许模型在不同的层次上捕捉到图像的多尺度特征，从而提高模型的整体表现。</p><p>与Spikformer类似，SPS 
Patch的尺寸设定为16像素乘以16像素。这意味着输入图片将被分割成若干个16x16的补丁，每个补丁将被单独处理，以提取局部特征。SDTv2模型的深度为8层，每层包含8个注意力头。这些注意力头允许模型在多个子空间中并行地关注不同的特征，从而提高模型的特征表示能力。</p><p>MLP隐藏层的比例设定为4倍，这意味着在多层感知机（MLP）部分，隐藏层的维度是输入维度的四倍。这种设计有助于增强模型的非线性表示能力，从而提高模型的分类性能。分类头部分，我们同样选择了一个线性分类头，用于将特征映射到最终的类别预测上。</p><p>在优化器的选择上，我们使用了AdamW优化器。AdamW结合了Adam优化器和权重衰减技术，能够有效防止过拟合，并在许多任务中表现优异。初始学习率设定为0.0005，前20轮作为预热轮，学习率从0.000001逐渐增大到0.0005，之后使用余弦学习率调度进行调整。这种学习率策略有助于模型在训练初期稳定收敛，并在后期通过逐渐减小学习率来达到更好的性能表现。</p><p>整个训练过程共进行了200轮。数据增强技术方面，我们选用了与之前基准模型训练相同的技术，包括RandomResizedCrop、RandomFlip、ColorJitter、Mixup和CutMix等。这些数据增强技术能够有效扩充训练数据，提高模型的泛化能力。</p><figure class="highlight python"><table><tbody><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br></pre></td><td class="code"><pre><span class="line">CUDA_VISIBLE_DEVICES=<span class="number">0</span>,<span class="number">1</span> torchrun --standalone --nproc_per_node=<span class="number">2</span> \</span><br><span class="line">  main_finetune.py \</span><br><span class="line">  --batch_size <span class="number">64</span> \</span><br><span class="line">  --blr <span class="number">6e-4</span> \</span><br><span class="line">  --warmup_epochs <span class="number">10</span> \</span><br><span class="line">  --epochs <span class="number">200</span> \</span><br><span class="line">  --model metaspikformer_8_512 \</span><br><span class="line">  --data_path /root/autodl-tmp/ILSVRC2012 \</span><br><span class="line">  --output_dir outputs/55M \</span><br><span class="line">  --log_dir outputs/55M \</span><br><span class="line">  --model_mode ms \</span><br><span class="line">  --dist_eval</span><br></pre></td></tr></tbody></table></figure><h2
id="结果展示"><a href="#结果展示" class="headerlink" title="结果展示"></a>结果展示</h2><h3 id="评估结果"><a href="#评估结果" class="headerlink" title="评估结果"></a>评估结果</h3><table><thead><tr><th align="center"><strong>模型</strong></th><th align="center"><strong>数据集</strong></th><th align="center"><strong>Top-1 (%)</strong></th><th align="center"><strong>Top-5 (%)</strong></th><th align="center"><strong>轮数</strong> <strong>(epoch)</strong></th></tr></thead><tbody><tr><td align="center">ResNet-101</td><td align="center">CIFAR-10</td><td align="center">80.99</td><td align="center">/</td><td align="center">100</td></tr><tr><td align="center">Spikformer</td><td align="center">CIFAR-10</td><td align="center"><strong>94.82</strong></td><td align="center"><strong>99.62</strong></td><td align="center">300</td></tr><tr><td align="center">ResNet-101</td><td align="center">ImageNet-100</td><td align="center">72.90</td><td align="center">91.29</td><td align="center">200</td></tr><tr><td align="center">ResNet-101(aug)</td><td align="center">ImageNet-100</td><td align="center">78.26</td><td align="center">92.70</td><td align="center">200</td></tr><tr><td align="center">Spikformer</td><td align="center">ImageNet-100</td><td align="center">82.80</td><td align="center"><strong>96.26</strong></td><td align="center">200</td></tr><tr><td align="center">SDTv2</td><td align="center">ImageNet-100</td><td align="center"><strong>84.46</strong></td><td align="center">95.82</td><td align="center">200</td></tr></tbody></table><h3 id="结果分析"><a href="#结果分析" class="headerlink" 
title="结果分析"></a>结果分析</h3><p>在CIFAR-10数据集上，我们比较了ResNet-101和Spikformer的表现。ResNet-101在没有数据增强的情况下，经过100轮训练，达到了80.99%的Top-1准确率。而Spikformer在经过300轮训练后，取得了显著提升，达到了94.82%的Top-1准确率和99.62%的Top-5准确率。这个结果表明Spikformer在处理小规模图像数据集时，表现出色，显著优于传统的卷积神经网络ResNet-101。</p><p>在ImageNet-100数据集上，我们对比了ResNet-101、ResNet-101（使用数据增强技术）和两种新的架构模型Spikformer和SDTv2。结果显示，标准的ResNet-101在没有数据增强的情况下，经过200轮训练，达到了72.90%的Top-1准确率和91.29%的Top-5准确率。加入数据增强技术后，ResNet-101的表现有所提升，达到了78.26%的Top-1准确率和92.70%的Top-5准确率。</p><p>Spikformer在ImageNet-100数据集上的表现比ResNet-101更为优越。经过200轮训练，Spikformer达到了82.80%的Top-1准确率和96.26%的Top-5准确率。相比于使用数据增强的ResNet-101，Spikformer在Top-1和Top-5准确率上分别提升了4.54%和3.56%。</p><p>SDTv2则表现得更加出色。在相同的训练条件下，SDTv2在ImageNet-100数据集上达到了84.46%的Top-1准确率和95.82%的Top-5准确率。这一结果表明，SDTv2在处理大规模图像数据集时，能够提供更高的分类准确率，超过了Spikformer和ResNet-101。</p><p>我们的实验结果表明，Spikformer和SDTv2在处理图像分类任务时，表现优于传统的卷积神经网络ResNet-101。特别是在使用数据增强技术和优化训练策略后，Spikformer和SDTv2展示了更强的特征提取和分类能力。Spikformer在小规模数据集CIFAR-10上的出色表现，以及SDTv2在大规模数据集ImageNet-100上的优越性能，证明了它们在不同场景下的广泛适用性。未来，我们可以进一步优化这些模型，探索它们在更多任务和数据集上的潜力。</p><h2 id="总结"><a href="#总结" class="headerlink" 
title="总结"></a>总结</h2><p>在本项目中，我们着重研究并复现了Spikformer模型，通过与传统的卷积神经网络ResNet-101以及另一种新型架构SDTv2进行对比实验，验证了Spikformer在图像分类任务中的性能优势。我们选用了CIFAR-10和ImageNet-100两个数据集，采用不同的训练策略和优化方法，全面评估了各个模型的表现。</p><p>在CIFAR-10数据集上，Spikformer显著超过了传统的ResNet-101模型。经过300轮训练，Spikformer达到了94.82%的Top-1准确率和99.62%的Top-5准确率，远高于ResNet-101的80.99%（Top-1准确率）。这表明Spikformer在处理小规模图像数据时，具有更强的特征提取和分类能力。同时，这一结果展示了Spikformer在小规模数据集上的卓越表现。</p><p>在ImageNet-100数据集上，我们发现数据增强技术对ResNet-101的性能有显著提升。在没有数据增强的情况下，ResNet-101的Top-1和Top-5准确率分别为72.90%和91.29%；而使用数据增强后，准确率提升至78.26%（Top-1）和92.70%（Top-5）。这说明数据增强技术能够有效提高模型的泛化能力。然而，即使在使用数据增强的情况下，ResNet-101的表现依然不及Spikformer。</p><p>在ImageNet-100数据集上，Spikformer同样展示了优越的性能。经过200轮训练，Spikformer的Top-1准确率为82.80%，Top-5准确率为96.26%。相较于使用数据增强的ResNet-101，Spikformer在Top-1和Top-5准确率上分别提升了4.54%和3.56%。这一结果表明，Spikformer在处理大规模数据时依旧具备强大的学习和分类能力。同时，SDTv2在相同数据集上的表现也十分出色，其Top-1准确率为84.46%，Top-5准确率为95.82%，略高于Spikformer，但差距不大。</p><p>通过这些实验，我们验证了Spikformer在图像分类任务中的优越性能，特别是在处理不同规模的数据集时，均展现出强大的特征提取和分类能力。与传统的卷积神经网络相比，Spikformer在多个方面均具有显著的优势。</p><h2 id="参考文章"><a href="#参考文章" class="headerlink" title="参考文章"></a>参考文章</h2><div class="liushen-tag-link"><a class="tag-Link" target="_blank" href="/safego/?u=aHR0cHM6Ly9vcGVucmV2aWV3Lm5ldC9mb3J1bT9pZD1mckU0ZlV3el9o" rel="external nofollow noopener noreferrer"><div class="tag-link-tips">🙄引用站外地址，不保证站点的可用性和安全性</div><div class="tag-link-bottom"><div class="tag-link-left" style="background-image:url(https://p.liiiu.cn/i/2024/07/27/66a4632bbf06e.webp)"></div><div class="tag-link-right"><div class="tag-link-title">Spikformer: When Spiking Neural Network Meets Transformer</div><div class="tag-link-sitename">openreview.net</div></div><i class="fa-solid fa-angle-right"></i></div></a></div><div class="liushen-tag-link"><a class="tag-Link" target="_blank" href="/safego/?u=aHR0cHM6Ly9hcnhpdi5vcmcvYWJzLzI0MDEuMDIwMjA" rel="external nofollow noopener noreferrer"><div class="tag-link-tips">🙄引用站外地址，不保证站点的可用性和安全性</div><div class="tag-link-bottom"><div class="tag-link-left" 
style="background-image:url(https://p.liiiu.cn/i/2024/07/27/66a4632bbf06e.webp)"></div><div class="tag-link-right"><div class="tag-link-title">Spikformer V2: Join the High Accuracy Club on ImageNet with an SNN Ticket</div><div class="tag-link-sitename">arxiv.org</div></div><i class="fa-solid fa-angle-right"></i></div></a></div><div class="liushen-tag-link"><a class="tag-Link" target="_blank" href="/safego/?u=aHR0cHM6Ly9naXRodWIuY29tL1pLLVpob3Uvc3Bpa2Zvcm1lcg" rel="external nofollow noopener noreferrer"><div class="tag-link-tips">🙄引用站外地址，不保证站点的可用性和安全性</div><div class="tag-link-bottom"><div class="tag-link-left" style="background-image:url(https://p.liiiu.cn/i/2024/07/27/66a461a3098aa.webp)"></div><div class="tag-link-right"><div class="tag-link-title">ICLR 2023 Spikformer: When Spiking Neural Network Meets Transformer</div><div class="tag-link-sitename">github.com@ZK-Zhou</div></div><i class="fa-solid fa-angle-right"></i></div></a></div><div class="liushen-tag-link"><a class="tag-Link" target="_blank" href="/safego/?u=aHR0cHM6Ly9ibG9nLmNzZG4ubmV0L3dlaXhpbl8zNzg2NDQ0OS9hcnRpY2xlL2RldGFpbHMvMTI2NzcyODMw" rel="external nofollow noopener noreferrer"><div class="tag-link-tips">🙄引用站外地址，不保证站点的可用性和安全性</div><div class="tag-link-bottom"><div class="tag-link-left" style="background-image:url(https://p.liiiu.cn/i/2024/07/27/66a461b627dc2.webp)"></div><div class="tag-link-right"><div class="tag-link-title">一文通俗入门·脉冲神经网络(SNN)·第三代神经网络</div><div class="tag-link-sitename">Blog.csdn.net@玮涛无敌</div></div><i class="fa-solid fa-angle-right"></i></div></a></div><div class="liushen-tag-link"><a class="tag-Link" target="_blank" href="/safego/?u=aHR0cHM6Ly9kZXZlbG9wZXIudm9sY2VuZ2luZS5jb20vYXJ0aWNsZXMvNzM4MjI2MDM4NzY0NDcwMjc3MQ" rel="external nofollow noopener noreferrer"><div class="tag-link-tips">🙄引用站外地址，不保证站点的可用性和安全性</div><div class="tag-link-bottom"><div class="tag-link-left" style="background-image:url(https://p.liiiu.cn/i/2024/07/27/66a4632bbf06e.webp)"></div><div 
class="tag-link-right"><div class="tag-link-title">神经网络算法 - 一文搞懂SNN（脉冲神经网络）</div><div class="tag-link-sitename">developer.volcengine.com@架构师带你玩转AI</div></div><i class="fa-solid fa-angle-right"></i></div></a></div><h2 id="声明"><a href="#声明" class="headerlink" title="声明"></a>声明</h2><ul><li>以上内容仅供学术研究和学习交流使用，如有侵权，请联系我进行删除处理。</li><li>若有任何问题或需要进一步讨论，请随时联系我的邮箱：<a href="mailto:01@liushen.fun">01@liushen.fun</a>。</li></ul></article><div class="post-copyright"><div class="post-copyright__title"><span class="post-copyright-info"><h>Spikformer脉冲神经网络学习</h></span></div><div class="post-copyright__type"><span class="post-copyright-info"><a href="https://blog.liushen.fun/posts/67189760/">https://blog.liushen.fun/posts/67189760/</a></span></div><div class="post-copyright-m"><div class="post-copyright-m-info" style="position:relative;z-index:3"><div class="post-copyright-a" style="display:inline-block;width:fit-content;margin:20px 20px 20px 0"><h>作者</h><div class="post-copyright-cc-info"><h>LiuShen</h></div></div><div class="post-copyright-c" style="display:inline-block;width:fit-content;margin:20px 20px 20px 0"><h>发布于</h><div class="post-copyright-cc-info"><h>2024-07-06</h></div></div><div class="post-copyright-u" style="display:inline-block;width:fit-content;margin:20px 20px 20px 0"><h>更新于</h><div class="post-copyright-cc-info"><h>2024-07-06</h></div></div><div class="post-copyright-c" style="display:inline-block;width:fit-content;margin:20px 20px 20px 0"><h>许可协议</h><div class="post-copyright-cc-info"><a rel="noopener" target="_blank" title=" CC BY 4.0" href="https://creativecommons.org/licenses/by-nc-sa/4.0/?ref=chooser-v1">CC BY-NC-SA 4.0</a></div></div></div></div></div><div class="tag_share"><div class="post-meta__tag-list"><a class="post-meta__tags" href="/tags/%E5%AD%A6%E4%B9%A0%E7%AC%94%E8%AE%B0/">学习笔记</a><a class="post-meta__tags" href="/tags/%E6%9C%BA%E5%99%A8%E5%AD%A6%E4%B9%A0/">机器学习</a><a class="post-meta__tags" 
href="/tags/%E7%A5%9E%E7%BB%8F%E7%BD%91%E7%BB%9C/">神经网络</a></div><div class="post-share"><div class="social-share" data-image="https://p.liiiu.cn/i/2024/07/06/6688d4b63b50f.webp" data-sites="facebook,twitter,wechat,weibo,qq"></div><link rel="stylesheet" href="https://jsd.liiiu.cn/npm/butterfly-extsrc@1.1.4/sharejs/dist/css/share.min.css" media="print" onload='this.media="all"'><script src="https://jsd.liiiu.cn/npm/butterfly-extsrc@1.1.4/sharejs/dist/js/social-share.min.js" defer></script></div></div><div class="post-reward"><div class="reward-button"><i class="fas fa-qrcode"></i>来😍鼠标过来一点~</div><div class="reward-main"><ul class="reward-all"><li class="reward-item"><a href="/config/img/wechat.png" target="_blank"><img class="post-qr-code-img" src="" data-lazy-src="/config/img/wechat.png" alt="微信"></a><div class="post-qr-code-desc">微信</div></li><li class="reward-item"><a href="/config/img/wechat.png" target="_blank"><img class="post-qr-code-img" src="" data-lazy-src="/config/img/wechat.png" alt="支付宝"></a><div class="post-qr-code-desc">支付宝</div></li></ul></div></div><nav class="pagination-post" id="pagination"><a class="prev-post pull-left" href="/posts/13e6e155/" title="东软软件园实习日记"><img class="cover" src="" data-lazy-src="https://p.liiiu.cn/i/2024/07/01/66823117502e0.webp" onerror='onerror=null,src="/img/404.jpg"' alt="cover of previous post"><div class="pagination-info"><div class="label">上一篇</div><div class="prev_info">东软软件园实习日记</div></div></a><a class="next-post pull-right" href="/posts/4dc716ec/" title="Friend-Circle-Lite:轻量友链朋友圈"><img class="cover" src="" data-lazy-src="https://p.liiiu.cn/i/2024/07/19/6699436fe02ec.webp" onerror='onerror=null,src="/img/404.jpg"' alt="cover of next post"><div class="pagination-info"><div class="label">下一篇</div><div class="next_info">Friend-Circle-Lite:轻量友链朋友圈</div></div></a></nav><div class="relatedPosts"><div class="headline"><i class="fas fa-thumbs-up fa-fw"></i><span>相关推荐</span></div><div class="relatedPosts-list"><a 
href="/posts/57a50b33/" title="DCAMNet钢铁缺陷检测网络复现"><img class="cover" src="" data-lazy-src="https://p.liiiu.cn/i/2024/03/22/65fc5eceae5bd.png" alt="cover"><div class="content is-center"><div class="date"><i class="far fa-calendar-alt fa-fw"></i> 2024-02-01</div><div class="title">DCAMNet钢铁缺陷检测网络复现</div></div></a><a href="/posts/4bb33804/" title="PaddleYOLO训练自己的数据集"><img class="cover" src="" data-lazy-src="https://p.liiiu.cn/i/2025/04/08/67f49876a62e5.webp" alt="cover"><div class="content is-center"><div class="date"><i class="far fa-calendar-alt fa-fw"></i> 2025-04-08</div><div class="title">PaddleYOLO训练自己的数据集</div></div></a><a href="/posts/2229c0f4/" title="CNN+transformer入门学习"><img class="cover" src="" data-lazy-src="https://pic2.zhimg.com/v2-e3b4212c09df05a0484da4eb5392a0d9_r.jpg" alt="cover"><div class="content is-center"><div class="date"><i class="far fa-calendar-alt fa-fw"></i> 2023-01-05</div><div class="title">CNN+transformer入门学习</div></div></a></div></div><hr class="custom-hr"><div id="post-comment"><div class="comment-head"><div class="comment-headline"><i class="fas fa-comments fa-fw"></i> <span>评论</span></div></div><div class="comment-wrap"><div><div id="artalk-wrap"></div></div></div></div></div><div class="aside-content" id="aside-content"><div class="card-widget card-info is-center"><div class="author-info-detail"><p class="author-info-hello">👋 欢迎光临！</p><p class="author-info-desc">你们好呀！我是站长LiuShen！一个快乐，积极，热爱生活的孩纸！😜😜😜</p></div><div class="avatar-img"><img class="mood-icon" src="" data-lazy-src="https://p.liiiu.cn/i/2025/03/14/67d301461a24a.webp" alt="🤤" onerror='this.onerror=null,this.src="/img/friend_404.gif"'><img src="" data-lazy-src="https://p.liiiu.cn/i/2025/03/13/67d2fc82d329c.webp" onerror='this.onerror=null,this.src="/img/friend_404.gif"' alt="avatar"></div><div class="author-info-name">LiuShen</div><div class="site-data"><a href="/archives/"><div class="headline">文章</div><div class="length-num">72</div></a><a href="/tags/"><div 
class="headline">标签</div><div class="length-num">98</div></a><a href="/categories/"><div class="headline">分类</div><div class="length-num">3</div></a></div><a id="card-info-btn" target="_blank" rel="noopener" href="https://github.com/willow-god"><i class="fab fa-github"></i><span>Follow Me 🛫</span></a><div class="card-info-social-icons"><a class="social-icon" href="mailto:01@liushen.fun" target="_blank" title="Email"><i class="fa-solid fa-envelope"></i></a><a class="social-icon" href="http://wpa.qq.com/msgrd?v=3&amp;uin=3162475700&amp;site=qq&amp;menu=yes" target="_blank" title="QQ：3162475700"><i class="fa-brands fa-qq"></i></a><a class="social-icon" href="https://wakatime.com/@LiuShen" target="_blank" title="Wakatime"><i class="fa-solid fa-chart-column"></i></a><a class="social-icon" href="https://blog.liushen.fun/atom.xml" target="_blank" title="rss地址"><i class="fa-solid fa-rss"></i></a></div></div><div class="card-widget" id="card-poem"><div id="poem_sentence"></div><div id="poem_info"><div id="poem_dynasty"></div><div id="poem_author"></div></div></div><script src="/js/jinrishici.js" charset="utf-8"></script><script>jinrishici.load((function(e){var n=document.querySelector("#poem_sentence"),t=document.querySelector("#poem_author"),o=document.querySelector("#poem_dynasty"),r=e.data.content;r=r.substr(0,r.length-1),n.innerHTML=r,o.innerHTML=e.data.origin.dynasty,t.innerHTML=e.data.origin.author+"《"+e.data.origin.title+"》"}))</script><div class="card-widget card-announcement"><div class="item-headline"><i class="fas fa-bullhorn fa-shake"></i><span>公告</span></div><div class="announcement_content"><p><strong>博客架构概览：</strong><br>⚙️框架核心：Hexo<br>🕹️界面设计：Butterfly<br>🔮安全保障：长亭雷池<br>🔩管理工具：宝塔面板，1Panel<br>🎰服务器支持：阿里云，腾讯云<br>🎲CDN加速：多吉云，CloudFlare<br><strong>快捷跳转地址：</strong><br>🧩个人相册：<a target="_blank" rel="noopener" href="https://xc.liushen.fun">xc.liushen.fun</a><br>🤖个人导航：<a target="_blank" rel="noopener" href="https://www.liushen.fun">www.liushen.fun</a><br></p><img src="" 
data-lazy-src="/config/img/notice.gif" alt="可爱捏" title="可爱捏" style="width:100%;border-radius:10px"></div></div><div class="sticky_layout"><div class="card-widget" id="card-toc"><div class="item-headline"><i class="fas fa-stream"></i><span>目录</span><span class="toc-percentage"></span></div><div class="toc-content"><ol class="toc"><li class="toc-item toc-level-2"><a class="toc-link" href="#%E7%A2%8E%E7%A2%8E%E5%BF%B5"><span class="toc-text">碎碎念</span></a></li><li class="toc-item toc-level-2"><a class="toc-link" href="#%E8%83%8C%E6%99%AF"><span class="toc-text">背景</span></a><ol class="toc-child"><li class="toc-item toc-level-3"><a class="toc-link" href="#%E8%84%89%E5%86%B2%E7%A5%9E%E7%BB%8F%E7%BD%91%E7%BB%9C"><span class="toc-text">脉冲神经网络</span></a></li><li class="toc-item toc-level-3"><a class="toc-link" href="#Transformer%E6%A8%A1%E5%9E%8B"><span class="toc-text">Transformer模型</span></a></li><li class="toc-item toc-level-3"><a class="toc-link" href="#Spikformer"><span class="toc-text">Spikformer</span></a></li></ol></li><li class="toc-item toc-level-2"><a class="toc-link" href="#%E5%8E%9F%E7%90%86"><span class="toc-text">原理</span></a><ol class="toc-child"><li class="toc-item toc-level-3"><a class="toc-link" href="#%E8%84%89%E5%86%B2%E7%A5%9E%E7%BB%8F%E7%BD%91%E7%BB%9C-1"><span class="toc-text">脉冲神经网络</span></a></li><li class="toc-item toc-level-3"><a class="toc-link" href="#%E6%B3%A8%E6%84%8F%E5%8A%9B%E6%9C%BA%E5%88%B6-Transformer"><span class="toc-text">注意力机制&amp;Transformer</span></a></li><li class="toc-item toc-level-3"><a class="toc-link" href="#Spikformer-1"><span class="toc-text">Spikformer</span></a></li></ol></li><li class="toc-item toc-level-2"><a class="toc-link" href="#%E5%B0%9D%E8%AF%95%E5%AE%9E%E7%8E%B0"><span class="toc-text">尝试实现</span></a><ol class="toc-child"><li class="toc-item toc-level-3"><a class="toc-link" href="#%E7%8E%AF%E5%A2%83"><span class="toc-text">环境</span></a></li><li class="toc-item toc-level-3"><a class="toc-link" 
href="#%E6%95%B0%E6%8D%AE%E9%9B%86"><span class="toc-text">数据集</span></a></li><li class="toc-item toc-level-3"><a class="toc-link" href="#Baseline%E8%AE%AD%E7%BB%83"><span class="toc-text">Baseline训练</span></a></li><li class="toc-item toc-level-3"><a class="toc-link" href="#Spikformer%E6%A8%A1%E5%9E%8B%E5%A4%8D%E7%8E%B0"><span class="toc-text">Spikformer模型复现</span></a></li><li class="toc-item toc-level-3"><a class="toc-link" href="#SDTv2%E6%A8%A1%E5%9E%8B"><span class="toc-text">SDTv2模型</span></a></li></ol></li><li class="toc-item toc-level-2"><a class="toc-link" href="#%E7%BB%93%E6%9E%9C%E5%B1%95%E7%A4%BA"><span class="toc-text">结果展示</span></a><ol class="toc-child"><li class="toc-item toc-level-3"><a class="toc-link" href="#%E8%AF%84%E4%BC%B0%E7%BB%93%E6%9E%9C"><span class="toc-text">评估结果</span></a></li><li class="toc-item toc-level-3"><a class="toc-link" href="#%E7%BB%93%E6%9E%9C%E5%88%86%E6%9E%90"><span class="toc-text">结果分析</span></a></li></ol></li><li class="toc-item toc-level-2"><a class="toc-link" href="#%E6%80%BB%E7%BB%93"><span class="toc-text">总结</span></a></li><li class="toc-item toc-level-2"><a class="toc-link" href="#%E5%8F%82%E8%80%83%E6%96%87%E7%AB%A0"><span class="toc-text">参考文章</span></a></li><li class="toc-item toc-level-2"><a class="toc-link" href="#%E5%A3%B0%E6%98%8E"><span class="toc-text">声明</span></a></li></ol></div></div><div class="card-widget card-recent-post"><div class="item-headline"><i class="fas fa-history"></i><span>最新文章</span></div><div class="aside-list"><div class="aside-list-item"><a class="thumbnail" href="/posts/7915ee6b/" title="数据库可视化WEB工具对比"><img src="" data-lazy-src="https://p.liiiu.cn/i/2025/05/25/6832cc105bc41.webp" onerror='this.onerror=null,this.src="/img/404.jpg"' alt="数据库可视化WEB工具对比"></a><div class="content"><a class="title" href="/posts/7915ee6b/" title="数据库可视化WEB工具对比">数据库可视化WEB工具对比</a><time datetime="2025-05-25T10:01:21.000Z" title="更新于 2025-05-25 18:01:21">2025-05-25</time></div></div><div class="aside-list-item"><a 
class="thumbnail" href="/posts/40702a0d/" title="本地实现HEXO文章AI摘要"><img src="" data-lazy-src="https://p.liiiu.cn/i/2025/05/06/6819cd4532457.webp" onerror='this.onerror=null,this.src="/img/404.jpg"' alt="本地实现HEXO文章AI摘要"></a><div class="content"><a class="title" href="/posts/40702a0d/" title="本地实现HEXO文章AI摘要">本地实现HEXO文章AI摘要</a><time datetime="2025-05-06T16:01:21.000Z" title="更新于 2025-05-07 00:01:21">2025-05-07</time></div></div><div class="aside-list-item"><a class="thumbnail" href="/posts/5f71a4b1/" title="耗子面板和DPanel简单体验"><img src="" data-lazy-src="https://p.liiiu.cn/i/2025/04/27/680dc7916ef34.webp" onerror='this.onerror=null,this.src="/img/404.jpg"' alt="耗子面板和DPanel简单体验"></a><div class="content"><a class="title" href="/posts/5f71a4b1/" title="耗子面板和DPanel简单体验">耗子面板和DPanel简单体验</a><time datetime="2025-04-28T14:48:21.000Z" title="更新于 2025-04-28 22:48:21">2025-04-28</time></div></div><div class="aside-list-item"><a class="thumbnail" href="/posts/caee2d9f/" title="美化你的RSS订阅地址"><img src="" data-lazy-src="https://p.liiiu.cn/i/2025/04/18/68024a618942b.webp" onerror='this.onerror=null,this.src="/img/404.jpg"' alt="美化你的RSS订阅地址"></a><div class="content"><a class="title" href="/posts/caee2d9f/" title="美化你的RSS订阅地址">美化你的RSS订阅地址</a><time datetime="2025-04-18T12:48:21.000Z" title="更新于 2025-04-18 20:48:21">2025-04-18</time></div></div></div></div></div></div></main><footer id="footer"><div id="footer-wrap" style="background:0 0"><div id="footer_icons"><div><a class="icon_link" rel="noopener external nofollow" href="https://www.liushen.fun/" title="导航站点" target="_blank"><i class="fa-solid fa-compass"></i></a><a class="icon_link" rel="noopener external nofollow" href="https://admin.qidian.qq.com/static_proxy/b2b-qq/wpa-link/index.html#/person?uin=3162475700" title="联系QQ" target="_blank"><i class="fa-brands fa-qq"></i></a><a class="icon_link" rel="noopener external nofollow" href="https://github.com/willow-god" title="我的github主页" target="_blank"><i class="fa-brands fa-github"></i></a><a 
class="icon_link" rel="noopener external nofollow" href="mailto:01@liushen.fun" title="发送邮件至博主邮箱" target="_blank"><i class="fa-solid fa-envelope"></i></a></div><div class="footer_logo_container" onclick="btf.scrollToDest(0,500)" title="返回顶部"><img class="footer_logo" src="" data-lazy-src="/img/footer.gif"></div><div><a class="icon_link" rel="noopener external nofollow" href="https://wakatime.com/@LiuShen" title="Wikitime" target="_blank"><i class="fa-solid fa-clock"></i></a><a class="icon_link" rel="noopener external nofollow" href="https://gitlab.com/" title="gitlab" target="_blank"><i class="fa-brands fa-gitlab"></i></a><a class="icon_link" href="/shuoshuo/" title="日常说说" data-pjax-state="data-pjax-state"><i class="fa-solid fa-file-pen"></i></a><a class="icon_link" href="/comment/" title="留言板" data-pjax-state="data-pjax-state"><i class="fa-solid fa-comment"></i></a></div></div><div id="footer_content"><div class="footer-group"><h3 class="footer-title">关于本站</h3><div class="footer-links"><a class="footer-item" target="_blank" href="https://www.liushen.fun/">导航站点</a><a class="footer-item" href="/shuoshuo/">日常说说</a><a class="footer-item" target="_blank" href="https://um.liushen.fun/share/bIEnQp0xnMxD8c9V/blog.liushen.fun">访客信息</a><a class="footer-item" target="_blank" href="https://mm.liushen.fun/">提笔摘星</a><a class="footer-item" target="_blank" href="https://status.liushen.fun/">在线状态</a><a class="footer-item" href="/subscribe/">订阅本站</a></div></div><div class="footer-group"><h3 class="footer-title">加入组织</h3><div class="footer-links"><a class="footer-item" target="_blank" href="https://www.boyouquan.com/home">博友圈</a><a class="footer-item" target="_blank" href="https://github.com/timqian/chinese-independent-blogs">中博列表</a><a class="footer-item" target="_blank" href="https://blogwe.com/">博客我们</a><a class="footer-item" target="_blank" href="https://storeweb.cn/">个性商店</a><a class="footer-item" target="_blank" href="https://bf.zzxworld.com/">发现博客</a><a class="footer-item" 
target="_blank" href="https://ourblo.gs/">OurBlogs</a></div></div><div class="footer-group"><h3 class="footer-title">文章整理</h3><div class="footer-links"><a class="footer-item" href="/categories/website/">博客管理</a><a class="footer-item" href="/categories/learning/">学习资料</a><a class="footer-item" href="/categories/daily-share/">日常分享</a><a class="footer-item" href="/archives/">时光卷轴</a><a class="footer-item" href="/charts/">文章通览</a><a class="footer-item" href="/categories/">查看全部</a></div></div><div class="footer-group"><h3 class="footer-title">文章标签</h3><div class="footer-links"><a class="footer-item" href="/tags/JavaScript/">JS知识</a><a class="footer-item" href="/tags/Hexo/">本站框架</a><a class="footer-item" href="/tags/机器学习/">机器学习</a><a class="footer-item" href="/tags/日记/">个人日记</a><a class="footer-item" href="/tags/CSS/">CSS知识</a><a class="footer-item" href="/tags/">查看全部</a></div></div><div class="footer-group"><h3 class="footer-title">自建工具</h3><div class="footer-links"><a class="footer-item" target="_blank" rel="noopener" href="https://chat.liushen.fun/">清羽AI</a><a class="footer-item" target="_blank" rel="noopener" href="https://hot.liushen.fun/">今日热榜</a><a class="footer-item" target="_blank" rel="noopener" href="https://cover.qyliu.top/">封面设计</a><a class="footer-item" target="_blank" rel="noopener" href="https://icon.qyliu.top/">万变图标</a><a class="footer-item" target="_blank" rel="noopener" href="https://tmail.qyliu.top/">临时邮箱</a><a class="footer-item" target="_blank" rel="noopener" href="https://share.liushen.fun/">内容中转</a></div></div><div class="footer-group" id="friend-links-in-footer"><h3 class="footer-title">友链<button title="换一批" href="javascript:;" onclick="liushen.randomLink()"><i class="fa-solid fa-rotate-right"></i></button></h3><div class="footer-links"><a class="footer-item" target="_blank" href="https://blog.liushen.fun">测试1</a><a class="footer-item" target="_blank" href="https://blog.liushen.fun">测试2</a><a class="footer-item" target="_blank" 
href="https://blog.liushen.fun">测试3</a><a class="footer-item" target="_blank" href="https://blog.liushen.fun">测试4</a><a class="footer-item" target="_blank" href="https://blog.liushen.fun">测试5</a><a class="footer-item" href="/link/" data-pjax-state="data-pjax-state">查看更多</a></div></div></div><div id="footer-bottom"><div class="footer-bottom-content"><div class="footer-bottom-left"><span class="copyright">©2021 - 2025 By <a target="_blank" rel="noopener" href="https://blog.liushen.fun/about/" title="点击访问&quot;LiuShen&quot;的主页" style="margin-left:5px">LiuShen</a></span><div><a class="footer-bottom-link" target="_blank" href="https://beian.miit.gov.cn/" rel="noopener external nofollow" title="工信部备案号">陕ICP备2024028531号-2</a><a class="footer-bottom-link" target="_blank" href="https://beian.mps.gov.cn/#/query/webSearch?code=61011602000637" rel="noopener external nofollow" title="公安备案号">陕公网安备61011602000637号</a></div></div><div class="footer-bottom-right"><div id="runtime" title="本站运行时间">本站已苟活：0 天 0 时 0 分 0 秒</div><div><a class="footer-bottom-link" target="_blank" href="https://www.dogecloud.com/?iuid=9173" rel="noopener external nofollow" title="本站通过多吉云CDN提供站点加速">多吉云CDN</a><a class="footer-bottom-link" target="_blank" href="https://hexo.io/zh-cn/" rel="noopener external nofollow" title="本站使用Hexo架构搭建而成">Hexo静态框架</a><a class="footer-bottom-link" target="_blank" href="https://butterfly.js.org/" rel="noopener external nofollow" title="本站主题由Butterfly主题魔改而成">LiuShen主题</a></div></div></div></div></div><script>window.liushen||(window.liushen={saveData:(e,n)=>{localStorage.setItem(e,JSON.stringify({time:Date.now(),data:n}))},loadData:(e,n)=>{let t=JSON.parse(localStorage.getItem(e));if(t){let e=Date.now()-t.time;if(e>=0&&e<6e4*n)return t.data}return null},runtime:()=>{const e=e=>e>9?e:"0"+e,n=new Date("2021/12/12 01:27:36").getTime(),t=Date.now();let i=Math.round((t-n)/1e3),o="本站已苟活：";i>=86400&&(o+=`${e(Math.floor(i/86400))} 天 `,i%=86400),i>=3600&&(o+=`${e(Math.floor(i/3600))} 时 
`,i%=3600),i>=60&&(o+=`${e(Math.floor(i/60))} 分 `,i%=60),o+=`${e(i)} 秒`;const l=document.getElementById("runtime");l&&(l.innerHTML=o),setTimeout(window.liushen.runtime,1e3)},randomLink:()=>{let e=window.liushen.loadData("links",30);if(e){let n=document.querySelectorAll("#friend-links-in-footer .footer-item");if(!n.length)return;for(let t=0;t<n.length;t++){let i=Math.floor(Math.random()*e.length);n[t].innerText=e[i].name,n[t].href=e[i].link,e.splice(i,1)}}else fetch("/flink_count.json").then((e=>e.json())).then((e=>{window.liushen.saveData("links",e.link_list),window.liushen.randomLink()}))}}),window.liushen.randomLink(),document.addEventListener("DOMContentLoaded",window.liushen.randomLink),window.liushen.runtime()</script></footer></div><div id="rightside"><div id="rightside-config-hide"><button id="translateLink" type="button" title="简繁转换" style="width:35px">繁</button><button id="readmode" type="button" title="阅读模式"><i class="fas fa-book-open"></i><span class="rightside-text">阅读模式</span></button><button id="darkmode" type="button" title="日间和夜间模式切换"><i class="fa-regular fa-star-half-stroke"></i><span class="rightside-text">亮暗切换</span></button><button id="cat" onclick="toggleLive2dVisibility()" title="小猫显隐"><i class="fa-solid fa-cat"></i><span class="rightside-text">小猫显隐</span></button><button id="hide-aside-btn" type="button" title="单栏和双栏切换"><i class="fa-solid fa-arrows-left-right-to-line"></i><span class="rightside-text">侧栏显隐</span></button></div><div id="rightside-config-show"><button id="rightside-config" type="button" title="设置"><i class="fas fa-cog fa-spin"></i><span class="rightside-text">更多设置</span></button><button class="close" id="mobile-toc-button" type="button" title="目录"><i class="fas fa-list-ul"></i><span class="rightside-text">显示目录</span></button><button id="fullscreen" onclick="toggleFullScreen()" title="全屏切换"><i class="fa-solid fa-expand"></i><span class="rightside-text">全屏切换</span></button><a id="to_comment" href="#post-comment" title="前往评论"><i 
class="fas fa-comments"></i><span class="rightside-text">快速评论</span></a><button id="go-up" type="button" title="回到顶部"><span class="scroll-percent"></span><i class="fas fa-arrow-up"></i><span class="rightside-text">回到顶部</span></button></div></div><div id="rightMenu"><div class="rightMenu-group rightMenu-small"><div class="rightMenu-item" id="menu-backward"><i class="fa-solid fa-arrow-left"></i></div><div class="rightMenu-item" id="menu-forward"><i class="fa-solid fa-arrow-right"></i></div><div class="rightMenu-item" id="menu-refresh"><i class="fa-solid fa-arrow-rotate-right"></i></div><div class="rightMenu-item" id="menu-home"><i class="fa-solid fa-house"></i></div></div><div class="rightMenu-group rightMenu-line hide" id="menu-text"><a class="rightMenu-item" id="copy" href="javascript:rm.copySelect();"><i class="fa-solid fa-copy"></i><span>复制选中文字</span></a><a class="rightMenu-item" id="reply" href="javascript:rm.replySelect();"><i class="fa-regular fa-comment"></i><span>评论选中段落</span></a></div><div class="rightMenu-group rightMenu-line rightMenuOther"><a class="rightMenu-item menu-link" href="/archives/"><i class="fa-solid fa-archive"></i><span>文章时间线</span></a><a class="rightMenu-item menu-link" href="/categories/"><i class="fa-solid fa-folder-open"></i><span>文章分大类</span></a><a class="rightMenu-item menu-link" href="/tags/"><i class="fa-solid fa-tags"></i><span>文章小标签</span></a></div><div class="rightMenu-group rightMenu-line rightMenuNormal"><a class="rightMenu-item menu-link" id="menu-radompage" href="/comment/"><i class="fa-solid fa-shoe-prints"></i><span>随心留言板</span></a><div class="rightMenu-item" id="menu-translate"><i class="fa-solid fa-earth-asia"></i><span>繁简模式切换</span></div><div class="rightMenu-item" id="menu-live2dvisibility"><i class="fa-solid fa-cat"></i><span>小猫显示隐藏</span></div><div class="rightMenu-item" id="menu-print"><i class="fa-solid fa-print fa-fw"></i><span>打印整个页面</span></div><a class="rightMenu-item menu-link" id="statement" 
href="/statement/"><i class="fa-regular fa-copyright fa-fw"></i><span>网站声明</span></a></div></div><div id="rightmenu-mask"></div><div><script src="/js/others.js?v=5.0.0"></script><script src="/js/utils.js?v=5.0.0"></script><script src="/js/main.js?v=5.0.0"></script><script src="https://jsd.liiiu.cn/npm/echarts@5.5.1/dist/echarts.simple.min.js"></script><script src="/js/rightmenu.js?v=5.0.0"></script><script src="/js/jinrishici.js"></script><script src="/js/tw_cn.js?v=5.0.0"></script><script src="https://jsd.liiiu.cn/npm/@fancyapps/ui@5.0.36/dist/fancybox/fancybox.umd.min.js"></script><script src="https://jsd.liiiu.cn/npm/instant.page@5.2.0/instantpage.min.js" type="module"></script><script src="https://jsd.liiiu.cn/npm/vanilla-lazyload@19.1.3/dist/lazyload.iife.min.js"></script><div class="js-pjax"><script>(()=>{let t=null;const e=null,o="shuoshuo"===GLOBAL_CONFIG_SITE.pageType,a=()=>{t&&(t.destroy(),t=null)},n=e=>t&&t.setDarkMode("dark"===e),l=(l=document,i=location.pathname)=>{t=Artalk.init({el:l.querySelector("#artalk-wrap"),server:"https://atk.liushen.fun",site:"清羽飞扬",darkMode:"dark"===document.documentElement.getAttribute("data-theme"),...e,pageKey:i,imgUploader:function(t){let e="Bearer 28|q18njD3pLtHiFy9WidqQrREwqZzb8Zionr8WzilI",o="https://www.baiwulin.work/api/v1/upload",a=new Headers;a.set("Accept","application/json"),a.set("Authorization",e);let n=new FormData;n.append("file",t);const l=(t,e,o)=>fetch(t,{method:"POST",body:e,headers:o}).then((t=>t.json())).then((t=>t.data.links.url)).catch((t=>(console.error("Image upload failed:",t),null)));return l(o,n,a).then((t=>t||(console.warn("雾林图床接口失败，尝试替换为秋叶图床"),e="Bearer 11|lsReISlSS0dyhDzt35ovtfYefGbWSRW2vvntPXs7",o="https://imgse.koxiuqiu.cc/api/v1/upload",a.set("Authorization",e),l(o,n,a))))}}),"null"!==GLOBAL_CONFIG.lightbox&&(t.on("list-loaded",(()=>{t.ctx.get("list").getCommentNodes().forEach((t=>{const 
e=t.getRender().$content;btf.loadLightbox(e.querySelectorAll("img:not([atk-emoticon])"))}))})),o&&(window.shuoshuoComment.destroyArtalk=()=>{a(),l.children.length&&(l.innerHTML="",l.classList.add("no-comment"))}),btf.addGlobalFn("pjaxSendOnce",a,"destroyArtalk"),btf.addGlobalFn("themeChange",n,"artalk"))},i=async(t,e)=>{"object"==typeof Artalk||(await btf.getCSS("https://jsd.liiiu.cn/npm/artalk@2.9.1/dist/Artalk.min.css"),await btf.getScript("https://jsd.liiiu.cn/npm/artalk@2.9.1/dist/Artalk.min.js")),l(t,e)};o?window.shuoshuoComment={loadComment:i}:setTimeout(i,0)})()</script></div><script>window.newestComments={changeContent:e=>(""===e||(e=(e=(e=(e=(e=e.replace(/<img.*?src="(.*?)"?[^\>]+>/gi,"[图片]")).replace(/<a[^>]+?href=["']?([^"']+)["']?[^>]*>([^<]+)<\/a>/gi,"[链接]")).replace(/<pre><code>.*?<\/pre>/gi,"[代码]")).replace(/<code>.*?<\/code>/gi,"[代码]")).replace(/<[^>]+>/g,"")).length>150&&(e=e.substring(0,150)+"..."),e),generateHtml:(e,t)=>{let n="";if(e.length)for(let t=0;t<e.length;t++){if(n+='<div class="aside-list-item">',e[t].avatar){const a="data-lazy-src";n+=`<a href="${e[t].url}" class="thumbnail"><img ${a}="${e[t].avatar}" alt="${e[t].nick}"></a>`}n+=`<div class="content">\n        <a class="comment" href="${e[t].url}" title="${e[t].content}">${e[t].content}</a>\n        <div class="name"><span>${e[t].nick} / </span><time datetime="${e[t].date}">${btf.diffDate(e[t].date,!0)}</time></div>\n        </div></div>`}else n+="暂无评论";t.innerHTML=n,window.lazyLoadInstance&&window.lazyLoadInstance.update(),window.pjax&&window.pjax.refresh(t)},newestCommentInit:(e,t)=>{const n=document.querySelector("#card-newest-comments .aside-list");if(n){const a=btf.saveToLocal.get(e);a?newestComments.generateHtml(JSON.parse(a),n):t(n)}},run:(e,t)=>{newestComments.newestCommentInit(e,t),btf.addGlobalFn("pjaxComplete",(()=>newestComments.newestCommentInit(e,t)),e)}}</script><script>window.addEventListener("load",(()=>{const 
t="artalk-newest-comments",{changeContent:a,generateHtml:e,run:n}=window.newestComments,r=new URLSearchParams({site_name:"清羽飞扬",limit:"5"});n(t,(async n=>{try{const s=await fetch(`https://atk.liushen.fun/api/v2/stats/latest_comments?${r}`),o=await s.json(),{avatarCdn:c,avatarDefault:i}=await(async()=>{const t=t=>t.startsWith("d=")?t:`d=${t}`;try{const a=await fetch("https://atk.liushen.fun/api/v2/conf"),e=await a.json(),{mirror:n,params:r,default:s}=e.frontend_conf.gravatar;return{avatarCdn:n,avatarDefault:t(r||s)}}catch(a){return console.error(a),{avatarCdn:"",avatarDefault:t("")}}})(),l=o.data.map((t=>({avatar:c&&t.email_encrypted?`${c}${t.email_encrypted}?${i}`:"",content:a(t.content_marked),nick:t.nick,url:t.page_url,date:t.date})));btf.saveToLocal.set(t,JSON.stringify(l),10/1440),e(l,n)}catch(t){console.log(t),n.textContent="无法获取评论，请确认相关配置是否正确"}}))}))</script><script src="/config/memos/memos.js"></script><script id="canvas_nest" defer color="128,128,128" opacity="0.7" zindex="-1" count="99" mobile="false" src="https://jsd.liiiu.cn/npm/butterfly-extsrc@1.1.4/dist/canvas-nest.min.js"></script><link rel="stylesheet" href="https://jsd.liiiu.cn/npm/aplayer@1.10.1/dist/APlayer.min.css" media="print" onload='this.media="all"'><script src="https://jsd.liiiu.cn/npm/aplayer@1.10.1/dist/APlayer.min.js"></script><script src="https://jsd.liiiu.cn/npm/meting@2.0.1/dist/Meting.min.js"></script><script src="https://jsd.liiiu.cn/npm/pjax@0.2.8/pjax.min.js"></script><script>(()=>{window.pjax=new Pjax({elements:'a:not([target="_blank"])',selectors:["head > title","#config-diff","#body-wrap","#rightside-config-hide","#rightside-config-show",".js-pjax"],cacheBust:!1,analytics:!1,scrollRestoration:!1});const e=e=>{e&&Object.values(e).forEach((e=>e()))};document.addEventListener("pjax:send",(()=>{btf.removeGlobalFnEvent("pjaxSendOnce"),btf.removeGlobalFnEvent("themeChange");const 
t=document.body.classList;t.contains("read-mode")&&t.remove("read-mode"),e(window.globalFn.pjaxSend)})),document.addEventListener("pjax:complete",(()=>{btf.removeGlobalFnEvent("pjaxCompleteOnce"),document.querySelectorAll("script[data-pjax]").forEach((e=>{const t=document.createElement("script"),n=e.text||e.textContent||e.innerHTML||"";Array.from(e.attributes).forEach((e=>t.setAttribute(e.name,e.value))),t.appendChild(document.createTextNode(n)),e.parentNode.replaceChild(t,e)})),e(window.globalFn.pjaxComplete)})),document.addEventListener("pjax:error",(e=>{404===e.request.status&&pjax.loadUrl("/404")}))})()</script><script async data-pjax="" src="/config/busuanzi/busuanzi.js"></script><div id="algolia-search"><div class="search-dialog"><nav class="search-nav"><span class="search-dialog-title">搜索</span><button class="search-close-button"><i class="fas fa-times"></i></button></nav><div class="search-wrap"><div id="algolia-search-input"></div><hr><div id="algolia-search-results"><div id="algolia-hits"></div><div id="algolia-pagination"></div><div id="algolia-info"><div class="algolia-stats"></div><div class="algolia-poweredBy"></div></div></div></div></div><div id="search-mask"></div><script src="https://jsd.liiiu.cn/npm/algoliasearch@5.8.1/dist/lite/builds/browser.umd.min.js"></script><script src="https://jsd.liiiu.cn/npm/instantsearch.js@4.75.0/dist/instantsearch.production.min.js"></script><script src="/js/search/algolia.js?v=5.0.0"></script></div></div><div class="needEndHide" id="nav-music"><div id="nav-music-hoverTips" onclick="liuMusic.musicToggle()">音乐已暂停</div><meting-js id="13597135963" server="netease" type="playlist" mutex="true" preload="none" data-lrctype="0" order="random" volume="0.8" api="https://met.liiiu.cn/meting/api?server=:server&amp;type=:type&amp;id=:id&amp;r=:r"></meting-js></div><script data-pjax="">function butterfly_swiper_injector_config(){var 
a=document.getElementById("recent-posts");console.log("已挂载butterfly_swiper"),a.insertAdjacentHTML("afterbegin",'<div class="recent-post-item" style="height: auto;width: 100%"><div class="blog-slider swiper-container-fade swiper-container-horizontal" id="swiper_container"><div class="blog-slider__wrp swiper-wrapper" style="transition-duration: 0ms;"><div class="blog-slider__item swiper-slide" style="width: 750px; opacity: 1; transform: translate3d(0px, 0px, 0px); transition-duration: 0ms;"><a class="blog-slider__img" onclick="pjax.loadUrl(&quot;posts/4bb33804/&quot;);" href="javascript:void(0);" alt=""><img width="48" height="48" src= "" data-lazy-src="https://p.liiiu.cn/i/2025/04/08/67f49876a62e5.webp" alt="" onerror="this.src=/img/error-page.png; this.onerror = null;"/></a><div class="blog-slider__content"><span class="blog-slider__code">2025-04-08</span><a class="blog-slider__title" onclick="pjax.loadUrl(&quot;posts/4bb33804/&quot;);" href="javascript:void(0);" alt="">PaddleYOLO训练自己的数据集</a><div class="blog-slider__text">近期一直在研究毕业设计，在其中，我涉及到了PaddleYOLO的训练和部署，在网上的教程较少，经过不断努力，我也算是跑出来了，所以在这里分享出来做个记录，防止下次使用又忘记了怎么搞。</div><a class="blog-slider__button" onclick="pjax.loadUrl(&quot;posts/4bb33804/&quot;);" href="javascript:void(0);" alt="">详情       </a></div></div><div class="blog-slider__item swiper-slide" style="width: 750px; opacity: 1; transform: translate3d(0px, 0px, 0px); transition-duration: 0ms;"><a class="blog-slider__img" onclick="pjax.loadUrl(&quot;posts/5f71a4b1/&quot;);" href="javascript:void(0);" alt=""><img width="48" height="48" src= "" data-lazy-src="https://p.liiiu.cn/i/2025/04/27/680dc7916ef34.webp" alt="" onerror="this.src=/img/error-page.png; this.onerror = null;"/></a><div class="blog-slider__content"><span class="blog-slider__code">2025-04-28</span><a class="blog-slider__title" onclick="pjax.loadUrl(&quot;posts/5f71a4b1/&quot;);" href="javascript:void(0);" alt="">耗子面板和DPanel简单体验</a><div 
class="blog-slider__text">最近毕业设计临近尾声，代码也敲完了，目前就剩稍微的调优，就可以开始写论文啦！在闲暇时间，我也尝试部署了一些不一样的面板程序，总感觉1panel稍微有些复杂，很多功能并用不上，比如GPU，下面简单分享一下我的体验。</div><a class="blog-slider__button" onclick="pjax.loadUrl(&quot;posts/5f71a4b1/&quot;);" href="javascript:void(0);" alt="">详情       </a></div></div><div class="blog-slider__item swiper-slide" style="width: 750px; opacity: 1; transform: translate3d(0px, 0px, 0px); transition-duration: 0ms;"><a class="blog-slider__img" onclick="pjax.loadUrl(&quot;posts/40702a0d/&quot;);" href="javascript:void(0);" alt=""><img width="48" height="48" src= "" data-lazy-src="https://p.liiiu.cn/i/2025/05/06/6819cd4532457.webp" alt="" onerror="this.src=/img/error-page.png; this.onerror = null;"/></a><div class="blog-slider__content"><span class="blog-slider__code">2025-05-07</span><a class="blog-slider__title" onclick="pjax.loadUrl(&quot;posts/40702a0d/&quot;);" href="javascript:void(0);" alt="">本地实现HEXO文章AI摘要</a><div class="blog-slider__text">五一假期结束啦！这五天啥也没干，就在宿舍敲代码，都快无聊死了，恰逢找到了一些免费API，就自己实现一个AI摘要吧！利用API生成摘要文本放在文章头部，再通过hexo进行渲染就好啦！</div><a class="blog-slider__button" onclick="pjax.loadUrl(&quot;posts/40702a0d/&quot;);" href="javascript:void(0);" alt="">详情       </a></div></div><div class="blog-slider__item swiper-slide" style="width: 750px; opacity: 1; transform: translate3d(0px, 0px, 0px); transition-duration: 0ms;"><a class="blog-slider__img" onclick="pjax.loadUrl(&quot;posts/7915ee6b/&quot;);" href="javascript:void(0);" alt=""><img width="48" height="48" src= "" data-lazy-src="https://p.liiiu.cn/i/2025/05/25/6832cc105bc41.webp" alt="" onerror="this.src=/img/error-page.png; this.onerror = null;"/></a><div class="blog-slider__content"><span class="blog-slider__code">2025-05-25</span><a class="blog-slider__title" onclick="pjax.loadUrl(&quot;posts/7915ee6b/&quot;);" href="javascript:void(0);" alt="">数据库可视化WEB工具对比</a><div class="blog-slider__text">最近迁移服务器，并且搞了一个1P专业版玩玩，感觉效果很不错，想找一个管理数据库的服务，其中看了phpmyadmin等一些知名项目，但是都不太符合我的要求，最后经过筛选，找到了比较合适的，分享给大家！</div><a 
class="blog-slider__button" onclick="pjax.loadUrl(&quot;posts/7915ee6b/&quot;);" href="javascript:void(0);" alt="">详情       </a></div></div><div class="blog-slider__item swiper-slide" style="width: 750px; opacity: 1; transform: translate3d(0px, 0px, 0px); transition-duration: 0ms;"><a class="blog-slider__img" onclick="pjax.loadUrl(&quot;posts/caee2d9f/&quot;);" href="javascript:void(0);" alt=""><img width="48" height="48" src= "" data-lazy-src="https://p.liiiu.cn/i/2025/04/18/68024a618942b.webp" alt="" onerror="this.src=/img/error-page.png; this.onerror = null;"/></a><div class="blog-slider__content"><span class="blog-slider__code">2025-04-18</span><a class="blog-slider__title" onclick="pjax.loadUrl(&quot;posts/caee2d9f/&quot;);" href="javascript:void(0);" alt="">美化你的RSS订阅地址</a><div class="blog-slider__text">近期天天忙着毕业设计，冷落了站点文章的更新，最近也终于算是快要搞完了，于是开始捣鼓一些好玩的东西，在浏览阮一峰大佬的周刊时，了解到了RSS也能美化，折腾了一下，没想到还真实现了，在此分享一下。</div><a class="blog-slider__button" onclick="pjax.loadUrl(&quot;posts/caee2d9f/&quot;);" href="javascript:void(0);" alt="">详情       </a></div></div></div><div class="blog-slider__pagination swiper-pagination-clickable swiper-pagination-bullets"></div></div></div>')}for(var elist="null".split(","),cpage=location.pathname,epage="/",flag=0,i=0;i<elist.length;i++)cpage.includes(elist[i])&&flag++;("all"===epage&&0==flag||epage===cpage)&&butterfly_swiper_injector_config()</script><script defer src="https://jsd.liiiu.cn/npm/swiper@11.1.14/swiper-bundle.min.js"></script><script defer data-pjax="" src="/config/swiper/swiper_init.js"></script><script 
src="https://jsd.liiiu.cn/npm/live2d-widget@^3.1.3/lib/L2Dwidget.min.js"></script><script>L2Dwidget.init({pluginModelPath:"assets/",model:{scale:1,hHeadPos:.5,vHeadPos:.618,jsonPath:"/live2dw/assets/tororo.model.json"},display:{superSample:2,width:250,height:500,position:"left",hOffset:-20,vOffset:-90},mobile:{show:!1,scale:.5},react:{opacityDefault:.7,opacityOnHover:.2},log:!1,pluginJsPath:"lib/",pluginRootPath:"live2dw/",tagMode:!1})</script></body></html>