<!DOCTYPE html><html lang="zh-CN" data-theme="light"><head><meta charset="UTF-8"><meta http-equiv="X-UA-Compatible" content="IE=edge"><meta name="viewport" content="width=device-width, initial-scale=1.0,viewport-fit=cover"><title>hadoop学习 | mundane</title><meta name="author" content="frode"><meta name="copyright" content="frode"><meta name="format-detection" content="telephone=no"><meta name="theme-color" content="#ffffff"><meta name="description" content="Hadoop   HDFSHDFS Hadoop分布式文件系统 分布式软件应该具备的特性： 1.分布式存储的优点？无限扩展支撑海量数据存储 2.元数据记录的功能？快速定位文件位置便于查找 3.文件分块存储的好处是什么？针对块并行操作提高效率 4.设置副本备份的作用是什么？冗余存储保障数据安全  文件系统协议 各角色职责主角色：NameNode  NameNode内部通过内存和磁盘文件两种方式管理元">
<meta property="og:type" content="article">
<meta property="og:title" content="hadoop学习">
<meta property="og:url" content="https://gitee.com/frode117/2023/04/27/hadoop-study/index.html">
<meta property="og:site_name" content="mundane">
<meta property="og:description" content="Hadoop   HDFSHDFS Hadoop分布式文件系统 分布式软件应该具备的特性： 1.分布式存储的优点？无限扩展支撑海量数据存储 2.元数据记录的功能？快速定位文件位置便于查找 3.文件分块存储的好处是什么？针对块并行操作提高效率 4.设置副本备份的作用是什么？冗余存储保障数据安全  文件系统协议 各角色职责主角色：NameNode  NameNode内部通过内存和磁盘文件两种方式管理元">
<meta property="og:locale" content="zh_CN">
<meta property="og:image" content="https://mundane-1317827053.cos.ap-chengdu.myqcloud.com/img/67190321_p0.png">
<meta property="article:published_time" content="2023-04-27T14:47:36.000Z">
<meta property="article:modified_time" content="2023-04-28T00:21:17.747Z">
<meta property="article:author" content="frode">
<meta property="article:tag" content="hadoop">
<meta property="article:tag" content="hive">
<meta name="twitter:card" content="summary">
<meta name="twitter:image" content="https://mundane-1317827053.cos.ap-chengdu.myqcloud.com/img/67190321_p0.png"><link rel="shortcut icon" href="/mundane/img/ai.jpg"><link rel="canonical" href="https://gitee.com/frode117/2023/04/27/hadoop-study/index.html"><link rel="preconnect" href="//cdn.jsdelivr.net"/><link rel="preconnect" href="//busuanzi.ibruce.info"/><link rel="stylesheet" href="/mundane/css/index.css"><link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/@fortawesome/fontawesome-free/css/all.min.css" media="print" onload="this.media='all'"><link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/node-snackbar/dist/snackbar.min.css" media="print" onload="this.media='all'"><link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/@fancyapps/ui/dist/fancybox/fancybox.min.css" media="print" onload="this.media='all'"><script>const GLOBAL_CONFIG = { 
  // Site root path — all theme-relative URLs are resolved against this prefix.
  root: '/mundane/',
  // Algolia search is not configured; the localSearch block below is used instead.
  algolia: undefined,
  // Local search: index file location plus message templates (the theme JS
  // presumably interpolates ${query} / ${hits} — confirm against search script).
  localSearch: {"path":"/mundane/search.xml","languages":{"hits_empty":"找不到您查询的内容：${query}","hits_stats":"共找到 ${hits} 篇文章"}},
  translate: undefined,
  noticeOutdate: undefined,
  // Code-highlight options. NOTE(review): "highlighjs" looks misspelled, but it
  // is the exact key this theme build emits and the reader JS must match it —
  // do not change the spelling here alone.
  highlight: {"plugin":"highlighjs","highlightCopy":true,"highlightLang":true,"highlightHeightLimit":false},
  // Messages shown by the code-copy feature.
  copy: {
    success: '复制成功',
    error: '复制错误',
    noSupport: '浏览器不支持'
  },
  // Whether homepage / post dates are rendered as relative times.
  relativeDate: {
    homepage: false,
    post: false
  },
  runtime: '',
  // Suffix strings used when a relative date is rendered.
  dateSuffix: {
    just: '刚刚',
    min: '分钟前',
    hour: '小时前',
    day: '天前',
    month: '个月前'
  },
  copyright: undefined,
  // Image lightbox implementation; the fancybox stylesheet is linked in <head>.
  lightbox: 'fancybox',
  // Toast texts and colors for the theme / language toggle notifications.
  Snackbar: {"chs_to_cht":"你已切换为繁体","cht_to_chs":"你已切换为简体","day_to_night":"你已切换为深色模式","night_to_day":"你已切换为浅色模式","bgLight":"#49b1f5","bgDark":"#1f1f1f","position":"bottom-right"},
  // Third-party asset URLs (presumably fetched on demand by the theme —
  // confirm against the theme's gallery code).
  source: {
    justifiedGallery: {
      js: 'https://cdn.jsdelivr.net/npm/flickr-justified-gallery/dist/fjGallery.min.js',
      css: 'https://cdn.jsdelivr.net/npm/flickr-justified-gallery/dist/fjGallery.min.css'
    }
  },
  isPhotoFigcaption: false,
  // Lazy-load flag for post images (consumed by theme JS).
  islazyload: true,
  isAnchor: false,
  // Where the reading-progress percentage is displayed.
  percent: {
    toc: true,
    rightside: false,
  }
}</script><script id="config-diff">var GLOBAL_CONFIG_SITE = {
  // Per-page settings layered on top of GLOBAL_CONFIG: this page is a single
  // post with a table of contents, not the homepage.
  title: 'hadoop学习',
  isPost: true,
  isHome: false,
  isHighlightShrink: false,
  isToc: true,
  // Local-time rendering of the article:modified_time meta in <head>.
  postUpdate: '2023-04-28 08:21:17'
}</script><noscript><style type="text/css">
  #nav {
    opacity: 1
  }
  .justified-gallery img {
    opacity: 1
  }

  #recent-posts time,
  #post-meta time {
    display: inline !important
  }
</style></noscript><script>(win=>{
    win.saveToLocal = {
      set: function setWithExpiry(key, value, ttl) {
        if (ttl === 0) return
        const now = new Date()
        const expiryDay = ttl * 86400000
        const item = {
          value: value,
          expiry: now.getTime() + expiryDay,
        }
        localStorage.setItem(key, JSON.stringify(item))
      },

      get: function getWithExpiry(key) {
        const itemStr = localStorage.getItem(key)

        if (!itemStr) {
          return undefined
        }
        const item = JSON.parse(itemStr)
        const now = new Date()

        if (now.getTime() > item.expiry) {
          localStorage.removeItem(key)
          return undefined
        }
        return item.value
      }
    }
  
    win.getScript = url => new Promise((resolve, reject) => {
      const script = document.createElement('script')
      script.src = url
      script.async = true
      script.onerror = reject
      script.onload = script.onreadystatechange = function() {
        const loadState = this.readyState
        if (loadState && loadState !== 'loaded' && loadState !== 'complete') return
        script.onload = script.onreadystatechange = null
        resolve()
      }
      document.head.appendChild(script)
    })
  
    win.getCSS = (url,id = false) => new Promise((resolve, reject) => {
      const link = document.createElement('link')
      link.rel = 'stylesheet'
      link.href = url
      if (id) link.id = id
      link.onerror = reject
      link.onload = link.onreadystatechange = function() {
        const loadState = this.readyState
        if (loadState && loadState !== 'loaded' && loadState !== 'complete') return
        link.onload = link.onreadystatechange = null
        resolve()
      }
      document.head.appendChild(link)
    })
  
      win.activateDarkMode = function () {
        document.documentElement.setAttribute('data-theme', 'dark')
        if (document.querySelector('meta[name="theme-color"]') !== null) {
          document.querySelector('meta[name="theme-color"]').setAttribute('content', '#0d0d0d')
        }
      }
      win.activateLightMode = function () {
        document.documentElement.setAttribute('data-theme', 'light')
        if (document.querySelector('meta[name="theme-color"]') !== null) {
          document.querySelector('meta[name="theme-color"]').setAttribute('content', '#ffffff')
        }
      }
      const t = saveToLocal.get('theme')
    
          if (t === 'dark') activateDarkMode()
          else if (t === 'light') activateLightMode()
        
      const asideStatus = saveToLocal.get('aside-status')
      if (asideStatus !== undefined) {
        if (asideStatus === 'hide') {
          document.documentElement.classList.add('hide-aside')
        } else {
          document.documentElement.classList.remove('hide-aside')
        }
      }
    
    const detectApple = () => {
      if(/iPad|iPhone|iPod|Macintosh/.test(navigator.userAgent)){
        document.documentElement.classList.add('apple')
      }
    }
    detectApple()
    })(window)</script><svg aria-hidden="true" style="position:absolute; overflow:hidden; width:0; height:0"><symbol id="icon-sun" viewBox="0 0 1024 1024"><path d="M960 512l-128 128v192h-192l-128 128-128-128H192v-192l-128-128 128-128V192h192l128-128 128 128h192v192z" fill="#FFD878" p-id="8420"></path><path d="M736 512a224 224 0 1 0-448 0 224 224 0 1 0 448 0z" fill="#FFE4A9" p-id="8421"></path><path d="M512 109.248L626.752 224H800v173.248L914.752 512 800 626.752V800h-173.248L512 914.752 397.248 800H224v-173.248L109.248 512 224 397.248V224h173.248L512 109.248M512 64l-128 128H192v192l-128 128 128 128v192h192l128 128 128-128h192v-192l128-128-128-128V192h-192l-128-128z" fill="#4D5152" p-id="8422"></path><path d="M512 320c105.888 0 192 86.112 192 192s-86.112 192-192 192-192-86.112-192-192 86.112-192 192-192m0-32a224 224 0 1 0 0 448 224 224 0 0 0 0-448z" fill="#4D5152" p-id="8423"></path></symbol><symbol id="icon-moon" viewBox="0 0 1024 1024"><path d="M611.370667 167.082667a445.013333 445.013333 0 0 1-38.4 161.834666 477.824 477.824 0 0 1-244.736 244.394667 445.141333 445.141333 0 0 1-161.109334 38.058667 85.077333 85.077333 0 0 0-65.066666 135.722666A462.08 462.08 0 1 0 747.093333 102.058667a85.077333 85.077333 0 0 0-135.722666 65.024z" fill="#FFB531" p-id="11345"></path><path d="M329.728 274.133333l35.157333-35.157333a21.333333 21.333333 0 1 0-30.165333-30.165333l-35.157333 35.157333-35.114667-35.157333a21.333333 21.333333 0 0 0-30.165333 30.165333l35.114666 35.157333-35.114666 35.157334a21.333333 21.333333 0 1 0 30.165333 30.165333l35.114667-35.157333 35.157333 35.157333a21.333333 21.333333 0 1 0 30.165333-30.165333z" fill="#030835" p-id="11346"></path></symbol></svg><meta name="generator" content="Hexo 6.3.0"><link rel="alternate" href="/mundane/atom.xml" title="mundane" type="application/atom+xml">
</head><body><div id="web_bg"></div><div id="sidebar"><div id="menu-mask"></div><div id="sidebar-menus"><div class="avatar-img is-center"><img src="/mundane/img/violet.jpg" onerror="onerror=null;src='/mundane/img/friend_404.gif'" alt="avatar"/></div><div class="sidebar-site-data site-data is-center"><a href="/mundane/archives/"><div class="headline">文章</div><div class="length-num">4</div></a><a href="/mundane/tags/"><div class="headline">标签</div><div class="length-num">8</div></a><a href="/mundane/categories/"><div class="headline">分类</div><div class="length-num">4</div></a></div><hr/><div class="menus_items"><div class="menus_item"><a class="site-page" href="/mundane/"><i class="fa-fw fas fa-home"></i><span> 首页</span></a></div><div class="menus_item"><a class="site-page" href="/mundane/archives/"><i class="fa-fw fas fa-archive"></i><span> 归档</span></a></div><div class="menus_item"><a class="site-page" href="/mundane/tags/"><i class="fa-fw fas fa-tags"></i><span> 标签</span></a></div><div class="menus_item"><a class="site-page" href="/mundane/categories/"><i class="fa-fw fas fa-folder-open"></i><span> 分类</span></a></div><div class="menus_item"><a class="site-page" href="/mundane/messageboard/"><i class="fa-fw fa fa-paper-plane"></i><span> 留言板</span></a></div><div class="menus_item"><a class="site-page" href="/mundane/link/"><i class="fa-fw fas fa-link"></i><span> 友链</span></a></div><div class="menus_item"><a class="site-page group" href="javascript:void(0);"><i class="fa-fw fa fa-list"></i><span> 菜单</span><i class="fas fa-chevron-down"></i></a><ul class="menus_item_child"><li><a class="site-page child" href="/mundane/myself/"><i class="fa-fw fa fa-id-card"></i><span> myself</span></a></li><li><a class="site-page child" target="_blank" rel="noopener" href="https://github.com/jerryc127/hexo-theme-butterfly/"><i class="fa-fw fa fa-heart"></i><span> butterfly主题</span></a></li></ul></div></div></div></div><div class="post" id="body-wrap"><header class="post-bg" id="page-header" 
style="background-image: url('https://mundane-1317827053.cos.ap-chengdu.myqcloud.com/img/67190321_p0.png')"><nav id="nav"><span id="blog-info"><a href="/mundane/" title="mundane"><span class="site-name">mundane</span></a></span><div id="menus"><div id="search-button"><a class="site-page social-icon search" href="javascript:void(0);"><i class="fas fa-search fa-fw"></i><span> 搜索</span></a></div><div class="menus_items"><div class="menus_item"><a class="site-page" href="/mundane/"><i class="fa-fw fas fa-home"></i><span> 首页</span></a></div><div class="menus_item"><a class="site-page" href="/mundane/archives/"><i class="fa-fw fas fa-archive"></i><span> 归档</span></a></div><div class="menus_item"><a class="site-page" href="/mundane/tags/"><i class="fa-fw fas fa-tags"></i><span> 标签</span></a></div><div class="menus_item"><a class="site-page" href="/mundane/categories/"><i class="fa-fw fas fa-folder-open"></i><span> 分类</span></a></div><div class="menus_item"><a class="site-page" href="/mundane/messageboard/"><i class="fa-fw fa fa-paper-plane"></i><span> 留言板</span></a></div><div class="menus_item"><a class="site-page" href="/mundane/link/"><i class="fa-fw fas fa-link"></i><span> 友链</span></a></div><div class="menus_item"><a class="site-page group" href="javascript:void(0);"><i class="fa-fw fa fa-list"></i><span> 菜单</span><i class="fas fa-chevron-down"></i></a><ul class="menus_item_child"><li><a class="site-page child" href="/mundane/myself/"><i class="fa-fw fa fa-id-card"></i><span> myself</span></a></li><li><a class="site-page child" target="_blank" rel="noopener" href="https://github.com/jerryc127/hexo-theme-butterfly/"><i class="fa-fw fa fa-heart"></i><span> butterfly主题</span></a></li></ul></div></div><div id="toggle-menu"><a class="site-page" href="javascript:void(0);"><i class="fas fa-bars fa-fw"></i></a></div></div></nav><div id="post-info"><h1 class="post-title">hadoop学习</h1><div id="post-meta"><div class="meta-firstline"><span class="post-meta-date"><i class="far 
fa-calendar-alt fa-fw post-meta-icon"></i><span class="post-meta-label">发表于</span><time class="post-meta-date-created" datetime="2023-04-27T14:47:36.000Z" title="发表于 2023-04-27 22:47:36">2023-04-27</time><span class="post-meta-separator">|</span><i class="fas fa-history fa-fw post-meta-icon"></i><span class="post-meta-label">更新于</span><time class="post-meta-date-updated" datetime="2023-04-28T00:21:17.747Z" title="更新于 2023-04-28 08:21:17">2023-04-28</time></span><span class="post-meta-categories"><span class="post-meta-separator">|</span><i class="fas fa-inbox fa-fw post-meta-icon"></i><a class="post-meta-categories" href="/mundane/categories/%E5%A4%A7%E6%95%B0%E6%8D%AE/">大数据</a></span></div><div class="meta-secondline"><span class="post-meta-separator">|</span><span class="post-meta-wordcount"><i class="far fa-file-word fa-fw post-meta-icon"></i><span class="post-meta-label">字数总计:</span><span class="word-count">6.8k</span><span class="post-meta-separator">|</span><i class="far fa-clock fa-fw post-meta-icon"></i><span class="post-meta-label">阅读时长:</span><span>27分钟</span></span><span class="post-meta-separator">|</span><span class="post-meta-pv-cv" id="" data-flag-title="hadoop学习"><i class="far fa-eye fa-fw post-meta-icon"></i><span class="post-meta-label">阅读量:</span><span id="busuanzi_value_page_pv"><i class="fa-solid fa-spinner fa-spin"></i></span></span></div></div></div></header><main class="layout" id="content-inner"><div id="post"><article class="post-content" id="article-container"><h1 id="Hadoop"><a href="#Hadoop" class="headerlink" title="Hadoop"></a>Hadoop</h1><p><img src="https://mundane-1317827053.cos.ap-chengdu.myqcloud.com/typora/image-20230415204747344.png" alt="image-20230415204747344"></p>
<p><img src="https://mundane-1317827053.cos.ap-chengdu.myqcloud.com/typora/image-20230415204803270.png" alt="image-20230415204803270"></p>
<p><img src="https://mundane-1317827053.cos.ap-chengdu.myqcloud.com/typora/image-20230415205039745.png" alt="image-20230415205039745"></p>
<h2 id="HDFS"><a href="#HDFS" class="headerlink" title="HDFS"></a>HDFS</h2><p>HDFS Hadoop分布式文件系统</p>
<p>分布式软件应该具备的特性：</p>
<p>1.<strong>分布式存储</strong>的优点？<strong>无限扩展</strong>支撑海量数据存储</p>
<p>2.<strong>元数据记录</strong>的功能？快速<strong>定位文件</strong>位置便于查找</p>
<p>3.文件<strong>分块存储</strong>的好处是什么？针对块<strong>并行操作</strong>提高效率</p>
<p>4.设置<strong>副本备份</strong>的作用是什么？冗余存储保障<strong>数据安全</strong></p>
<p><img src="https://mundane-1317827053.cos.ap-chengdu.myqcloud.com/typora/image-20230422105043508.png" alt="image-20230422105043508"></p>
<h3 id="文件系统协议"><a href="#文件系统协议" class="headerlink" title="文件系统协议"></a>文件系统协议</h3><p><img src="https://mundane-1317827053.cos.ap-chengdu.myqcloud.com/typora/image-20230422110309360.png" alt="image-20230422110309360"></p>
<h3 id="各角色职责"><a href="#各角色职责" class="headerlink" title="各角色职责"></a>各角色职责</h3><h4 id="主角色：NameNode"><a href="#主角色：NameNode" class="headerlink" title="主角色：NameNode"></a>主角色：NameNode</h4><p><img src="https://mundane-1317827053.cos.ap-chengdu.myqcloud.com/typora/image-20230422111020934.png" alt="image-20230422111020934"></p>
<ul>
<li>NameNode内部通过<strong>内存</strong>和<strong>磁盘文件</strong>两种方式管理元数据</li>
<li>其中磁盘上的元数据文件包括Fsimage内存元数据镜像文件和edits log（Journal）编辑日志。</li>
</ul>
<h4 id="从角色：DataNode"><a href="#从角色：DataNode" class="headerlink" title="从角色：DataNode"></a>从角色：DataNode</h4><p><img src="https://mundane-1317827053.cos.ap-chengdu.myqcloud.com/typora/image-20230422111719328.png" alt="image-20230422111719328"></p>
<p><img src="https://mundane-1317827053.cos.ap-chengdu.myqcloud.com/typora/image-20230422111837341.png" alt="image-20230422111837341"></p>
<h4 id="Namenode职责"><a href="#Namenode职责" class="headerlink" title="Namenode职责"></a>Namenode职责</h4><p><img src="https://mundane-1317827053.cos.ap-chengdu.myqcloud.com/typora/image-20230422112008977.png" alt="image-20230422112008977"></p>
<h4 id="DataNode职责"><a href="#DataNode职责" class="headerlink" title="DataNode职责"></a>DataNode职责</h4><p><img src="https://mundane-1317827053.cos.ap-chengdu.myqcloud.com/typora/image-20230422112115293.png" alt="image-20230422112115293"></p>
<h3 id="工作流程与机制"><a href="#工作流程与机制" class="headerlink" title="工作流程与机制"></a>工作流程与机制</h3><h4 id="写数据"><a href="#写数据" class="headerlink" title="写数据"></a>写数据</h4><p>Pipeline管道</p>
<p><img src="https://mundane-1317827053.cos.ap-chengdu.myqcloud.com/typora/image-20230422112636764.png" alt="image-20230422112636764"></p>
<p>ack应答响应</p>
<p><img src="https://mundane-1317827053.cos.ap-chengdu.myqcloud.com/typora/image-20230422113100254.png" alt="image-20230422113100254"></p>
<p>默认3副本存储策略</p>
<p><img src="https://mundane-1317827053.cos.ap-chengdu.myqcloud.com/typora/image-20230422113310108.png" alt="image-20230422113310108"></p>
<h4 id="流程总结"><a href="#流程总结" class="headerlink" title="流程总结"></a>流程总结</h4><p><img src="https://mundane-1317827053.cos.ap-chengdu.myqcloud.com/typora/image-20230422113748310.png" alt="image-20230422113748310"></p>
<p><img src="https://mundane-1317827053.cos.ap-chengdu.myqcloud.com/typora/image-20230422113826105.png" alt="image-20230422113826105"></p>
<p><img src="https://mundane-1317827053.cos.ap-chengdu.myqcloud.com/typora/image-20230422113923689.png" alt="image-20230422113923689"></p>
<p><img src="https://mundane-1317827053.cos.ap-chengdu.myqcloud.com/typora/image-20230422113951427.png" alt="image-20230422113951427"></p>
<h2 id="MapReduce"><a href="#MapReduce" class="headerlink" title="MapReduce"></a>MapReduce</h2><p>是一个分布式计算框架</p>
<p>分而治之，先拆分（Map）再合并（Reduce）</p>
<p>MapReduce处理的数据类型是<strong>&lt;key,value&gt;键值对</strong></p>
<h3 id="特点"><a href="#特点" class="headerlink" title="特点"></a>特点</h3><ul>
<li>易于编程</li>
<li>良好的扩展性</li>
<li>高容错性</li>
<li>适合海量数据的离线处理</li>
</ul>
<h3 id="局限性"><a href="#局限性" class="headerlink" title="局限性"></a>局限性</h3><ul>
<li>实时计算性能差</li>
<li>不能进行流式计算</li>
</ul>
<h3 id="分布式计算"><a href="#分布式计算" class="headerlink" title="分布式计算"></a>分布式计算</h3><p>将应用分解成许多小的部分，分配给多台计算机进行处理</p>
<h3 id="实例进程"><a href="#实例进程" class="headerlink" title="实例进程"></a>实例进程</h3><p><img src="https://mundane-1317827053.cos.ap-chengdu.myqcloud.com/typora/image-20230422115208243.png" alt="image-20230422115208243"></p>
<h3 id="阶段组成"><a href="#阶段组成" class="headerlink" title="阶段组成"></a>阶段组成</h3><p>一个MapReduce编程模型中<strong>只能包含一个Map阶段和一个Reduce阶段，或者只有Map阶段</strong></p>
<p><img src="https://mundane-1317827053.cos.ap-chengdu.myqcloud.com/typora/image-20230422124444233.png" alt="image-20230422124444233"></p>
<h4 id="Map阶段"><a href="#Map阶段" class="headerlink" title="Map阶段"></a>Map阶段</h4><p><img src="https://mundane-1317827053.cos.ap-chengdu.myqcloud.com/typora/image-20230422125522188.png" alt="image-20230422125522188"></p>
<p><img src="https://mundane-1317827053.cos.ap-chengdu.myqcloud.com/typora/image-20230422124230260.png" alt="image-20230422124230260"></p>
<h4 id="Reduce阶段"><a href="#Reduce阶段" class="headerlink" title="Reduce阶段"></a>Reduce阶段</h4><p><img src="https://mundane-1317827053.cos.ap-chengdu.myqcloud.com/typora/image-20230422125422516.png" alt="image-20230422125422516"></p>
<h4 id="shuffle"><a href="#shuffle" class="headerlink" title="shuffle"></a>shuffle</h4><p><img src="https://mundane-1317827053.cos.ap-chengdu.myqcloud.com/typora/image-20230422125008789.png" alt="image-20230422125008789"></p>
<p><img src="https://mundane-1317827053.cos.ap-chengdu.myqcloud.com/typora/image-20230422125030715.png" alt="image-20230422125030715"></p>
<p><img src="https://mundane-1317827053.cos.ap-chengdu.myqcloud.com/typora/image-20230422125133203.png" alt="image-20230422125133203"></p>
<h2 id="YARN"><a href="#YARN" class="headerlink" title="YARN"></a>YARN</h2><p>是一个<strong>通用资源管理系统</strong>和<strong>调度平台</strong></p>
<p>资源管理系统：集群的硬件资源，和程序运行相关，比如内存、CPU等</p>
<p>调度平台：多个程序同时申请资源如何分配，调度的规则（算法）</p>
<p>通用：支持各种计算程序</p>
<h3 id="架构"><a href="#架构" class="headerlink" title="架构"></a>架构</h3><p><img src="https://mundane-1317827053.cos.ap-chengdu.myqcloud.com/typora/image-20230422130526393.png" alt="image-20230422130526393"></p>
<ul>
<li><strong>ResourceManager（RM） 集群物理层面</strong><ul>
<li>YARN集群中的主角色，决定系统中所有应用程序之间<strong>资源分配的最终权限，即最终仲裁者</strong></li>
<li>接收用户的作业提交，并通过NM分配、管理各个机器上的计算资源</li>
</ul>
</li>
<li><strong>NodeManager（NM）     集群物理层面</strong><ul>
<li>YARN中的从角色，一台机器上一个，负责本机器上的计算资源</li>
<li>根据RM命令，启动Container容器，监视容器的资源使用情况，并且向RM主角色汇报资源使用情况</li>
</ul>
</li>
<li><strong>ApplicationMaster（AM） App层面</strong><ul>
<li>用户提交的每个应用程序均包含一个AM</li>
<li>应用程序内的“老大”，负责程序内部各阶段的资源申请，监督程序的执行情况</li>
</ul>
</li>
</ul>
<p>Client</p>
<p>Container容器（资源的抽象）</p>
<h3 id="核心交互流程"><a href="#核心交互流程" class="headerlink" title="核心交互流程"></a>核心交互流程</h3><ul>
<li>MR作业提交  Client—&gt;RM</li>
<li>资源的申请    MrAppMaster—&gt;RM</li>
<li>MR作业状态汇报 Container（Map|Reduce Task）—&gt;Container（ MrAppMaster）</li>
<li>节点的状态汇报 NM—&gt;RM</li>
</ul>
<p>当用户向YARN中提交一个应用程序后，YARN将分两个阶段运行该程序。</p>
<ul>
<li>第一个阶段是客户端申请资源启动运行本次程序的ApplicationMaster</li>
<li>第二个阶段是由ApplicationMaster根据本次程序内部具体情况，为它申请资源，并监控它整个运行过程，直到运行完成</li>
</ul>
<p><img src="https://mundane-1317827053.cos.ap-chengdu.myqcloud.com/typora/image-20230422132258505.png" alt="image-20230422132258505"></p>
<p><img src="https://mundane-1317827053.cos.ap-chengdu.myqcloud.com/typora/image-20230422132232429.png" alt="image-20230422132232429"></p>
<p><img src="https://mundane-1317827053.cos.ap-chengdu.myqcloud.com/typora/image-20230422132347304.png" alt="image-20230422132347304"></p>
<p><img src="https://mundane-1317827053.cos.ap-chengdu.myqcloud.com/typora/image-20230422132401530.png" alt="image-20230422132401530"></p>
<h3 id="资源调度器Scheduler"><a href="#资源调度器Scheduler" class="headerlink" title="资源调度器Scheduler"></a>资源调度器Scheduler</h3><p>资源是有限的</p>
<p>根据一些定义的策略为应用程序分配资源</p>
<h4 id="FIFO-Scheduler（先进先出调度器）"><a href="#FIFO-Scheduler（先进先出调度器）" class="headerlink" title="FIFO Scheduler（先进先出调度器）"></a>FIFO Scheduler（先进先出调度器）</h4><p><img src="https://mundane-1317827053.cos.ap-chengdu.myqcloud.com/typora/image-20230422143824616.png" alt="image-20230422143824616"></p>
<h4 id="Capacity-Scheduler（容量调度器）"><a href="#Capacity-Scheduler（容量调度器）" class="headerlink" title="Capacity Scheduler（容量调度器）"></a>Capacity Scheduler（容量调度器）</h4><p><img src="https://mundane-1317827053.cos.ap-chengdu.myqcloud.com/typora/image-20230422144224626.png" alt="image-20230422144224626"></p>
<p><img src="https://mundane-1317827053.cos.ap-chengdu.myqcloud.com/typora/image-20230422144312368.png" alt="image-20230422144312368"></p>
<p><img src="https://mundane-1317827053.cos.ap-chengdu.myqcloud.com/typora/image-20230422144336007.png" alt="image-20230422144336007"></p>
<h4 id="Fair-Scheduler（公平调度器）"><a href="#Fair-Scheduler（公平调度器）" class="headerlink" title="Fair Scheduler（公平调度器）"></a>Fair Scheduler（公平调度器）</h4><p><img src="https://mundane-1317827053.cos.ap-chengdu.myqcloud.com/typora/image-20230422144440785.png" alt="image-20230422144440785"></p>
<p><img src="https://mundane-1317827053.cos.ap-chengdu.myqcloud.com/typora/image-20230422144548273.png" alt="image-20230422144548273"></p>
<p><img src="https://mundane-1317827053.cos.ap-chengdu.myqcloud.com/typora/image-20230422144656064.png" alt="image-20230422144656064"></p>
<p><img src="https://mundane-1317827053.cos.ap-chengdu.myqcloud.com/typora/image-20230422144728710.png" alt="image-20230422144728710"></p>
<table>
<thead>
<tr>
<th>Scheduler</th>
<th>优势</th>
<th>坏处</th>
</tr>
</thead>
<tbody><tr>
<td>FIFO</td>
<td>无需配置，先到先得、易于执行</td>
<td>任务的优先级不会变高，因此高优先级的作业需要等待。不适合共享集群</td>
</tr>
<tr>
<td>Capacity</td>
<td>层次化的队列设计；容量保证；安全；弹性分配</td>
<td></td>
</tr>
<tr>
<td>Fair</td>
<td>分层队列；基于用户或组的队列映射；资源抢占；保证最小配额；允许资源共享；默认不限制每个队列和用户可以同时运行应用的数量</td>
<td></td>
</tr>
</tbody></table>
<h2 id="Apache-Hive"><a href="#Apache-Hive" class="headerlink" title="Apache Hive"></a>Apache Hive</h2><ul>
<li><strong>数据仓库</strong>（Data Warehouse，简称<strong>数仓</strong>，<strong>DW</strong>），是一个<strong>用于存储、分析、报告的数据系统。</strong></li>
<li>数据仓库的目的是构建<strong>面向分析</strong>的集成化数据环境，分析结果为企业提供决策支持。</li>
</ul>
<p>数据仓库本身并不“生产”任何数据，其数据来源于不同外部系统</p>
<p>数据仓库也不需要“消费”任何的数据，其结果开放给各个外部应用使用</p>
<p><strong>联机事务处理系统（OLTP）</strong>：主要任务是执行联机事务处理。基本特征是前台接受的用户数据可以立即传送到后台进行处理，并在很短的时间内给出处理结果。 例如中国人寿保险公司业务</p>
<p><strong>关系型数据库（RDBMS）是OLTP典型应用</strong>，比如：Oracle、Mysql、SQL Server等。</p>
<p>基于业务数据开展数据分析，基于分析的结果给决策提供支撑，数据驱动决策制定。</p>
<p>数据分析平台：面向分析，支持分析，并且和OLTP系统解耦合</p>
<h3 id="数仓主要特征"><a href="#数仓主要特征" class="headerlink" title="数仓主要特征"></a>数仓主要特征</h3><p><strong>面向主题性</strong>（Subject-Oriented）</p>
<p>主题是一个抽象的概念，是较高层次上数据综合、归类并进行分析利用的抽象</p>
<p><strong>集成性</strong>（Integrated）</p>
<p>主题相关的数据通常会分布在多个操作性系统中，彼此分散、独立、异构。需要集成到数仓主题下</p>
<ul>
<li>在数据进入数据仓库之前，必然要经过<strong>统一与综合，对数据进行抽取、清理、转换和汇总</strong>（ETL:抽取、转换、加载）<ul>
<li>统一源数据中所有矛盾之处<ul>
<li>如字段的同名异义、异名同义、单位不统一、字长不一致等</li>
</ul>
</li>
<li>进行数据综合和计算</li>
</ul>
</li>
</ul>
<p><strong>非易失性</strong>（Non-Volatile）</p>
<p>也叫非易变性。数据仓库是分析数据的平台，而不是创造数据的平台。</p>
<ul>
<li>数据仓库的数据反映的是一段相当长的时间内历史数据的内容</li>
<li>数据仓库中一般有大量的查询操作，但修改和删除操作很少</li>
</ul>
<p><strong>时变性</strong>（Time-Variant）</p>
<p>数据仓库的数据需要随着时间更新，以适应决策的需要。</p>
<h3 id="SQL"><a href="#SQL" class="headerlink" title="SQL"></a>SQL</h3><p>结构化查询语言（Structured Query Language）简称SQL，是一种数据库查询和程序设计语言，用于存取数据以及<strong>查询</strong>、<strong>更新</strong>和<strong>管理</strong>数据。</p>
<p>结构化数据：由二维表结构来逻辑表达和实现的数据</p>
<h4 id="SQL语法分类"><a href="#SQL语法分类" class="headerlink" title="SQL语法分类"></a>SQL语法分类</h4><p><strong>数据定义语言（DDL）</strong></p>
<p>创建或删除表</p>
<p>CREATE DATABASE - 创建新数据库</p>
<p>CREATE TABLE - 创建新表</p>
<p><strong>数据操纵语言（DML）</strong></p>
<p>针对表中的数据进行<strong>插入、更新、删除、查询</strong>操作</p>
<p>SELECT - 从数据库表中获取数据</p>
<p>UPDATE - 更新数据库表中的数据</p>
<p>DELETE - 从数据库表中删除数据</p>
<p>INSERT - 向数据库表中插入数据</p>
<p>Apache Hive是一款建立在Hadoop上的开源<strong>数据仓库</strong>系统，可以将存储在Hadoop文件中的<strong>结构化、半结构化数据文件映射为一张数据库表</strong>，基于表提供了一种类似SQL的查询模型，称为Hive查询语言（HQL），用于访问和分析存储在Hadoop文件中的大型数据集。</p>
<p>Hive核心是将<strong>HQL转换为MapReduce</strong>程序，然后将程序提交到Hadoop集群执行</p>
<h4 id="Hive和Hadoop关系"><a href="#Hive和Hadoop关系" class="headerlink" title="Hive和Hadoop关系"></a>Hive和Hadoop关系</h4><p>Hive利用HDFS存储数据，利用MapReduce查询分析数据</p>
<h4 id="对Hive的理解"><a href="#对Hive的理解" class="headerlink" title="对Hive的理解"></a>对Hive的理解</h4><p>Hive能将数据文件映射成为一张表，这个映射是指<strong>文件和表之间的对应关系</strong></p>
<p>Hive软件本身承担了<strong>SQL语法解析编译成为MapReduce</strong>的功能职责</p>
<p><img src="https://mundane-1317827053.cos.ap-chengdu.myqcloud.com/typora/image-20230422153001818.png" alt="image-20230422153001818"></p>
<h3 id="Hive架构图"><a href="#Hive架构图" class="headerlink" title="Hive架构图"></a>Hive架构图</h3><p><img src="https://mundane-1317827053.cos.ap-chengdu.myqcloud.com/typora/image-20230422153127347.png" alt="image-20230422153127347"></p>
<p><img src="https://mundane-1317827053.cos.ap-chengdu.myqcloud.com/typora/image-20230422153200814.png" alt="image-20230422153200814"></p>
<p><img src="https://mundane-1317827053.cos.ap-chengdu.myqcloud.com/typora/image-20230422153216300.png" alt="image-20230422153216300"></p>
<p><img src="https://mundane-1317827053.cos.ap-chengdu.myqcloud.com/typora/image-20230422153244627.png" alt="image-20230422153244627"></p>
<p><img src="https://mundane-1317827053.cos.ap-chengdu.myqcloud.com/typora/image-20230422153309553.png" alt="image-20230422153309553"></p>
<h4 id="元数据"><a href="#元数据" class="headerlink" title="元数据"></a>元数据</h4><p><img src="https://mundane-1317827053.cos.ap-chengdu.myqcloud.com/typora/image-20230422153416479.png" alt="image-20230422153416479"></p>
<p><img src="https://mundane-1317827053.cos.ap-chengdu.myqcloud.com/typora/image-20230422153453586.png" alt="image-20230422153453586"></p>
<p><img src="https://mundane-1317827053.cos.ap-chengdu.myqcloud.com/typora/image-20230422153507939.png" alt="image-20230422153507939"></p>
<p><img src="https://mundane-1317827053.cos.ap-chengdu.myqcloud.com/typora/image-20230422153536878.png" alt="image-20230422153536878"></p>
<p><img src="https://mundane-1317827053.cos.ap-chengdu.myqcloud.com/typora/image-20230422153638533.png" alt="image-20230422153638533"></p>
<h3 id="Hive部署"><a href="#Hive部署" class="headerlink" title="Hive部署"></a>Hive部署</h3><p><strong>服务器基础环境</strong></p>
<p>集群时间同步、防火墙关闭、主机Host映射、免密登录、JDK安装</p>
<p><strong>Hadoop集群健康可用</strong></p>
<p>启动Hive之前必须先启动Hadoop集群。特别要注意，需<strong>等待HDFS安全模式关闭之后再启动运行Hive</strong></p>
<h4 id="整合Hive"><a href="#整合Hive" class="headerlink" title="整合Hive"></a>整合Hive</h4><p>用户代理设置</p>
<figure class="highlight xml"><table><tr><td class="code"><pre><span class="line"><span class="tag">&lt;<span class="name">property</span>&gt;</span></span><br><span class="line">    <span class="tag">&lt;<span class="name">name</span>&gt;</span>hadoop.proxyuser.root.hosts<span class="tag">&lt;/<span class="name">name</span>&gt;</span></span><br><span class="line">    <span class="tag">&lt;<span class="name">value</span>&gt;</span>*<span class="tag">&lt;/<span class="name">value</span>&gt;</span></span><br><span class="line"><span class="tag">&lt;/<span class="name">property</span>&gt;</span></span><br><span class="line"></span><br><span class="line"><span class="tag">&lt;<span class="name">property</span>&gt;</span></span><br><span class="line">    <span class="tag">&lt;<span class="name">name</span>&gt;</span>hadoop.proxyuser.root.groups<span class="tag">&lt;/<span class="name">name</span>&gt;</span></span><br><span class="line">    <span class="tag">&lt;<span class="name">value</span>&gt;</span>*<span class="tag">&lt;/<span class="name">value</span>&gt;</span></span><br><span class="line"><span class="tag">&lt;/<span class="name">property</span>&gt;</span></span><br></pre></td></tr></table></figure>

<h4 id="安装mysql"><a href="#安装mysql" class="headerlink" title="安装mysql"></a>安装mysql</h4><p>卸载Centos7自带的mariadb</p>
<figure class="highlight plaintext"><table><tr><td class="code"><pre><span class="line">rpm -qa|grep mariadb</span><br></pre></td></tr></table></figure>

<figure class="highlight plaintext"><table><tr><td class="code"><pre><span class="line">- rpm -e mariadb-libs-5.5.68-1.el7.x86_64</span><br><span class="line">+ rpm -e --nodeps mariadb-libs-5.5.68-1.el7.x86_64</span><br></pre></td></tr></table></figure>

<p>安装mysql</p>
<p>mkdir &#x2F;export&#x2F;software&#x2F;mysql</p>
<p>-- 上传mysql-5.7.36-1.el6.x86_64.rpm-bundle.tar到该文件夹解压</p>
<p>tar -xvf mysql-5.7.36-1.el6.x86_64.rpm-bundle.tar</p>
<p>yum -y install libaio</p>
<figure class="highlight plaintext"><table><tr><td class="code"><pre><span class="line">rpm -ivh mysql-community-common-5.7.36-1.el6.x86_64.rpm mysql-community-libs-5.7.36-1.el6.x86_64.rpm mysql-community-client-5.7.36-1.el6.x86_64.rpm mysql-community-server-5.7.36-1.el6.x86_64.rpm --force --nodeps</span><br></pre></td></tr></table></figure>

<p><strong>可以添加参数，先强制安装 --force --nodeps</strong></p>
<p>common、libs、client、server</p>
<figure class="highlight plaintext"><table><tr><td class="code"><pre><span class="line"># 初始化</span><br><span class="line">mysqld --initialize</span><br><span class="line"></span><br><span class="line"># 更改所属组</span><br><span class="line">chown mysql:mysql /var/lib/mysql -R</span><br><span class="line"></span><br><span class="line"># 启动mysql</span><br><span class="line">systemctl start mysqld.service</span><br><span class="line"></span><br><span class="line"># 查看生成的临时root访问</span><br><span class="line">cat /var/log/mysqld.log</span><br></pre></td></tr></table></figure>

<p><img src="https://mundane-1317827053.cos.ap-chengdu.myqcloud.com/typora/image-20230422164127723.png" alt="image-20230422164127723"></p>
<p>su9OG&#x3D;isy#U&lt;</p>
<figure class="highlight plaintext"><table><tr><td class="code"><pre><span class="line"># 更新root密码 设置为123</span><br><span class="line">mysql&gt; alter user user() identified by &quot;123&quot;;</span><br><span class="line"></span><br><span class="line"># 授权</span><br><span class="line">mysql&gt; use mysql;</span><br><span class="line"></span><br><span class="line">mysql&gt; GRANT ALL PRIVILEGES ON *.* TO &#x27;root&#x27;@&#x27;%&#x27; IDENTIFIED BY &#x27;123&#x27; WITH GRANT OPTION;</span><br><span class="line"></span><br><span class="line">mysql&gt; FLUSH PRIVILEGES;</span><br><span class="line"></span><br><span class="line"># mysql的启动和关闭 状态查看</span><br><span class="line">systemctl stop mysqld</span><br><span class="line">systemctl status mysqld</span><br><span class="line">systemctl start mysqld</span><br><span class="line"></span><br><span class="line"># 建议设置为开机自启动服务</span><br><span class="line">systemctl enable mysqld</span><br><span class="line"></span><br><span class="line"># 查看是否已经设置自启动成功</span><br><span class="line">systemctl list-unit-files | grep mysqld</span><br></pre></td></tr></table></figure>

<p>Centos7 干净卸载mysql 5.7</p>
<figure class="highlight plaintext"><table><tr><td class="code"><pre><span class="line"># 关闭mysql服务</span><br><span class="line">systemctl stop mysqld.service</span><br><span class="line"></span><br><span class="line"># 查找安装mysql的rpm包</span><br></pre></td></tr></table></figure>

<p>安装Hive</p>
<figure class="highlight plaintext"><table><tr><td class="code"><pre><span class="line">tar zxvf apache-hive-3.1.2-bin.tar.gz -C /export/server</span><br></pre></td></tr></table></figure>

<p>解决Hive与Hadoop之间guava版本差异</p>
<figure class="highlight plaintext"><table><tr><td class="code"><pre><span class="line">cd /export/server/apache-hive-3.1.2-bin</span><br><span class="line">rm -rf lib/guava-19.0.jar</span><br><span class="line">cp /export/server/hadoop-3.3.5/share/hadoop/common/lib/guava-27.0-jre.jar ./lib/</span><br></pre></td></tr></table></figure>

<p>修改配置文件</p>
<ul>
<li>hive-env.sh</li>
</ul>
<figure class="highlight plaintext"><table><tr><td class="code"><pre><span class="line">cd /export/server/apache-hive-3.1.2-bin/conf</span><br><span class="line">mv hive-env.sh.template hive-env.sh</span><br><span class="line"></span><br><span class="line">vim hive-env.sh</span><br><span class="line">export HADOOP_HOME=/export/server/hadoop-3.3.5</span><br><span class="line">export HIVE_CONF_DIR=/export/server/apache-hive-3.1.2-bin/conf</span><br><span class="line">export HIVE_AUX_JARS_PATH=/export/server/apache-hive-3.1.2-bin/lib</span><br></pre></td></tr></table></figure>

<ul>
<li>hive-site.xml</li>
</ul>
<p>vim hive-site.xml</p>
<figure class="highlight plaintext"><table><tr><td class="code"><pre><span class="line">&lt;configuration&gt;</span><br><span class="line">  &lt;property&gt;</span><br><span class="line">    &lt;name&gt;javax.jdo.option.ConnectionURL&lt;/name&gt;</span><br><span class="line">    &lt;value&gt;jdbc:mysql://node1:3306/hive?createDatabaseIfNotExist=true&amp;amp;useSSL=false&amp;amp;useUnicode=true&amp;amp;characterEncoding=UTF-8&lt;/value&gt;</span><br><span class="line">  &lt;/property&gt;</span><br><span class="line"></span><br><span class="line">  &lt;property&gt;</span><br><span class="line">    &lt;name&gt;javax.jdo.option.ConnectionDriverName&lt;/name&gt;</span><br><span class="line">    &lt;value&gt;com.mysql.jdbc.Driver&lt;/value&gt;</span><br><span class="line">  &lt;/property&gt;</span><br><span class="line"></span><br><span class="line">  &lt;property&gt;</span><br><span class="line">    &lt;name&gt;javax.jdo.option.ConnectionUserName&lt;/name&gt;</span><br><span class="line">    &lt;value&gt;root&lt;/value&gt;</span><br><span class="line">  &lt;/property&gt;</span><br><span class="line"></span><br><span class="line">  &lt;property&gt;</span><br><span class="line">    &lt;name&gt;javax.jdo.option.ConnectionPassword&lt;/name&gt;</span><br><span class="line">    &lt;value&gt;root&lt;/value&gt;</span><br><span class="line">  &lt;/property&gt;</span><br><span class="line"></span><br><span class="line">  &lt;property&gt;</span><br><span class="line">    &lt;name&gt;hive.server2.thrift.bind.host&lt;/name&gt;</span><br><span class="line">    &lt;value&gt;node1&lt;/value&gt;</span><br><span class="line">  &lt;/property&gt;</span><br><span class="line"></span><br><span class="line">  &lt;property&gt;</span><br><span class="line">    &lt;name&gt;hive.metastore.uris&lt;/name&gt;</span><br><span class="line">    &lt;value&gt;thrift://node1:9083&lt;/value&gt;</span><br><span class="line">  &lt;/property&gt;</span><br><span class="line"></span><br><span class="line">  
&lt;property&gt;</span><br><span class="line">    &lt;name&gt;hive.metastore.event.db.notification.api.auth&lt;/name&gt;</span><br><span class="line">    &lt;value&gt;false&lt;/value&gt;</span><br><span class="line">  &lt;/property&gt;</span><br><span class="line"></span><br><span class="line">&lt;/configuration&gt;</span><br></pre></td></tr></table></figure>

<ul>
<li>上传mysql jdbc驱动到hive安装包lib下</li>
</ul>
<figure class="highlight plaintext"><table><tr><td class="code"><pre><span class="line">mysql-connector-java-5.1.32.jar</span><br></pre></td></tr></table></figure>

<ul>
<li>初始化元数据</li>
</ul>
<figure class="highlight plaintext"><table><tr><td class="code"><pre><span class="line">cd /export/server/apache-hive-3.1.2-bin</span><br><span class="line"></span><br><span class="line">bin/schematool -initSchema -dbType mysql -verbose</span><br><span class="line"># 初始化成功会在mysql中创建74张表</span><br></pre></td></tr></table></figure>

<ul>
<li>在hdfs创建hive存储目录（如存在则不用操作）</li>
</ul>
<figure class="highlight plaintext"><table><tr><td class="code"><pre><span class="line">hadoop fs -mkdir /tmp</span><br><span class="line">hadoop fs -mkdir -p /user/hive/warehouse</span><br><span class="line">hadoop fs -chmod g+w /tmp</span><br><span class="line">hadoop fs -chmod g+w /user/hive/warehouse</span><br></pre></td></tr></table></figure>

<h3 id="启动Hive"><a href="#启动Hive" class="headerlink" title="启动Hive"></a>启动Hive</h3><p>前台启动</p>
<figure class="highlight plaintext"><table><tr><td class="code"><pre><span class="line">/export/server/apache-hive-3.1.2-bin/bin/hive --service metastore</span><br></pre></td></tr></table></figure>

<p>后台启动</p>
<figure class="highlight plaintext"><table><tr><td class="code"><pre><span class="line">nohup /export/server/apache-hive-3.1.2-bin/bin/hive --service metastore &amp;</span><br></pre></td></tr></table></figure>

<h3 id="Hive客户端使用"><a href="#Hive客户端使用" class="headerlink" title="Hive客户端使用"></a>Hive客户端使用</h3><p><img src="https://mundane-1317827053.cos.ap-chengdu.myqcloud.com/typora/image-20230422175524602.png" alt="image-20230422175524602"></p>
<p><img src="https://mundane-1317827053.cos.ap-chengdu.myqcloud.com/typora/image-20230422175722862.png" alt="image-20230422175722862"></p>
<h4 id="bin-x2F-beeline客户端使用"><a href="#bin-x2F-beeline客户端使用" class="headerlink" title="bin&#x2F;beeline客户端使用"></a>bin&#x2F;beeline客户端使用</h4><p>在hive安装的服务器上，首先启动metastore服务，然后启动hiveserver2服务</p>
<figure class="highlight plaintext"><table><tr><td class="code"><pre><span class="line">nohup /export/server/apache-hive-3.1.2-bin/bin/hive --service metastore &amp;</span><br><span class="line">nohup /export/server/apache-hive-3.1.2-bin/bin/hive --service hiveserver2 &amp;</span><br></pre></td></tr></table></figure>

<ul>
<li>连接访问</li>
</ul>
<figure class="highlight plaintext"><table><tr><td class="code"><pre><span class="line">/export/server/apache-hive-3.1.2-bin/bin/beeline</span><br><span class="line"></span><br><span class="line">beeline&gt; ! connect jdbc:hive2://node1:10000</span><br></pre></td></tr></table></figure>

<p><img src="https://mundane-1317827053.cos.ap-chengdu.myqcloud.com/typora/image-20230422183128693.png" alt="image-20230422183128693"></p>
<p><img src="https://mundane-1317827053.cos.ap-chengdu.myqcloud.com/typora/image-20230422183206363.png" alt="image-20230422183206363"></p>
<h3 id="pycharm-amp-Hive"><a href="#pycharm-amp-Hive" class="headerlink" title="pycharm &amp; Hive"></a>pycharm &amp; Hive</h3><p>配置驱动程序</p>
<p><img src="https://mundane-1317827053.cos.ap-chengdu.myqcloud.com/typora/image-20230422192922327.png" alt="image-20230422192922327"></p>
<p><img src="https://mundane-1317827053.cos.ap-chengdu.myqcloud.com/typora/image-20230422192836116.png" alt="image-20230422192836116"></p>
<p>hive的数据库在hdfs上本质上就是一个文件夹</p>
<p>默认数据库的存放路径是HDFS的: <code>/user/hive/warehouse</code></p>
<h4 id="创建表的语法"><a href="#创建表的语法" class="headerlink" title="创建表的语法"></a>创建表的语法</h4><figure class="highlight sql"><table><tr><td class="code"><pre><span class="line"><span class="keyword">create</span> [<span class="keyword">external</span>] <span class="keyword">table</span> [if <span class="keyword">not</span> <span class="keyword">exists</span>] table_name</span><br><span class="line"> [(col_name data_type [comment col_comment], ...)]</span><br><span class="line"> [comment table_comment]</span><br><span class="line"> [partitioned <span class="keyword">by</span> (col_name data_type [comment col_comment], ...)]</span><br><span class="line"> [clustered <span class="keyword">by</span> (col_name, col_name, ...)]</span><br><span class="line"> [sorted <span class="keyword">by</span> (col_name [<span class="keyword">asc</span><span class="operator">|</span><span class="keyword">desc</span>], ...) <span class="keyword">into</span> num_buckets buckets]</span><br><span class="line"> [<span class="type">row</span> format row_format]</span><br><span class="line"> [stored <span class="keyword">as</span> file_format]</span><br><span class="line"> [location hdfs_path]</span><br></pre></td></tr></table></figure>

<ul>
<li><code>external</code> 创建外部表</li>
<li><code>partitioned by</code> 分区表</li>
<li><code>clustered by</code> 分桶表</li>
<li><code>stored as</code> 存储格式</li>
<li><code>location</code> 存储位置</li>
</ul>
<h4 id="hive所支持的数据类型"><a href="#hive所支持的数据类型" class="headerlink" title="hive所支持的数据类型"></a>hive所支持的数据类型</h4><table>
<thead>
<tr>
<th>分类</th>
<th>类型</th>
<th>描述</th>
<th>字面量实例</th>
</tr>
</thead>
<tbody><tr>
<td>原始类型</td>
<td>boolean</td>
<td>true&#x2F;false</td>
<td>TRUE</td>
</tr>
<tr>
<td></td>
<td>tinyint</td>
<td>1字节的有符号整数: -128~127</td>
<td>1Y</td>
</tr>
<tr>
<td></td>
<td>smallint</td>
<td>2字节的有符号整数: -32768~32767</td>
<td>1S</td>
</tr>
<tr>
<td></td>
<td><font color="cpink">int</font></td>
<td>4字节的有符号整数</td>
<td>1</td>
</tr>
<tr>
<td></td>
<td>bigint</td>
<td>8字节的有符号整数</td>
<td>1L</td>
</tr>
<tr>
<td></td>
<td>float</td>
<td>4字节单精度浮点数</td>
<td>1.0</td>
</tr>
<tr>
<td></td>
<td><font color="cpink">double</font></td>
<td>8字节双精度浮点数</td>
<td>1.0</td>
</tr>
<tr>
<td></td>
<td>decimal</td>
<td>任意精度的带符号小数</td>
<td>1.0</td>
</tr>
<tr>
<td></td>
<td><font color='cpink' >string</font></td>
<td>字符串, 可变长</td>
<td></td>
</tr>
<tr>
<td></td>
<td><font color='cpink'>varchar</font></td>
<td>变长字符串</td>
<td></td>
</tr>
<tr>
<td></td>
<td>char</td>
<td>固定长字符串</td>
<td></td>
</tr>
<tr>
<td></td>
<td>binary</td>
<td>字节数组</td>
<td></td>
</tr>
<tr>
<td></td>
<td><font color='cpink' >timestamp</font></td>
<td>时间戳, 毫秒精确度</td>
<td>122637267291</td>
</tr>
<tr>
<td></td>
<td><font color='cpink' >date</font></td>
<td>日期</td>
<td>‘2023-04-16’</td>
</tr>
<tr>
<td>复杂类型</td>
<td>array</td>
<td>有序的同类数据集合</td>
<td>array(1, 2)</td>
</tr>
<tr>
<td></td>
<td>map</td>
<td>key-val, key必须为原始类型, val可以是任意类型</td>
<td>map(‘a’, 1, ‘b’, 2)</td>
</tr>
<tr>
<td></td>
<td>struct</td>
<td>字段集合, 类型可以不同</td>
<td></td>
</tr>
<tr>
<td></td>
<td>union</td>
<td>在有限取值范围内的一个值</td>
<td></td>
</tr>
</tbody></table>
<ul>
<li>基础表创建</li>
</ul>
<figure class="highlight sql"><table><tr><td class="code"><pre><span class="line"><span class="comment">-- 在myhive中创建</span></span><br><span class="line"><span class="keyword">create</span> <span class="keyword">table</span> my_test(</span><br><span class="line"> id <span class="type">int</span>,</span><br><span class="line">    name string,</span><br><span class="line">    gender string</span><br><span class="line">);</span><br><span class="line"></span><br><span class="line"><span class="comment">-- 如果在其他数据库中, 想要对myhive创建表</span></span><br><span class="line"><span class="keyword">create</span> <span class="keyword">table</span> myhive.my_test_2(</span><br><span class="line"> id <span class="type">int</span></span><br><span class="line">);</span><br></pre></td></tr></table></figure>

<h4 id="Hive的表类型"><a href="#Hive的表类型" class="headerlink" title="Hive的表类型"></a>Hive的表类型</h4><p>在Hive中可以创建的表的类型有4种, 分别是:</p>
<ol>
<li>内部表</li>
<li>外部表</li>
<li>分区表</li>
<li>分桶表</li>
</ol>
<h5 id="内部表"><a href="#内部表" class="headerlink" title="内部表"></a>内部表</h5><figure class="highlight sql"><table><tr><td class="code"><pre><span class="line"><span class="keyword">create</span> <span class="keyword">table</span> table_name ...</span><br></pre></td></tr></table></figure>

<p>未被<code>external</code>关键字修饰的是内部表, 即普通表. 内部表又称管理表, 内部表数据存储的位置由<code>hive.metastore.warehouse.dir</code>参数决定(默认是:<code>/user/hive/warehouse</code>), 删除内部表会直接<font color="cpink">删除元数据(metadata) 及存储数据</font>, 因此内部表不适合和其他工具共享数据</p>
<figure class="highlight sql"><table><tr><td class="code"><pre><span class="line"><span class="comment">-- 创建学生表(内部表)</span></span><br><span class="line"><span class="keyword">create</span> database if <span class="keyword">not</span> <span class="keyword">exists</span> myhive;</span><br><span class="line">use myhive;</span><br><span class="line"><span class="keyword">create</span> <span class="keyword">table</span> if <span class="keyword">not</span> <span class="keyword">exists</span> stu(</span><br><span class="line">    id <span class="type">int</span>,</span><br><span class="line">    name string</span><br><span class="line">);</span><br><span class="line"><span class="keyword">insert</span> <span class="keyword">into</span> stu <span class="keyword">values</span> (<span class="number">1</span>, <span class="string">&#x27;李白&#x27;</span>), (<span class="number">2</span>, <span class="string">&#x27;张飞&#x27;</span>);</span><br><span class="line"><span class="keyword">select</span> <span class="operator">*</span> <span class="keyword">from</span> stu;</span><br></pre></td></tr></table></figure>

<figure class="highlight sql"><table><tr><td class="code"><pre><span class="line">hdfs dfs <span class="operator">-</span>ls <span class="operator">/</span><span class="keyword">user</span><span class="operator">/</span>hive<span class="operator">/</span>warehouse<span class="operator">/</span>myhive.db<span class="operator">/</span>stu</span><br><span class="line"></span><br><span class="line">hdfs dfs <span class="operator">-</span>cat <span class="operator">/</span><span class="keyword">user</span><span class="operator">/</span>hive<span class="operator">/</span>warehouse<span class="operator">/</span>myhive.db<span class="operator">/</span>stu<span class="operator">/</span><span class="number">000000</span>_0</span><br></pre></td></tr></table></figure>

<p><img src="https://mundane-1317827053.cos.ap-chengdu.myqcloud.com/typora/image-20230422195335874.png" alt="image-20230422195335874"></p>
<ul>
<li>数据分隔符</li>
</ul>
<p>如上图的<code>/myhive.db/stu/000000_0</code>, 默认的数据分隔符是:<code>\001</code>, 明文不可见</p>
<ul>
<li>自行定义分隔符</li>
</ul>
<p>在创建表的时候可以自己设定</p>
<figure class="highlight sql"><table><tr><td class="code"><pre><span class="line"><span class="keyword">create</span> <span class="keyword">table</span> if <span class="keyword">not</span> <span class="keyword">exists</span> stu_2(</span><br><span class="line"> id <span class="type">int</span>,</span><br><span class="line">    name string</span><br><span class="line">) <span class="type">row</span> format delimited fields terminated <span class="keyword">by</span> <span class="string">&#x27;\t&#x27;</span>;</span><br><span class="line"><span class="comment">-- 表示以 \t 为分隔符</span></span><br><span class="line"><span class="keyword">insert</span> <span class="keyword">into</span> stu_2 <span class="keyword">values</span>(<span class="number">1</span>, <span class="string">&#x27;Lili&#x27;</span>), (<span class="number">2</span>, <span class="string">&#x27;mark&#x27;</span>);</span><br><span class="line"><span class="keyword">select</span> <span class="operator">*</span> <span class="keyword">from</span> stu_2;</span><br></pre></td></tr></table></figure>

<h5 id="外部表"><a href="#外部表" class="headerlink" title="外部表"></a>外部表</h5><figure class="highlight sql"><table><tr><td class="code"><pre><span class="line"><span class="keyword">create</span> <span class="keyword">external</span> <span class="keyword">table</span> table_name ... location ...</span><br></pre></td></tr></table></figure>

<p>被<code>external</code>关键字修饰的是外部表, 即非Hive管理的表。</p>
<p>外部表是指表数据可以在任何位置, 通过<code>location</code>关键字指定. 数据存储的不同也代表了这个表在理念上并不是在Hive内部管理的, 而是可以随意临时链接到外部数据. 所以在删除外部表的时候, <font color='cpink'>仅仅是删除元数据(表的信息), 不会删除数据本身</font>.</p>
<blockquote>
<p>内部表和外部表对比</p>
</blockquote>
<table>
<thead>
<tr>
<th></th>
<th>创建</th>
<th>存储位置</th>
<th>删除数据</th>
<th>理念</th>
</tr>
</thead>
<tbody><tr>
<td>内部表</td>
<td>create table</td>
<td>hive管理, 默认&#x2F;user&#x2F;hive&#x2F;warehouse</td>
<td>· 删除元数据(表信息) · 删除数据</td>
<td>Hive管理表 持久使用</td>
</tr>
<tr>
<td>外部表</td>
<td>create external table</td>
<td>随意, location关键字指定</td>
<td>· 仅删除元数据(表信息) · 保留数据</td>
<td>临时链接 外部数据使用</td>
</tr>
</tbody></table>
<h6 id="1-先有表-后有数据"><a href="#1-先有表-后有数据" class="headerlink" title="1.先有表, 后有数据"></a>1.先有表, 后有数据</h6><p>先创建外部表, 再将数据文件移动到指定的<code>location</code>目录中</p>
<figure class="highlight sql"><table><tr><td class="code"><pre><span class="line"><span class="keyword">create</span> <span class="keyword">external</span> <span class="keyword">table</span> test_ex1(</span><br><span class="line">    id <span class="type">int</span>,</span><br><span class="line">    name string</span><br><span class="line">) <span class="type">row</span> format delimited fields terminated <span class="keyword">by</span> <span class="string">&#x27;\t&#x27;</span> location <span class="string">&#x27;/tmp/test_ex1&#x27;</span>;</span><br></pre></td></tr></table></figure>

<h6 id="2-先有数据-后有表"><a href="#2-先有数据-后有表" class="headerlink" title="2.先有数据, 后有表"></a>2.先有数据, 后有表</h6><p>先创建一个<code>tmp/test_ex2</code>的目录, 并且将数据文件上传<code>hdfs dfs -put data.txt /tmp/test_ex2/</code>中</p>
<p>创建一个外部表, 并执行查询语句</p>
<figure class="highlight sql"><table><tr><td class="code"><pre><span class="line"><span class="keyword">create</span> <span class="keyword">external</span> <span class="keyword">table</span> test_ex2(</span><br><span class="line">    id <span class="type">int</span>,</span><br><span class="line">    name string</span><br><span class="line">) <span class="type">row</span> format delimited fields terminated <span class="keyword">by</span> <span class="string">&#x27;\t&#x27;</span> location <span class="string">&#x27;/tmp/test_ex2&#x27;</span>;</span><br><span class="line"><span class="keyword">select</span> <span class="operator">*</span> <span class="keyword">from</span> test_ex2;</span><br></pre></td></tr></table></figure>

<h5 id="内-x2F-外部表转换"><a href="#内-x2F-外部表转换" class="headerlink" title="内&#x2F;外部表转换"></a>内&#x2F;外部表转换</h5><p>查看表类型</p>
<figure class="highlight sql"><table><tr><td class="code"><pre><span class="line"><span class="keyword">desc</span> formatted table_name;</span><br></pre></td></tr></table></figure>

<p><img src="https://mundane-1317827053.cos.ap-chengdu.myqcloud.com/typora/image-20230422210100172.png" alt="image-20230422210100172"></p>
<p><img src="https://mundane-1317827053.cos.ap-chengdu.myqcloud.com/typora/image-20230422210038734.png" alt="image-20230422210038734"></p>
<figure class="highlight sql"><table><tr><td class="code"><pre><span class="line"><span class="comment">-- 内部表改外部表</span></span><br><span class="line"><span class="keyword">alter</span> <span class="keyword">table</span> table_name <span class="keyword">set</span> tblproperties (&quot;EXTERNAL&quot;<span class="operator">=</span>&quot;TRUE&quot;);</span><br><span class="line"><span class="comment">-- 外部表改内部表</span></span><br><span class="line"><span class="keyword">alter</span> <span class="keyword">table</span> table_name <span class="keyword">set</span> tblproperties (&quot;EXTERNAL&quot;<span class="operator">=</span>&quot;FALSE&quot;);</span><br></pre></td></tr></table></figure>

<h4 id="数据加载和导出"><a href="#数据加载和导出" class="headerlink" title="数据加载和导出"></a>数据加载和导出</h4><h5 id="数据加载-LOAD语法"><a href="#数据加载-LOAD语法" class="headerlink" title="数据加载-LOAD语法"></a>数据加载-LOAD语法</h5><figure class="highlight sql"><table><tr><td class="code"><pre><span class="line">load data [<span class="keyword">local</span>] inpath <span class="string">&#x27;linux本地 or hdfs&#x27;</span> [overwrite] <span class="keyword">into</span> <span class="keyword">table</span> table_name;</span><br></pre></td></tr></table></figure>

<p>从外部将数据加载到Hive中</p>
<p>准备数据文件, 在linux本地创建<code>search.txt</code>数据文件</p>
<blockquote>
<p>本地文件系统指的是Hiveserver2服务所在机器的本地Linux文件系统，不是Hive客户端所在的本地文件系统。</p>
</blockquote>
<p>创建数据表, 加载数据, 并查询数据</p>
<figure class="highlight sql"><table><tr><td class="code"><pre><span class="line"><span class="keyword">create</span> <span class="keyword">table</span> myhive.test_load(</span><br><span class="line">    dt string comment <span class="string">&#x27;时间&#x27;</span>,</span><br><span class="line">    user_id string comment <span class="string">&#x27;用户id&#x27;</span>,</span><br><span class="line">    search_word string comment <span class="string">&#x27;关键词&#x27;</span>,</span><br><span class="line">    url string comment <span class="string">&#x27;网址&#x27;</span></span><br><span class="line">) <span class="type">row</span> format delimited fields terminated <span class="keyword">by</span> <span class="string">&#x27;\t&#x27;</span>;</span><br><span class="line"><span class="comment">-- 数据加载[本地上传 --local]</span></span><br><span class="line">load data <span class="keyword">local</span> inpath <span class="string">&#x27;/export/data/search.txt&#x27;</span> <span class="keyword">into</span> <span class="keyword">table</span> myhive.test_load;</span><br><span class="line"><span class="comment">-- 查询数据</span></span><br><span class="line"><span class="keyword">select</span> <span class="operator">*</span> <span class="keyword">from</span> myhive.test_load;</span><br></pre></td></tr></table></figure>

<blockquote>
<p><font color='cpink'>注意, 基于HDFS进行load加载数据时, 源数据文件会消失</font></p>
<p>使用hdfs的加载, 本质上是fs -mv 的过程</p>
</blockquote>
<p><code>overwrite</code>关键字</p>
<figure class="highlight sql"><table><tr><td class="code"><pre><span class="line">load data <span class="keyword">local</span> inpath <span class="string">&#x27;/export/data/search.txt&#x27;</span> overwrite <span class="keyword">into</span> <span class="keyword">table</span> myhive.test_load;</span><br></pre></td></tr></table></figure>

<p>使用<code>overwrite</code>关键字会对源数据进行覆盖 (默认是追加)</p>
<h5 id="数据加载-INSERT-SELECT语法"><a href="#数据加载-INSERT-SELECT语法" class="headerlink" title="数据加载-INSERT SELECT语法"></a>数据加载-INSERT SELECT语法</h5><figure class="highlight sql"><table><tr><td class="code"><pre><span class="line"><span class="keyword">insert</span> <span class="keyword">into</span><span class="operator">|</span>overwrite <span class="keyword">table</span> table_name <span class="keyword">select</span> <span class="operator">*</span> <span class="keyword">from</span> <span class="keyword">table</span>;</span><br></pre></td></tr></table></figure>

<p>通过SQL语句, 从其他表中加载数据</p>
<figure class="highlight sql"><table><tr><td class="code"><pre><span class="line"><span class="keyword">create</span> <span class="keyword">table</span> myhive.test_load_2(</span><br><span class="line">    dt string comment <span class="string">&#x27;时间&#x27;</span>,</span><br><span class="line">    user_id string comment <span class="string">&#x27;用户id&#x27;</span>,</span><br><span class="line">    search_word string comment <span class="string">&#x27;关键词&#x27;</span>,</span><br><span class="line">    url string comment <span class="string">&#x27;网址&#x27;</span></span><br><span class="line">) <span class="type">row</span> format delimited fields terminated <span class="keyword">by</span> <span class="string">&#x27;\t&#x27;</span>;</span><br><span class="line"></span><br><span class="line"><span class="comment">-- 追加</span></span><br><span class="line"><span class="keyword">insert</span> <span class="keyword">into</span> myhive.test_load_2 <span class="keyword">select</span> <span class="operator">*</span> <span class="keyword">from</span> myhive.test_load;</span><br><span class="line"><span class="comment">-- 覆盖</span></span><br><span class="line"><span class="keyword">insert</span> overwrite <span class="keyword">table</span> myhive.test_load_2 <span class="keyword">select</span> <span class="operator">*</span> <span class="keyword">from</span> myhive.test_load;</span><br></pre></td></tr></table></figure>

<h5 id="数据导出-INSERT-OVERWRITE方式"><a href="#数据导出-INSERT-OVERWRITE方式" class="headerlink" title="数据导出-INSERT OVERWRITE方式"></a>数据导出-INSERT OVERWRITE方式</h5><p>将Hive表中的数据导出到其他任意目录, 例如Linux本地磁盘, hdfs, mysql中等</p>
<figure class="highlight sql"><table><tr><td class="code"><pre><span class="line"><span class="keyword">insert</span> overwrite [<span class="keyword">local</span>] directory <span class="string">&#x27;path&#x27;</span> select_statement1 <span class="keyword">from</span> from_statement;</span><br></pre></td></tr></table></figure>

<ul>
<li><p>导出Linux本地</p>
<ol>
<li><p>使用默认分隔符</p>
<figure class="highlight sql"><table><tr><td class="code"><pre><span class="line"><span class="keyword">insert</span> overwrite <span class="keyword">local</span> directory <span class="string">&#x27;/export/output&#x27;</span> <span class="keyword">select</span> <span class="operator">*</span> <span class="keyword">from</span> myhive.test_load;</span><br></pre></td></tr></table></figure>
</li>
<li><p>指定分隔符</p>
<figure class="highlight sql"><table><tr><td class="code"><pre><span class="line"><span class="keyword">insert</span> overwrite <span class="keyword">local</span> directory <span class="string">&#x27;/export/output&#x27;</span> <span class="type">row</span> format delimited fields terminated <span class="keyword">by</span> <span class="string">&#x27;\t&#x27;</span> <span class="keyword">select</span> <span class="operator">*</span> <span class="keyword">from</span> myhive.test_load;</span><br></pre></td></tr></table></figure></li>
</ol>
</li>
<li><p>导出到HDFS上</p>
</li>
</ul>
<figure class="highlight sql"><table><tr><td class="code"><pre><span class="line"><span class="keyword">insert</span> overwrite directory <span class="string">&#x27;/tmp/export&#x27;</span> <span class="type">row</span> format delimited fields terminated <span class="keyword">by</span> <span class="string">&#x27;\t&#x27;</span> <span class="keyword">select</span> <span class="operator">*</span> <span class="keyword">from</span> myhive.test_load;</span><br></pre></td></tr></table></figure>

<h4 id="Hive表数据导出-hive-shell"><a href="#Hive表数据导出-hive-shell" class="headerlink" title="Hive表数据导出-hive shell"></a>Hive表数据导出-hive shell</h4><ul>
<li>基本语法</li>
</ul>
<figure class="highlight bash"><table><tr><td class="code"><pre><span class="line">bin/hive -e <span class="string">&quot;select * from myhive.test_load;&quot;</span></span><br><span class="line"><span class="comment"># 通过linux重定向符导入到export3.t</span></span><br><span class="line">bin/hive -e <span class="string">&quot;select * from myhive.test_load;&quot;</span> &gt; /export/output/test_load.txt</span><br></pre></td></tr></table></figure>

<figure class="highlight bash"><table><tr><td class="code"><pre><span class="line"><span class="comment"># 创建一个export.sql的脚本文件</span></span><br><span class="line">vim export.sql</span><br><span class="line"><span class="comment"># 输入</span></span><br><span class="line">select * from myhive.test_load;</span><br><span class="line"><span class="comment"># -f 执行sql脚本</span></span><br><span class="line">/export/server/hive/bin/hive -f export.sql &gt; ./export_sql.txt</span><br></pre></td></tr></table></figure>

<h4 id="分区表"><a href="#分区表" class="headerlink" title="分区表"></a>分区表</h4><p>Hive中支持多个字段作为分区, 多分区带有层级关系</p>
<blockquote>
<p>如按年分2010-2023, 再按月分1-12, 再按日分1-30, …</p>
</blockquote>
<h5 id="分区表的使用"><a href="#分区表的使用" class="headerlink" title="分区表的使用"></a>分区表的使用</h5><ul>
<li>基本语法</li>
</ul>
<figure class="highlight sql"><table><tr><td class="code"><pre><span class="line"><span class="keyword">create</span> <span class="keyword">table</span> table_name(</span><br><span class="line"> 数据列 列类型,</span><br><span class="line">    ...</span><br><span class="line">) partitioned <span class="keyword">by</span> (分区列 列类型, ...)</span><br><span class="line"><span class="type">row</span> format delimited fields terminated <span class="keyword">by</span> <span class="string">&#x27;&#x27;</span>;</span><br></pre></td></tr></table></figure>

<ul>
<li>创建分区表(单分区)</li>
</ul>
<figure class="highlight sql"><table><tr><td class="code"><pre><span class="line"><span class="keyword">create</span> <span class="keyword">table</span> score(</span><br><span class="line">    id string,</span><br><span class="line">    cid string,</span><br><span class="line">    score <span class="type">int</span></span><br><span class="line">) partitioned <span class="keyword">by</span> (<span class="keyword">month</span> string)</span><br><span class="line"><span class="type">row</span> format delimited fields terminated <span class="keyword">by</span> <span class="string">&#x27;\t&#x27;</span>;</span><br></pre></td></tr></table></figure>

<ul>
<li>加载Linux本地数据到表中, 并指定分区列</li>
</ul>
<figure class="highlight sql"><table><tr><td class="code"><pre><span class="line">load  data <span class="keyword">local</span> inpath <span class="string">&#x27;/home/hadoop/score.txt&#x27;</span> overwrite <span class="keyword">into</span> <span class="keyword">table</span> score <span class="keyword">partition</span>(<span class="keyword">month</span><span class="operator">=</span><span class="string">&#x27;202304&#x27;</span>);</span><br><span class="line"><span class="keyword">select</span> <span class="operator">*</span> <span class="keyword">from</span> score;</span><br></pre></td></tr></table></figure>

<p>数据列来源于创建表时的字段, 分区列来源于指定的分区, 各个分区的数据独立, 不会相互混淆</p>
<p><img src="https://mundane-1317827053.cos.ap-chengdu.myqcloud.com/typora/image-20230423083842269.png" alt="image-20230423083842269"></p>
<ul>
<li>创建分区表(多分区)</li>
</ul>
<figure class="highlight sql"><table><tr><td class="code"><pre><span class="line"><span class="keyword">create</span> <span class="keyword">table</span> score2(</span><br><span class="line">    id string,</span><br><span class="line">    cid string,</span><br><span class="line">    score <span class="type">int</span></span><br><span class="line">) partitioned <span class="keyword">by</span> (<span class="keyword">year</span> string, <span class="keyword">month</span> string, <span class="keyword">day</span> string)</span><br><span class="line"><span class="type">row</span> format delimited fields terminated <span class="keyword">by</span> <span class="string">&#x27;\t&#x27;</span>;</span><br></pre></td></tr></table></figure>

<ul>
<li>加载数据</li>
</ul>
<figure class="highlight sql"><table><tr><td class="code"><pre><span class="line">load  data <span class="keyword">local</span> inpath <span class="string">&#x27;/home/hadoop/score.txt&#x27;</span></span><br><span class="line">    <span class="keyword">into</span> <span class="keyword">table</span> score2 <span class="keyword">partition</span>(<span class="keyword">year</span><span class="operator">=</span><span class="string">&#x27;2023&#x27;</span>, <span class="keyword">month</span><span class="operator">=</span><span class="string">&#x27;04&#x27;</span>, <span class="keyword">day</span><span class="operator">=</span><span class="string">&#x27;23&#x27;</span>);</span><br><span class="line"><span class="keyword">select</span> <span class="operator">*</span> <span class="keyword">from</span> score2;</span><br></pre></td></tr></table></figure>

<blockquote>
<p>分区表可以极大提高特定场景下的Hive的操作性能(不用对全表进行操作)</p>
</blockquote>
<figure class="highlight sql"><table><tr><td class="code"><pre><span class="line"><span class="keyword">select</span> <span class="operator">*</span> <span class="keyword">from</span> score2 <span class="keyword">where</span> <span class="keyword">year</span><span class="operator">=</span><span class="string">&#x27;2023&#x27;</span> <span class="keyword">and</span> <span class="keyword">month</span><span class="operator">=</span><span class="string">&#x27;04&#x27;</span> <span class="keyword">and</span> <span class="keyword">day</span><span class="operator">=</span><span class="string">&#x27;18&#x27;</span>;</span><br></pre></td></tr></table></figure>

<h4 id="分桶表"><a href="#分桶表" class="headerlink" title="分桶表"></a>分桶表</h4><p>分桶和分区一样, 也是一种通过改变表的储存模式, 从而完成对表优化的一种调优方式</p>
<p>分区是将表拆分到<strong>不同的子文件夹</strong>中, 而分桶是将表拆分到<strong>固定数量的不同文件</strong>中进行存储</p>
<h5 id="分桶表的创建"><a href="#分桶表的创建" class="headerlink" title="分桶表的创建"></a>分桶表的创建</h5><ul>
<li>开启分桶的自动优化( 自动匹配reduce task的数量和桶数量一致 )</li>
</ul>
<figure class="highlight sql"><table><tr><td class="code"><pre><span class="line"><span class="keyword">set</span> hive.enforce.bucketing<span class="operator">=</span>true;</span><br></pre></td></tr></table></figure>

<ul>
<li>创建分桶表</li>
</ul>
<figure class="highlight sql"><table><tr><td class="code"><pre><span class="line"><span class="keyword">create</span> <span class="keyword">table</span>  course(c_id string, c_name string, t_id string)</span><br><span class="line">clustered <span class="keyword">by</span> (c_id) <span class="keyword">into</span> <span class="number">3</span> buckets </span><br><span class="line"><span class="type">row</span> format delimited fields terminated <span class="keyword">by</span> <span class="string">&#x27;\t&#x27;</span>;</span><br></pre></td></tr></table></figure>

<ul>
<li>分桶表的数据加载</li>
</ul>
<p>由于桶表的数据加载不能通过<code>load data</code>的方式, 只能通过<code>insert select</code>, 所以可以采用:</p>
<ol>
<li>创建一个临时表(内部&#x2F;外部均可), 通过<code>load data</code>加载数据到表中</li>
</ol>
<figure class="highlight sql"><table><tr><td class="code"><pre><span class="line"><span class="keyword">create</span> <span class="keyword">table</span>  course_temp(c_id string, c_name string, t_id string)</span><br><span class="line"><span class="type">row</span> format delimited fields terminated <span class="keyword">by</span> <span class="string">&#x27;\t&#x27;</span>;</span><br><span class="line">load data <span class="keyword">local</span> inpath <span class="string">&#x27;/home/hadoop/course.txt&#x27;</span> <span class="keyword">into</span> <span class="keyword">table</span> course_temp;</span><br><span class="line"><span class="keyword">select</span> <span class="operator">*</span> <span class="keyword">from</span> course_temp;</span><br></pre></td></tr></table></figure>

<p>2. 通过<code>insert select</code>从临时表向桶表中插入数据</p>
<figure class="highlight sql"><table><tr><td class="code"><pre><span class="line"><span class="keyword">insert</span> overwrite <span class="keyword">table</span> course <span class="keyword">select</span> <span class="operator">*</span> <span class="keyword">from</span> course_temp cluster <span class="keyword">by</span> (c_id);</span><br><span class="line"><span class="keyword">select</span> <span class="operator">*</span> <span class="keyword">from</span> course;</span><br></pre></td></tr></table></figure>

<blockquote>
<p><strong>为什么不可以使用load data, 必须使用insert select插入数据</strong></p>
</blockquote>
<p>如果没有分桶设置, 插入(加载)数据只是简单的将数据列放入:</p>
<ul>
<li>表的存储文件夹中(无分区)</li>
<li>表的指定分区文件夹中(有分区)</li>
</ul>
<p>一旦有了分桶的设置, 当数据插入时, 需要一分为n, 进入n个桶文件中</p>
<ul>
<li>数据划分的规则</li>
</ul>
<p>数据划分到n份中的哪一份, 由分桶列的值进行<strong>hash取模</strong>来决定, 由于load data不会触发MapReduce, 也就是没有计算过程(无法执行hash算法), 只能简单移动数据, 所以无法用于分桶表的数据插入</p>
<ul>
<li>Hash取模</li>
</ul>
<p>Hash算法是一种散列(摘要)算法, 同样的值经过Hash计算后的结果是一致的.</p>
<p>假设分桶数为3, 即将Hash的结果对3取模(除以3取其余数), 可以得到以下三种情况</p>
<figure class="highlight plaintext"><table><tr><td class="code"><pre><span class="line">0 1 2</span><br></pre></td></tr></table></figure>

<p>即, 无论任何数据, 得到的取模结果均是: 0, 1, 2中的一个</p>
<p>所以<strong>必须使用insert select 的语法, 因为其会触发MapReduce, 进行取模计算.</strong></p>
<h5 id="分桶表的性能提升"><a href="#分桶表的性能提升" class="headerlink" title="分桶表的性能提升"></a>分桶表的性能提升</h5><blockquote>
<p>在指定的分桶列的前提下, 减少被操作的数据量, 从而提升性能</p>
</blockquote>
<p>分桶表的性能提升: 基于分桶列的特定操作, 如: <strong>过滤, JOIN, 分组</strong>, 均可带来性能提升</p>
<h4 id="修改表"><a href="#修改表" class="headerlink" title="修改表"></a>修改表</h4><ul>
<li>表重命名</li>
</ul>
<figure class="highlight sql"><table><tr><td class="code"><pre><span class="line"><span class="keyword">alter</span> <span class="keyword">table</span> old_table_name rename <span class="keyword">to</span> new_table_name;</span><br></pre></td></tr></table></figure>

<p>如: <code>alter table score3 rename to score4;</code></p>
<ul>
<li>修改表属性值</li>
</ul>
<figure class="highlight sql"><table><tr><td class="code"><pre><span class="line"><span class="keyword">alter</span> <span class="keyword">table</span> table_name <span class="keyword">set</span> tblproperties table_properties;</span><br></pre></td></tr></table></figure>

<p><code>table_properties</code>: (property_name&#x3D;property_value, …)</p>
<p>如: <code>alter table table_name set tblproperties(&quot;EXTERNAL&quot;=&quot;TRUE&quot;);</code>修改内外部表属性</p>
<p> <code>alter table table_name set tblproperties(&quot;COMMENT&quot;=&quot;NEW_COMMENT&quot;);</code>修改表注释</p>
<ul>
<li>添加分区</li>
</ul>
<figure class="highlight sql"><table><tr><td class="code"><pre><span class="line"><span class="keyword">alter</span> <span class="keyword">table</span> table_name <span class="keyword">add</span> <span class="keyword">partition</span>(分区<span class="operator">=</span><span class="string">&#x27;&#x27;</span>);</span><br><span class="line"># 分区是空的没有数据, 需要手动添加或上传数据文件(创建了一个新的分区文件夹)</span><br></pre></td></tr></table></figure>

<ul>
<li>修改分区值</li>
</ul>
<figure class="highlight sql"><table><tr><td class="code"><pre><span class="line"><span class="keyword">alter</span> <span class="keyword">table</span> table_name <span class="keyword">partition</span>(分区<span class="operator">=</span><span class="string">&#x27;原分区&#x27;</span>) rename <span class="keyword">to</span> <span class="keyword">partition</span>(分区<span class="operator">=</span><span class="string">&#x27;新分区&#x27;</span>); </span><br></pre></td></tr></table></figure>

<ul>
<li>删除分区</li>
</ul>
<figure class="highlight sql"><table><tr><td class="code"><pre><span class="line"><span class="keyword">alter</span> <span class="keyword">table</span> table_name <span class="keyword">drop</span> <span class="keyword">partition</span>(分区<span class="operator">=</span><span class="string">&#x27;&#x27;</span>);</span><br></pre></td></tr></table></figure>

<ul>
<li>添加列</li>
</ul>
<figure class="highlight sql"><table><tr><td class="code"><pre><span class="line"><span class="keyword">alter</span> <span class="keyword">table</span> table_name <span class="keyword">add</span> columns(v1 type, v2 type, ...);</span><br></pre></td></tr></table></figure>

<ul>
<li>修改列名</li>
</ul>
<figure class="highlight sql"><table><tr><td class="code"><pre><span class="line"><span class="keyword">alter</span> <span class="keyword">table</span> table_name change v1 v1_new 原type;</span><br></pre></td></tr></table></figure>

<ul>
<li>删除表</li>
</ul>
<figure class="highlight sql"><table><tr><td class="code"><pre><span class="line"><span class="keyword">drop</span> <span class="keyword">table</span> table_name;</span><br></pre></td></tr></table></figure>

<ul>
<li>清空表</li>
</ul>
<figure class="highlight sql"><table><tr><td class="code"><pre><span class="line"><span class="keyword">truncate</span> <span class="keyword">table</span> table_name;</span><br></pre></td></tr></table></figure>

<p>操作示例</p>
<figure class="highlight sql"><table><tr><td class="code"><pre><span class="line"><span class="comment">-- 1. 修改表名</span></span><br><span class="line"><span class="keyword">alter</span> <span class="keyword">table</span> score2 rename <span class="keyword">to</span> score3;</span><br><span class="line"><span class="comment">-- 2. 修改表属性值</span></span><br><span class="line"><span class="keyword">desc</span> formatted score3;</span><br><span class="line"><span class="keyword">alter</span> <span class="keyword">table</span> score3 <span class="keyword">set</span> tblproperties (&quot;EXTERNAL&quot;<span class="operator">=</span>&quot;TRUE&quot;);</span><br><span class="line"><span class="keyword">alter</span> <span class="keyword">table</span> score3 <span class="keyword">set</span> tblproperties (&quot;COMMENT&quot;<span class="operator">=</span>&quot;this is comment&quot;);</span><br><span class="line"><span class="comment">-- 3. 添加表的分区</span></span><br><span class="line"><span class="keyword">alter</span> <span class="keyword">table</span> score3 <span class="keyword">add</span> <span class="keyword">partition</span> (<span class="keyword">year</span><span class="operator">=</span><span class="string">&#x27;2023&#x27;</span>, <span class="keyword">month</span><span class="operator">=</span><span class="string">&#x27;01&#x27;</span>, <span class="keyword">day</span><span class="operator">=</span><span class="string">&#x27;01&#x27;</span>);</span><br><span class="line"><span class="comment">-- 4. 
修改分区值(修改元数据记录, 在HDFS中的实体文件夹不会改名, 但是在元数据记录中是改了名的) - 一般不建议更改</span></span><br><span class="line"><span class="keyword">alter</span> <span class="keyword">table</span> score3 <span class="keyword">partition</span> (<span class="keyword">year</span><span class="operator">=</span><span class="string">&#x27;2023&#x27;</span>, <span class="keyword">month</span><span class="operator">=</span><span class="string">&#x27;01&#x27;</span>, <span class="keyword">day</span><span class="operator">=</span><span class="string">&#x27;01&#x27;</span>) rename <span class="keyword">to</span> <span class="keyword">partition</span> (<span class="keyword">year</span><span class="operator">=</span><span class="string">&#x27;2023&#x27;</span>, <span class="keyword">month</span><span class="operator">=</span><span class="string">&#x27;01&#x27;</span>, <span class="keyword">day</span><span class="operator">=</span><span class="string">&#x27;16&#x27;</span>);</span><br><span class="line"><span class="comment">-- 5. 删除分区(只删除了元数据, 数据本身还在)</span></span><br><span class="line"><span class="keyword">alter</span> <span class="keyword">table</span> score3 <span class="keyword">drop</span> <span class="keyword">partition</span> (<span class="keyword">year</span><span class="operator">=</span><span class="string">&#x27;2023&#x27;</span>, <span class="keyword">month</span><span class="operator">=</span><span class="string">&#x27;01&#x27;</span>, <span class="keyword">day</span><span class="operator">=</span><span class="string">&#x27;16&#x27;</span>);</span><br><span class="line"></span><br><span class="line"><span class="comment">-- 6. 添加列</span></span><br><span class="line"><span class="keyword">alter</span> <span class="keyword">table</span> score3 <span class="keyword">add</span> columns (v1 <span class="type">int</span>, v2 string);</span><br><span class="line"><span class="comment">-- 7. 
修改列名(不能改原本列的类型)</span></span><br><span class="line"><span class="keyword">alter</span> <span class="keyword">table</span> score3 change v1 v1_new <span class="type">int</span>;</span><br><span class="line"><span class="comment">-- 8. 清空表数据(只能清空内部表的数据)</span></span><br><span class="line"><span class="keyword">truncate</span> <span class="keyword">table</span> score3;</span><br><span class="line"><span class="comment">-- 9. 删除表</span></span><br><span class="line"><span class="keyword">drop</span> <span class="keyword">table</span> score3;</span><br></pre></td></tr></table></figure>

<h4 id="复杂类型操作"><a href="#复杂类型操作" class="headerlink" title="复杂类型操作"></a>复杂类型操作</h4><h5 id="Array-数组类型"><a href="#Array-数组类型" class="headerlink" title="Array 数组类型"></a>Array 数组类型</h5><blockquote>
<p>在一个列中表示多个信息, 保存一组相同类型的元素</p>
</blockquote>
<p>创建一个Array数据表</p>
<figure class="highlight sql"><table><tr><td class="code"><pre><span class="line"><span class="keyword">create</span> <span class="keyword">table</span> test_array(name string, work_locations <span class="keyword">array</span><span class="operator">&lt;</span>string<span class="operator">&gt;</span>)</span><br><span class="line"><span class="type">row</span> format delimited fields terminated <span class="keyword">by</span> <span class="string">&#x27;\t&#x27;</span></span><br><span class="line">collection items terminated <span class="keyword">by</span> <span class="string">&#x27;,&#x27;</span>;</span><br></pre></td></tr></table></figure>

<p>通过load data加载数据</p>
<figure class="highlight sql"><table><tr><td class="code"><pre><span class="line">load data <span class="keyword">local</span> inpath <span class="string">&#x27;/home/hadoop/data_for_array.txt&#x27;</span> <span class="keyword">into</span> <span class="keyword">table</span> test_array;</span><br><span class="line"><span class="keyword">select</span> <span class="operator">*</span> <span class="keyword">from</span> test_array;</span><br></pre></td></tr></table></figure>

<figure class="highlight sql"><table><tr><td class="code"><pre><span class="line"><span class="comment">-- 查询每个人的第一个工作地点</span></span><br><span class="line"><span class="keyword">select</span> name, work_locations[<span class="number">0</span>] <span class="keyword">from</span> test_array;</span><br><span class="line"><span class="comment">-- 查询每个人工作地点个数</span></span><br><span class="line"><span class="keyword">select</span> name, size(work_locations) <span class="keyword">from</span> test_array;</span><br><span class="line"><span class="comment">-- 查询哪一个在天津工作过(work_locations包含tianjin)</span></span><br><span class="line"><span class="keyword">select</span> name <span class="keyword">from</span> test_array <span class="keyword">where</span> array_contains(work_locations, <span class="string">&#x27;tianjin&#x27;</span>);</span><br></pre></td></tr></table></figure>

<h5 id="Map类型"><a href="#Map类型" class="headerlink" title="Map类型"></a>Map类型</h5><p>Map类型就是: key-value型数据格式, 如下数据文件显示, 其中<code>members</code>字段是key-value型数据, 字段与字段分隔符为’,’ ; map字段之间的分隔符为’#’ ; map内部k-v分隔符为 ‘:’</p>
<figure class="highlight plaintext"><table><tr><td class="code"><pre><span class="line">id, name, members, age</span><br><span class="line">1, zhangsan, father:xiaoming#mother:xiaohong, 28</span><br><span class="line">2, lisi, father:libai#mother:damei, 22</span><br><span class="line">...</span><br></pre></td></tr></table></figure>

<ul>
<li>建表语句</li>
</ul>
<figure class="highlight sql"><table><tr><td class="code"><pre><span class="line"><span class="keyword">create</span> <span class="keyword">table</span> test_map(</span><br><span class="line"> id <span class="type">int</span>,</span><br><span class="line">    name string,</span><br><span class="line">    members map<span class="operator">&lt;</span>string, string<span class="operator">&gt;</span>,</span><br><span class="line">    age <span class="type">int</span></span><br><span class="line">)<span class="type">row</span> format delimited fields terminated <span class="keyword">by</span> <span class="string">&#x27;,&#x27;</span></span><br><span class="line">collection items terminated <span class="keyword">by</span> <span class="string">&#x27;#&#x27;</span></span><br><span class="line">map keys terminated <span class="keyword">by</span> <span class="string">&#x27;:&#x27;</span>;</span><br></pre></td></tr></table></figure>

<figure class="highlight sql"><table><tr><td class="code"><pre><span class="line">load data <span class="keyword">local</span> inpath <span class="string">&#x27;/export/data/data_for_map.txt&#x27;</span> <span class="keyword">into</span> <span class="keyword">table</span> test_map;</span><br><span class="line"><span class="keyword">select</span> <span class="operator">*</span> <span class="keyword">from</span> test_map;</span><br></pre></td></tr></table></figure>

<ul>
<li>其他使用</li>
</ul>
<figure class="highlight sql"><table><tr><td class="code"><pre><span class="line"><span class="comment">-- 查看成员中 每个的父亲</span></span><br><span class="line"><span class="keyword">select</span> id, name, members[<span class="string">&#x27;father&#x27;</span>] <span class="keyword">from</span> test_map t;</span><br><span class="line"><span class="comment">-- 取出map中的全部key, 返回类型是array</span></span><br><span class="line"><span class="keyword">select</span> map_keys(members) <span class="keyword">from</span> test_map;</span><br><span class="line"><span class="comment">-- 取出map中的全部value -&gt; array</span></span><br><span class="line"><span class="keyword">select</span> map_values(members) <span class="keyword">from</span> test_map;</span><br><span class="line"><span class="comment">-- size查看map的元素中(k-v对)的个数</span></span><br><span class="line"><span class="keyword">select</span> size(members) <span class="keyword">from</span> test_map;</span><br><span class="line"><span class="comment">-- array_contains查看指定的数据是否包含在map中, 是否有sister这个key</span></span><br><span class="line"><span class="keyword">select</span> <span class="operator">*</span> <span class="keyword">from</span> test_map <span class="keyword">where</span> array_contains(map_keys(members), <span class="string">&#x27;sister&#x27;</span>);</span><br><span class="line"><span class="comment">-- array_contains查看指定的数据是否包含在map中, 是否有 王林 这个value</span></span><br><span class="line"><span class="keyword">select</span> <span class="operator">*</span> <span class="keyword">from</span> test_map <span class="keyword">where</span> array_contains(map_values(members), <span class="string">&#x27;王林&#x27;</span>);</span><br></pre></td></tr></table></figure>

<h5 id="Struct类型"><a href="#Struct类型" class="headerlink" title="Struct类型"></a>Struct类型</h5><p>Struct类型是一个复合类型, 可以在一个列中存入多个子列, 每个子列允许设置类型和名称</p>
<p>如下文件, 字段之间’#’分隔, struct之间’:’分隔</p>
<figure class="highlight plaintext"><table><tr><td class="code"><pre><span class="line">1#周杰伦:11</span><br><span class="line">2#李锦记:16</span><br><span class="line">...</span><br></pre></td></tr></table></figure>

<ul>
<li>建表语句</li>
</ul>
<figure class="highlight sql"><table><tr><td class="code"><pre><span class="line"><span class="keyword">create</span> <span class="keyword">table</span> test_struct(</span><br><span class="line"> id string,</span><br><span class="line">    info struct<span class="operator">&lt;</span>name:string, age:<span class="type">int</span><span class="operator">&gt;</span></span><br><span class="line">) <span class="type">row</span> format delimited fields terminated <span class="keyword">by</span> <span class="string">&#x27;#&#x27;</span></span><br><span class="line">    COLLECTION ITEMS TERMINATED <span class="keyword">BY</span> <span class="string">&#x27;:&#x27;</span>;</span><br></pre></td></tr></table></figure>

<ul>
<li>加载数据并显示</li>
</ul>
<figure class="highlight sql"><table><tr><td class="code"><pre><span class="line">load data <span class="keyword">local</span> inpath <span class="string">&#x27;/export/data/data_for_struct.txt&#x27;</span> <span class="keyword">into</span> <span class="keyword">table</span> test_struct;</span><br><span class="line"><span class="keyword">select</span> id, info <span class="keyword">from</span> test_struct;</span><br><span class="line"><span class="comment">-- 此处的info是一个object类型</span></span><br></pre></td></tr></table></figure>

<ul>
<li>可以通过如下方法得到具体信息值</li>
</ul>
<figure class="highlight sql"><table><tr><td class="code"><pre><span class="line"><span class="keyword">select</span> id, info.name, info.age <span class="keyword">from</span> test_struct;</span><br></pre></td></tr></table></figure>

<p><img src="https://mundane-1317827053.cos.ap-chengdu.myqcloud.com/typora/20230419203111.png" alt="image-20230419203109714"></p>
<h2 id="Q-amp-A"><a href="#Q-amp-A" class="headerlink" title="Q&amp;A"></a>Q&amp;A</h2><h3 id="rpm卸载mariadb报错"><a href="#rpm卸载mariadb报错" class="headerlink" title="rpm卸载mariadb报错"></a>rpm卸载mariadb报错</h3><p><img src="https://mundane-1317827053.cos.ap-chengdu.myqcloud.com/typora/image-20230422155806879.png" alt="image-20230422155806879"></p>
<p>解决办法：</p>
<figure class="highlight plaintext"><table><tr><td class="code"><pre><span class="line">rpm -e --nodeps mariadb-libs-5.5.68-1.el7.x86_64</span><br></pre></td></tr></table></figure>

<h3 id="adduser和useradd区别"><a href="#adduser和useradd区别" class="headerlink" title="adduser和useradd区别"></a>adduser和useradd区别</h3><ol>
<li><p>在root权限下，useradd只是创建了一个用户名，如 （useradd +用户名 ），它并没有在&#x2F;home目录下创建同名文件夹，也没有创建密码，因此利用这个用户登录系统，是登录不了的，为了避免这样的情况出现，可以用 （useradd -m +用户名）的方式创建，它会在&#x2F;home目录下创建同名文件夹，然后利用（ passwd + 用户名）为指定的用户名设置密码。</p>
</li>
<li><p>可以直接利用<strong>adduser</strong>创建新用户（adduser +用户名）这样<strong>在&#x2F;home目录下会自动创建同名文件夹</strong></p>
</li>
<li><p>删除用户，只需使用一个简单的命令“userdel 用户名”即可。不过最好将它留在系统上的文件也删除掉，你可以使用“userdel -r 用户名”来实现这一目的。</p>
</li>
</ol>
<h3 id="mysql开机自启动出错"><a href="#mysql开机自启动出错" class="headerlink" title="mysql开机自启动出错"></a>mysql开机自启动出错</h3><p><img src="https://mundane-1317827053.cos.ap-chengdu.myqcloud.com/typora/image-20230422171440473.png" alt="image-20230422171440473"></p>
<p>解决办法：</p>
<figure class="highlight plaintext"><table><tr><td class="code"><pre><span class="line">/sbin/chkconfig mysqld on</span><br></pre></td></tr></table></figure>

<p><img src="https://mundane-1317827053.cos.ap-chengdu.myqcloud.com/typora/image-20230422202339916.png" alt="image-20230422202339916"></p>
<h3 id="beeline连接出错"><a href="#beeline连接出错" class="headerlink" title="beeline连接出错"></a>beeline连接出错</h3><p>解决办法：在hadoop的core-site.xml文件中修改内容：将root用户名改成自己的用户名</p>
<p><img src="https://mundane-1317827053.cos.ap-chengdu.myqcloud.com/typora/00a95f24feba4108883a9446fef7f74e.png" alt="img"></p>
<p>修改完记得保存， 注意，如果是运行在三台虚拟机组成的集群上，还需要将core-site.xml分发到每一台服务器上，之后，重新启动hadoop，接着启动mysql,metastore,hiveserver2和beeline</p>
<p><img src="https://mundane-1317827053.cos.ap-chengdu.myqcloud.com/typora/image-20230422182950522.png" alt="image-20230422182950522"></p>
<h3 id="MYSQL重启报PID文件丢失-持续更新"><a href="#MYSQL重启报PID文件丢失-持续更新" class="headerlink" title="MYSQL重启报PID文件丢失(持续更新)"></a><a target="_blank" rel="noopener" href="https://www.cnblogs.com/hongliang-dba/p/14637434.html">MYSQL重启报PID文件丢失(持续更新)</a></h3><figure class="highlight plaintext"><table><tr><td class="code"><pre><span class="line">mkdir -p /var/run/mysqld</span><br><span class="line">chown mysql:mysql /var/run/mysqld</span><br><span class="line">touch /var/run/mysqld/mysqld.pid</span><br><span class="line">chown mysql:mysql /var/run/mysqld/mysqld.pid</span><br></pre></td></tr></table></figure>

<h3 id="Hdfs页面操作文件出现"><a href="#Hdfs页面操作文件出现" class="headerlink" title="Hdfs页面操作文件出现"></a>Hdfs页面操作文件出现</h3><p>Permission denied: user&#x3D;dr.who, access&#x3D;WRITE, inode&#x3D;&quot;&#x2F;&quot;:hadoop:supergroup:drwxr-xr-x 问题解决</p>
<p>dr.who是hadoop中http访问的静态用户名</p>
<p>通过修改core-site.xml，配置为当前用户，</p>
<figure class="highlight plaintext"><table><tr><td class="code"><pre><span class="line">&lt;property&gt;</span><br><span class="line">    &lt;name&gt;hadoop.http.staticuser.user&lt;/name&gt;</span><br><span class="line">    &lt;value&gt;hadoop&lt;/value&gt;</span><br><span class="line">&lt;/property&gt;</span><br></pre></td></tr></table></figure>

<h3 id="执行insert语句非常慢"><a href="#执行insert语句非常慢" class="headerlink" title="执行insert语句非常慢"></a>执行insert语句非常慢</h3><p><img src="https://mundane-1317827053.cos.ap-chengdu.myqcloud.com/typora/image-20230423085701426.png" alt="image-20230423085701426"></p>
<h3 id="网卡连接失败"><a href="#网卡连接失败" class="headerlink" title="网卡连接失败"></a>网卡连接失败</h3><p>禁用NetworkManager</p>
<figure class="highlight bash"><table><tr><td class="code"><pre><span class="line">systemctl stop NetworkManager            停止服务</span><br><span class="line"></span><br><span class="line">systemctl <span class="built_in">disable</span> NetworkManager         开机不自启</span><br></pre></td></tr></table></figure>
</article><div class="post-copyright"><div class="post-copyright__author"><span class="post-copyright-meta">文章作者: </span><span class="post-copyright-info"><a href="https://gitee.com/frode117">frode</a></span></div><div class="post-copyright__type"><span class="post-copyright-meta">文章链接: </span><span class="post-copyright-info"><a href="https://gitee.com/frode117/2023/04/27/hadoop-study/">https://gitee.com/frode117/2023/04/27/hadoop-study/</a></span></div><div class="post-copyright__notice"><span class="post-copyright-meta">版权声明: </span><span class="post-copyright-info">本博客所有文章除特别声明外，均采用 <a href="https://creativecommons.org/licenses/by-nc-sa/4.0/" target="_blank">CC BY-NC-SA 4.0</a> 许可协议。转载请注明来自 <a href="https://gitee.com/frode117" target="_blank">mundane</a>！</span></div></div><div class="tag_share"><div class="post-meta__tag-list"><a class="post-meta__tags" href="/mundane/tags/hadoop/">hadoop</a><a class="post-meta__tags" href="/mundane/tags/hive/">hive</a></div><div class="post_share"><div class="social-share" data-image="https://mundane-1317827053.cos.ap-chengdu.myqcloud.com/img/67190321_p0.png" data-sites="twitter,wechat,qq"></div><link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/butterfly-extsrc/sharejs/dist/css/share.min.css" media="print" onload="this.media='all'"><script src="https://cdn.jsdelivr.net/npm/butterfly-extsrc/sharejs/dist/js/social-share.min.js" defer></script></div></div><nav class="pagination-post" id="pagination"><div class="prev-post pull-left"><a href="/mundane/2023/04/27/mysql-study/" title="mysql学习"><img class="cover" src="https://mundane-1317827053.cos.ap-chengdu.myqcloud.com/img/suzume.png" onerror="onerror=null;src='/mundane/img/404.jpg'" alt="cover of previous post"><div class="pagination-info"><div class="label">上一篇</div><div class="prev_info">mysql学习</div></div></a></div><div class="next-post pull-right"><a href="/mundane/2023/04/16/linux%E5%AD%A6%E4%B9%A0/" title="linux学习"><img class="cover" src="/mundane/img/twist.png" 
onerror="onerror=null;src='/mundane/img/404.jpg'" alt="cover of next post"><div class="pagination-info"><div class="label">下一篇</div><div class="next_info">linux学习</div></div></a></div></nav><hr/><div id="post-comment"><div class="comment-head"><div class="comment-headline"><i class="fas fa-comments fa-fw"></i><span> 评论</span></div></div><div class="comment-wrap"><div><div class="vcomment" id="vcomment"></div></div></div></div></div><div class="aside-content" id="aside-content"><div class="card-widget card-info"><div class="is-center"><div class="avatar-img"><img src="/mundane/img/violet.jpg" onerror="this.onerror=null;this.src='/mundane/img/friend_404.gif'" alt="avatar"/></div><div class="author-info__name">frode</div><div class="author-info__description">this is my blog</div></div><div class="card-info-data site-data is-center"><a href="/mundane/archives/"><div class="headline">文章</div><div class="length-num">4</div></a><a href="/mundane/tags/"><div class="headline">标签</div><div class="length-num">8</div></a><a href="/mundane/categories/"><div class="headline">分类</div><div class="length-num">4</div></a></div><a id="card-info-btn" target="_blank" rel="noopener" href="https://github.com/munDane117"><i class="fab fa-github"></i><span>Follow Me</span></a></div><div class="card-widget card-announcement"><div class="item-headline"><i class="fas fa-bullhorn fa-shake"></i><span>公告</span></div><div class="announcement_content">stay hungry,stay foolish</div></div><div class="sticky_layout"><div class="card-widget" id="card-toc"><div class="item-headline"><i class="fas fa-stream"></i><span>目录</span><span class="toc-percentage"></span></div><div class="toc-content"><ol class="toc"><li class="toc-item toc-level-1"><a class="toc-link" href="#Hadoop"><span class="toc-number">1.</span> <span class="toc-text">Hadoop</span></a><ol class="toc-child"><li class="toc-item toc-level-2"><a class="toc-link" href="#HDFS"><span class="toc-number">1.1.</span> <span 
class="toc-text">HDFS</span></a><ol class="toc-child"><li class="toc-item toc-level-3"><a class="toc-link" href="#%E6%96%87%E4%BB%B6%E7%B3%BB%E7%BB%9F%E5%8D%8F%E8%AE%AE"><span class="toc-number">1.1.1.</span> <span class="toc-text">文件系统协议</span></a></li><li class="toc-item toc-level-3"><a class="toc-link" href="#%E5%90%84%E8%A7%92%E8%89%B2%E8%81%8C%E8%B4%A3"><span class="toc-number">1.1.2.</span> <span class="toc-text">各角色职责</span></a><ol class="toc-child"><li class="toc-item toc-level-4"><a class="toc-link" href="#%E4%B8%BB%E8%A7%92%E8%89%B2%EF%BC%9ANameNode"><span class="toc-number">1.1.2.1.</span> <span class="toc-text">主角色：NameNode</span></a></li><li class="toc-item toc-level-4"><a class="toc-link" href="#%E4%BB%8E%E8%A7%92%E8%89%B2%EF%BC%9ADataNode"><span class="toc-number">1.1.2.2.</span> <span class="toc-text">从角色：DataNode</span></a></li><li class="toc-item toc-level-4"><a class="toc-link" href="#Namenode%E8%81%8C%E8%B4%A3"><span class="toc-number">1.1.2.3.</span> <span class="toc-text">Namenode职责</span></a></li><li class="toc-item toc-level-4"><a class="toc-link" href="#DataNode%E8%81%8C%E8%B4%A3"><span class="toc-number">1.1.2.4.</span> <span class="toc-text">DataNode职责</span></a></li></ol></li><li class="toc-item toc-level-3"><a class="toc-link" href="#%E5%B7%A5%E4%BD%9C%E6%B5%81%E7%A8%8B%E4%B8%8E%E6%9C%BA%E5%88%B6"><span class="toc-number">1.1.3.</span> <span class="toc-text">工作流程与机制</span></a><ol class="toc-child"><li class="toc-item toc-level-4"><a class="toc-link" href="#%E5%86%99%E6%95%B0%E6%8D%AE"><span class="toc-number">1.1.3.1.</span> <span class="toc-text">写数据</span></a></li><li class="toc-item toc-level-4"><a class="toc-link" href="#%E6%B5%81%E7%A8%8B%E6%80%BB%E7%BB%93"><span class="toc-number">1.1.3.2.</span> <span class="toc-text">流程总结</span></a></li></ol></li></ol></li><li class="toc-item toc-level-2"><a class="toc-link" href="#MapReduce"><span class="toc-number">1.2.</span> <span class="toc-text">MapReduce</span></a><ol 
class="toc-child"><li class="toc-item toc-level-3"><a class="toc-link" href="#%E7%89%B9%E7%82%B9"><span class="toc-number">1.2.1.</span> <span class="toc-text">特点</span></a></li><li class="toc-item toc-level-3"><a class="toc-link" href="#%E5%B1%80%E9%99%90%E6%80%A7"><span class="toc-number">1.2.2.</span> <span class="toc-text">局限性</span></a></li><li class="toc-item toc-level-3"><a class="toc-link" href="#%E5%88%86%E5%B8%83%E5%BC%8F%E8%AE%A1%E7%AE%97"><span class="toc-number">1.2.3.</span> <span class="toc-text">分布式计算</span></a></li><li class="toc-item toc-level-3"><a class="toc-link" href="#%E5%AE%9E%E4%BE%8B%E8%BF%9B%E7%A8%8B"><span class="toc-number">1.2.4.</span> <span class="toc-text">实例进程</span></a></li><li class="toc-item toc-level-3"><a class="toc-link" href="#%E9%98%B6%E6%AE%B5%E7%BB%84%E6%88%90"><span class="toc-number">1.2.5.</span> <span class="toc-text">阶段组成</span></a><ol class="toc-child"><li class="toc-item toc-level-4"><a class="toc-link" href="#Map%E9%98%B6%E6%AE%B5"><span class="toc-number">1.2.5.1.</span> <span class="toc-text">Map阶段</span></a></li><li class="toc-item toc-level-4"><a class="toc-link" href="#Reduce%E9%98%B6%E6%AE%B5"><span class="toc-number">1.2.5.2.</span> <span class="toc-text">Reduce阶段</span></a></li><li class="toc-item toc-level-4"><a class="toc-link" href="#shuffle"><span class="toc-number">1.2.5.3.</span> <span class="toc-text">shuffle</span></a></li></ol></li></ol></li><li class="toc-item toc-level-2"><a class="toc-link" href="#YARN"><span class="toc-number">1.3.</span> <span class="toc-text">YARN</span></a><ol class="toc-child"><li class="toc-item toc-level-3"><a class="toc-link" href="#%E6%9E%B6%E6%9E%84"><span class="toc-number">1.3.1.</span> <span class="toc-text">架构</span></a></li><li class="toc-item toc-level-3"><a class="toc-link" href="#%E6%A0%B8%E5%BF%83%E4%BA%A4%E4%BA%92%E6%B5%81%E7%A8%8B"><span class="toc-number">1.3.2.</span> <span class="toc-text">核心交互流程</span></a></li><li class="toc-item toc-level-3"><a 
class="toc-link" href="#%E8%B5%84%E6%BA%90%E8%B0%83%E5%BA%A6%E5%99%A8Scheduler"><span class="toc-number">1.3.3.</span> <span class="toc-text">资源调度器Scheduler</span></a><ol class="toc-child"><li class="toc-item toc-level-4"><a class="toc-link" href="#FIFO-Scheduler%EF%BC%88%E5%85%88%E8%BF%9B%E5%85%88%E5%87%BA%E8%B0%83%E5%BA%A6%E5%99%A8%EF%BC%89"><span class="toc-number">1.3.3.1.</span> <span class="toc-text">FIFO Scheduler（先进先出调度器）</span></a></li><li class="toc-item toc-level-4"><a class="toc-link" href="#Capacity-Scheduler%EF%BC%88%E5%AE%B9%E9%87%8F%E8%B0%83%E5%BA%A6%E5%99%A8%EF%BC%89"><span class="toc-number">1.3.3.2.</span> <span class="toc-text">Capacity Scheduler（容量调度器）</span></a></li><li class="toc-item toc-level-4"><a class="toc-link" href="#Fair-Scheduler%EF%BC%88%E5%85%AC%E5%B9%B3%E8%B0%83%E5%BA%A6%E5%99%A8%EF%BC%89"><span class="toc-number">1.3.3.3.</span> <span class="toc-text">Fair Scheduler（公平调度器）</span></a></li></ol></li></ol></li><li class="toc-item toc-level-2"><a class="toc-link" href="#Apache-Hive"><span class="toc-number">1.4.</span> <span class="toc-text">Apache Hive</span></a><ol class="toc-child"><li class="toc-item toc-level-3"><a class="toc-link" href="#%E6%95%B0%E4%BB%93%E4%B8%BB%E8%A6%81%E7%89%B9%E5%BE%81"><span class="toc-number">1.4.1.</span> <span class="toc-text">数仓主要特征</span></a></li><li class="toc-item toc-level-3"><a class="toc-link" href="#SQL"><span class="toc-number">1.4.2.</span> <span class="toc-text">SQL</span></a><ol class="toc-child"><li class="toc-item toc-level-4"><a class="toc-link" href="#SQL%E8%AF%AD%E6%B3%95%E5%88%86%E7%B1%BB"><span class="toc-number">1.4.2.1.</span> <span class="toc-text">SQL语法分类</span></a></li><li class="toc-item toc-level-4"><a class="toc-link" href="#Hive%E5%92%8CHadoop%E5%85%B3%E7%B3%BB"><span class="toc-number">1.4.2.2.</span> <span class="toc-text">Hive和Hadoop关系</span></a></li><li class="toc-item toc-level-4"><a class="toc-link" href="#%E5%AF%B9Hive%E7%9A%84%E7%90%86%E8%A7%A3"><span 
class="toc-number">1.4.2.3.</span> <span class="toc-text">对Hive的理解</span></a></li></ol></li><li class="toc-item toc-level-3"><a class="toc-link" href="#Hive%E6%9E%B6%E6%9E%84%E5%9B%BE"><span class="toc-number">1.4.3.</span> <span class="toc-text">Hive架构图</span></a><ol class="toc-child"><li class="toc-item toc-level-4"><a class="toc-link" href="#%E5%85%83%E6%95%B0%E6%8D%AE"><span class="toc-number">1.4.3.1.</span> <span class="toc-text">元数据</span></a></li></ol></li><li class="toc-item toc-level-3"><a class="toc-link" href="#Hive%E9%83%A8%E7%BD%B2"><span class="toc-number">1.4.4.</span> <span class="toc-text">Hive部署</span></a><ol class="toc-child"><li class="toc-item toc-level-4"><a class="toc-link" href="#%E6%95%B4%E5%90%88Hive"><span class="toc-number">1.4.4.1.</span> <span class="toc-text">整合Hive</span></a></li><li class="toc-item toc-level-4"><a class="toc-link" href="#%E5%AE%89%E8%A3%85mysql"><span class="toc-number">1.4.4.2.</span> <span class="toc-text">安装mysql</span></a></li></ol></li><li class="toc-item toc-level-3"><a class="toc-link" href="#%E5%90%AF%E5%8A%A8Hive"><span class="toc-number">1.4.5.</span> <span class="toc-text">启动Hive</span></a></li><li class="toc-item toc-level-3"><a class="toc-link" href="#Hive%E5%AE%A2%E6%88%B7%E7%AB%AF%E4%BD%BF%E7%94%A8"><span class="toc-number">1.4.6.</span> <span class="toc-text">Hive客户端使用</span></a><ol class="toc-child"><li class="toc-item toc-level-4"><a class="toc-link" href="#bin-x2F-beeline%E5%AE%A2%E6%88%B7%E7%AB%AF%E4%BD%BF%E7%94%A8"><span class="toc-number">1.4.6.1.</span> <span class="toc-text">bin&#x2F;beeline客户端使用</span></a></li></ol></li><li class="toc-item toc-level-3"><a class="toc-link" href="#pycharm-amp-Hive"><span class="toc-number">1.4.7.</span> <span class="toc-text">pycharm &amp; Hive</span></a><ol class="toc-child"><li class="toc-item toc-level-4"><a class="toc-link" href="#%E5%88%9B%E5%BB%BA%E8%A1%A8%E7%9A%84%E8%AF%AD%E6%B3%95"><span class="toc-number">1.4.7.1.</span> <span 
class="toc-text">创建表的语法</span></a></li><li class="toc-item toc-level-4"><a class="toc-link" href="#hive%E6%89%80%E6%94%AF%E6%8C%81%E7%9A%84%E6%95%B0%E6%8D%AE%E7%B1%BB%E5%9E%8B"><span class="toc-number">1.4.7.2.</span> <span class="toc-text">hive所支持的数据类型</span></a></li><li class="toc-item toc-level-4"><a class="toc-link" href="#Hive%E7%9A%84%E8%A1%A8%E7%B1%BB%E5%9E%8B"><span class="toc-number">1.4.7.3.</span> <span class="toc-text">Hive的表类型</span></a><ol class="toc-child"><li class="toc-item toc-level-5"><a class="toc-link" href="#%E5%86%85%E9%83%A8%E8%A1%A8"><span class="toc-number">1.4.7.3.1.</span> <span class="toc-text">内部表</span></a></li><li class="toc-item toc-level-5"><a class="toc-link" href="#%E5%A4%96%E9%83%A8%E8%A1%A8"><span class="toc-number">1.4.7.3.2.</span> <span class="toc-text">外部表</span></a><ol class="toc-child"><li class="toc-item toc-level-6"><a class="toc-link" href="#1-%E5%85%88%E6%9C%89%E8%A1%A8-%E5%90%8E%E6%9C%89%E6%95%B0%E6%8D%AE"><span class="toc-number">1.4.7.3.2.1.</span> <span class="toc-text">1.先有表, 后有数据</span></a></li><li class="toc-item toc-level-6"><a class="toc-link" href="#2-%E5%85%88%E6%9C%89%E6%95%B0%E6%8D%AE-%E5%90%8E%E6%9C%89%E8%A1%A8"><span class="toc-number">1.4.7.3.2.2.</span> <span class="toc-text">2.先有数据, 后有表</span></a></li></ol></li><li class="toc-item toc-level-5"><a class="toc-link" href="#%E5%86%85-x2F-%E5%A4%96%E9%83%A8%E8%A1%A8%E8%BD%AC%E6%8D%A2"><span class="toc-number">1.4.7.3.3.</span> <span class="toc-text">内&#x2F;外部表转换</span></a></li></ol></li><li class="toc-item toc-level-4"><a class="toc-link" href="#%E6%95%B0%E6%8D%AE%E5%8A%A0%E8%BD%BD%E5%92%8C%E5%AF%BC%E5%87%BA"><span class="toc-number">1.4.7.4.</span> <span class="toc-text">数据加载和导出</span></a><ol class="toc-child"><li class="toc-item toc-level-5"><a class="toc-link" href="#%E6%95%B0%E6%8D%AE%E5%8A%A0%E8%BD%BD-LOAD%E8%AF%AD%E6%B3%95"><span class="toc-number">1.4.7.4.1.</span> <span class="toc-text">数据加载-LOAD语法</span></a></li><li class="toc-item 
toc-level-5"><a class="toc-link" href="#%E6%95%B0%E6%8D%AE%E5%8A%A0%E8%BD%BD-INSERT-SELECT%E8%AF%AD%E6%B3%95"><span class="toc-number">1.4.7.4.2.</span> <span class="toc-text">数据加载-INSERT SELECT语法</span></a></li><li class="toc-item toc-level-5"><a class="toc-link" href="#%E6%95%B0%E6%8D%AE%E5%AF%BC%E5%87%BA-INSERT-OVERWRITE%E6%96%B9%E5%BC%8F"><span class="toc-number">1.4.7.4.3.</span> <span class="toc-text">数据导出-INSERT OVERWRITE方式</span></a></li></ol></li><li class="toc-item toc-level-4"><a class="toc-link" href="#Hive%E8%A1%A8%E6%95%B0%E6%8D%AE%E5%AF%BC%E5%87%BA-hive-shell"><span class="toc-number">1.4.7.5.</span> <span class="toc-text">Hive表数据导出-hive shell</span></a></li><li class="toc-item toc-level-4"><a class="toc-link" href="#%E5%88%86%E5%8C%BA%E8%A1%A8"><span class="toc-number">1.4.7.6.</span> <span class="toc-text">分区表</span></a><ol class="toc-child"><li class="toc-item toc-level-5"><a class="toc-link" href="#%E5%88%86%E5%8C%BA%E8%A1%A8%E7%9A%84%E4%BD%BF%E7%94%A8"><span class="toc-number">1.4.7.6.1.</span> <span class="toc-text">分区表的使用</span></a></li></ol></li><li class="toc-item toc-level-4"><a class="toc-link" href="#%E5%88%86%E6%A1%B6%E8%A1%A8"><span class="toc-number">1.4.7.7.</span> <span class="toc-text">分桶表</span></a><ol class="toc-child"><li class="toc-item toc-level-5"><a class="toc-link" href="#%E5%88%86%E6%A1%B6%E8%A1%A8%E7%9A%84%E5%88%9B%E5%BB%BA"><span class="toc-number">1.4.7.7.1.</span> <span class="toc-text">分桶表的创建</span></a></li><li class="toc-item toc-level-5"><a class="toc-link" href="#%E5%88%86%E6%A1%B6%E8%A1%A8%E7%9A%84%E6%80%A7%E8%83%BD%E6%8F%90%E5%8D%87"><span class="toc-number">1.4.7.7.2.</span> <span class="toc-text">分桶表的性能提升</span></a></li></ol></li><li class="toc-item toc-level-4"><a class="toc-link" href="#%E4%BF%AE%E6%94%B9%E8%A1%A8"><span class="toc-number">1.4.7.8.</span> <span class="toc-text">修改表</span></a></li><li class="toc-item toc-level-4"><a class="toc-link" 
href="#%E5%A4%8D%E6%9D%82%E7%B1%BB%E5%9E%8B%E6%93%8D%E4%BD%9C"><span class="toc-number">1.4.7.9.</span> <span class="toc-text">复杂类型操作</span></a><ol class="toc-child"><li class="toc-item toc-level-5"><a class="toc-link" href="#Array-%E6%95%B0%E7%BB%84%E7%B1%BB%E5%9E%8B"><span class="toc-number">1.4.7.9.1.</span> <span class="toc-text">Array 数组类型</span></a></li><li class="toc-item toc-level-5"><a class="toc-link" href="#Map%E7%B1%BB%E5%9E%8B"><span class="toc-number">1.4.7.9.2.</span> <span class="toc-text">Map类型</span></a></li><li class="toc-item toc-level-5"><a class="toc-link" href="#Struct%E7%B1%BB%E5%9E%8B"><span class="toc-number">1.4.7.9.3.</span> <span class="toc-text">Struct类型</span></a></li></ol></li></ol></li></ol></li><li class="toc-item toc-level-2"><a class="toc-link" href="#Q-amp-A"><span class="toc-number">1.5.</span> <span class="toc-text">Q&amp;A</span></a><ol class="toc-child"><li class="toc-item toc-level-3"><a class="toc-link" href="#rpm%E5%8D%B8%E8%BD%BDmariadb%E6%8A%A5%E9%94%99"><span class="toc-number">1.5.1.</span> <span class="toc-text">rpm卸载mariadb报错</span></a></li><li class="toc-item toc-level-3"><a class="toc-link" href="#adduser%E5%92%8Cuseradd%E5%8C%BA%E5%88%AB"><span class="toc-number">1.5.2.</span> <span class="toc-text">adduser和useradd区别</span></a></li><li class="toc-item toc-level-3"><a class="toc-link" href="#mysql%E5%BC%80%E6%9C%BA%E8%87%AA%E5%90%AF%E5%8A%A8%E5%87%BA%E9%94%99"><span class="toc-number">1.5.3.</span> <span class="toc-text">mysql开机自启动出错</span></a></li><li class="toc-item toc-level-3"><a class="toc-link" href="#beeline%E8%BF%9E%E6%8E%A5%E5%87%BA%E9%94%99"><span class="toc-number">1.5.4.</span> <span class="toc-text">beeline连接出错</span></a></li><li class="toc-item toc-level-3"><a class="toc-link" href="#MYSQL%E9%87%8D%E5%90%AF%E6%8A%A5PID%E6%96%87%E4%BB%B6%E4%B8%A2%E5%A4%B1-%E6%8C%81%E7%BB%AD%E6%9B%B4%E6%96%B0"><span class="toc-number">1.5.5.</span> <span class="toc-text">MYSQL重启报PID文件丢失(持续更新)</span></a></li><li 
class="toc-item toc-level-3"><a class="toc-link" href="#Hdfs%E9%A1%B5%E9%9D%A2%E6%93%8D%E4%BD%9C%E6%96%87%E4%BB%B6%E5%87%BA%E7%8E%B0"><span class="toc-number">1.5.6.</span> <span class="toc-text">Hdfs页面操作文件出现</span></a></li><li class="toc-item toc-level-3"><a class="toc-link" href="#%E6%89%A7%E8%A1%8Cinsert%E8%AF%AD%E5%8F%A5%E9%9D%9E%E5%B8%B8%E6%85%A2"><span class="toc-number">1.5.7.</span> <span class="toc-text">执行insert语句非常慢</span></a></li><li class="toc-item toc-level-3"><a class="toc-link" href="#%E7%BD%91%E5%8D%A1%E8%BF%9E%E6%8E%A5%E5%A4%B1%E8%B4%A5"><span class="toc-number">1.5.8.</span> <span class="toc-text">网卡连接失败</span></a></li></ol></li></ol></li></ol></div></div><div class="card-widget card-recent-post"><div class="item-headline"><i class="fas fa-history"></i><span>最新文章</span></div><div class="aside-list"><div class="aside-list-item"><a class="thumbnail" href="/mundane/2023/04/27/mysql-study/" title="mysql学习"><img src="https://mundane-1317827053.cos.ap-chengdu.myqcloud.com/img/suzume.png" onerror="this.onerror=null;this.src='/mundane/img/404.jpg'" alt="mysql学习"/></a><div class="content"><a class="title" href="/mundane/2023/04/27/mysql-study/" title="mysql学习">mysql学习</a><time datetime="2023-04-27T15:01:33.000Z" title="发表于 2023-04-27 23:01:33">2023-04-27</time></div></div><div class="aside-list-item"><a class="thumbnail" href="/mundane/2023/04/27/hadoop-study/" title="hadoop学习"><img src="https://mundane-1317827053.cos.ap-chengdu.myqcloud.com/img/67190321_p0.png" onerror="this.onerror=null;this.src='/mundane/img/404.jpg'" alt="hadoop学习"/></a><div class="content"><a class="title" href="/mundane/2023/04/27/hadoop-study/" title="hadoop学习">hadoop学习</a><time datetime="2023-04-27T14:47:36.000Z" title="发表于 2023-04-27 22:47:36">2023-04-27</time></div></div><div class="aside-list-item"><a class="thumbnail" href="/mundane/2023/04/16/linux%E5%AD%A6%E4%B9%A0/" title="linux学习"><img src="/mundane/img/twist.png" onerror="this.onerror=null;this.src='/mundane/img/404.jpg'" 
alt="linux学习"/></a><div class="content"><a class="title" href="/mundane/2023/04/16/linux%E5%AD%A6%E4%B9%A0/" title="linux学习">linux学习</a><time datetime="2023-04-16T14:52:19.000Z" title="发表于 2023-04-16 22:52:19">2023-04-16</time></div></div><div class="aside-list-item"><a class="thumbnail" href="/mundane/2023/04/15/hello-world/" title="Hello World"><img src="/mundane/img/girl.jpg" onerror="this.onerror=null;this.src='/mundane/img/404.jpg'" alt="Hello World"/></a><div class="content"><a class="title" href="/mundane/2023/04/15/hello-world/" title="Hello World">Hello World</a><time datetime="2023-04-15T12:52:02.200Z" title="发表于 2023-04-15 20:52:02">2023-04-15</time></div></div></div></div></div></div></main><footer id="footer"><div id="footer-wrap"><div class="copyright">&copy;2023 By frode</div><div class="footer_custom_text">1024</div></div></footer></div><div id="rightside"><div id="rightside-config-hide"><button id="readmode" type="button" title="阅读模式"><i class="fas fa-book-open"></i></button><button id="darkmode" type="button" title="浅色和深色模式转换"><i class="fas fa-adjust"></i></button><button id="hide-aside-btn" type="button" title="单栏和双栏切换"><i class="fas fa-arrows-alt-h"></i></button></div><div id="rightside-config-show"><button id="rightside_config" type="button" title="设置"><i class="fas fa-cog fa-spin"></i></button><button class="close" id="mobile-toc-button" type="button" title="目录"><i class="fas fa-list-ul"></i></button><a id="to_comment" href="#post-comment" title="直达评论"><i class="fas fa-comments"></i></a><button id="go-up" type="button" title="回到顶部"><span class="scroll-percent"></span><i class="fas fa-arrow-up"></i></button></div></div><div><script src="/mundane/js/utils.js"></script><script src="/mundane/js/main.js"></script><script src="https://cdn.jsdelivr.net/npm/@fancyapps/ui/dist/fancybox/fancybox.umd.min.js"></script><script src="https://cdn.jsdelivr.net/npm/vanilla-lazyload/dist/lazyload.iife.min.js"></script><script 
src="https://cdn.jsdelivr.net/npm/node-snackbar/dist/snackbar.min.js"></script><div class="js-pjax"><script>if (!window.MathJax) {
  // First load: define the MathJax v3 configuration before injecting the library,
  // so MathJax picks it up on startup.
  window.MathJax = {
    tex: {
      // Allow both $...$ and \(...\) as inline math delimiters.
      inlineMath: [ ['$','$'], ["\\(","\\)"]],
      tags: 'ams'
    },
    chtml: {
      scale: 1.1
    },
    options: {
      renderActions: {
        // Custom findScript: pick up <script type="math/tex"> nodes (legacy v2-style
        // output emitted by the Hexo renderer) instead of scanning raw text.
        findScript: [10, doc => {
          for (const node of document.querySelectorAll('script[type^="math/tex"]')) {
            // "; mode=display" in the type attribute marks display (block) math.
            const display = !!node.type.match(/; *mode=display/)
            const math = new doc.options.MathItem(node.textContent, doc.inputJax[0], display)
            // Swap the script tag for an empty text node that anchors the MathItem.
            const text = document.createTextNode('')
            node.parentNode.replaceChild(text, node)
            math.start = {node: text, delim: '', n: 0}
            math.end = {node: text, delim: '', n: 0}
            doc.math.push(math)
          }
        }, ''],
        // After rendering, wrap each mjx-container so overflowing formulas can
        // scroll (theme helper btf.wrap adds the wrapper element + class).
        insertScript: [200, () => {
          document.querySelectorAll('mjx-container').forEach(node => {
            if (node.hasAttribute('display')) {
              btf.wrap(node, 'div', { class: 'mathjax-overflow' })
            } else {
              btf.wrap(node, 'span', { class: 'mathjax-overflow' })
            }
          });
        }, '', false]
      }
    }
  }
  
  // Inject the MathJax library itself, asynchronously.
  const script = document.createElement('script')
  script.src = 'https://cdn.jsdelivr.net/npm/mathjax/es5/tex-mml-chtml.min.js'
  script.id = 'MathJax-script'
  script.async = true
  document.head.appendChild(script)
} else {
  // MathJax already present (script lives in a .js-pjax container, so this
  // presumably runs again after a pjax page swap — re-typeset the new content).
  MathJax.startup.document.state(0)
  MathJax.texReset()
  MathJax.typesetPromise()
}</script><script>function loadValine () {
  // Instantiate the Valine comment widget for the current page.
  function initValine () {
    // Base options; Object.assign with a null second argument applies no
    // overrides and simply returns the base object unchanged.
    const options = {
      el: '#vcomment',
      appId: 'pgTqrs90s3uW64aIgVnjZkoo-gzGzoHsz',
      appKey: 'BlDuaZ3JC6jkoZ5xHIpAUJbH',
      avatar: 'monsterid',
      serverURLs: '',
      emojiMaps: "",
      path: window.location.pathname,
      visitor: false
    }
    const valine = new Valine(Object.assign(options, null))
  }

  // Valine may already be on the page (e.g. loaded by a previous pjax visit);
  // otherwise fetch the bundled copy lazily, then initialize.
  if (typeof Valine === 'function') initValine() 
  else getScript('/mundane/js/Valine.min.js').then(initValine)
}

// Template-generated dispatch: the literal comparisons below were baked in by
// the Hexo theme at build time (Valine is the configured comment system).
if ('Valine' === 'Valine' || !true) {
  // Lazy-load the comment widget when its container scrolls into view
  // (btf.loadComment is the theme's IntersectionObserver helper).
  if (true) btf.loadComment(document.getElementById('vcomment'),loadValine)
  else setTimeout(loadValine, 0)
} else {
  // Dead branch for this build: fallback hook for a non-Valine comment system.
  function loadOtherComment () {
    loadValine()
  }
}</script></div><script async data-pjax src="//busuanzi.ibruce.info/busuanzi/2.3/busuanzi.pure.mini.js"></script><div id="local-search"><div class="search-dialog"><nav class="search-nav"><span class="search-dialog-title">搜索</span><span id="loading-status"></span><button class="search-close-button"><i class="fas fa-times"></i></button></nav><div class="is-center" id="loading-database"><i class="fas fa-spinner fa-pulse"></i><span>  数据库加载中</span></div><div class="search-wrap"><div id="local-search-input"><div class="local-search-box"><input class="local-search-box--input" placeholder="搜索文章" type="text"/></div></div><hr/><div class="no-result" id="local-search-results"></div><div id="local-search-stats-wrap"></div></div></div><div id="search-mask"></div><script src="/mundane/js/search/local-search.js"></script></div></div></body></html>