<!DOCTYPE html>
<html lang="zh">
<head>
  <meta charset="UTF-8">
<!-- Do not cap zoom (maximum-scale): restricting pinch-zoom harms low-vision users (WCAG 1.4.4) -->
<meta name="viewport" content="width=device-width, initial-scale=1">
<meta name="theme-color" content="#222">
<meta name="generator" content="Hexo 5.3.0">


  <link rel="apple-touch-icon" sizes="180x180" href="/yuwanzi.io/images/apple-touch-icon-next.png">
  <link rel="icon" type="image/png" sizes="32x32" href="/yuwanzi.io/images/favicon-32x32-next.png">
  <link rel="icon" type="image/png" sizes="16x16" href="/yuwanzi.io/images/favicon-16x16-next.png">
  <link rel="mask-icon" href="/yuwanzi.io/images/logo.svg" color="#222">

<link rel="stylesheet" href="/yuwanzi.io/css/main.css">



<!-- Explicit https: — protocol-relative "//" URLs break when the page is opened from file:// and are discouraged -->
<link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/@fortawesome/fontawesome-free@5.15.1/css/all.min.css">
  <link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/animate.css@3.1.1/animate.min.css">

<script class="hexo-configurations">
    // Theme-wide NexT configuration serialized by Hexo at build time.
    // Read by NexT's client-side scripts (sidebar, motion animations, search i18n strings).
    var NexT = window.NexT || {};
    var CONFIG = {"hostname":"suyuhuan.gitee.io","root":"/yuwanzi.io/","images":"/yuwanzi.io/images","scheme":"Muse","version":"8.2.0","exturl":false,"sidebar":{"position":"left","display":"post","padding":18,"offset":12},"copycode":false,"bookmark":{"enable":false,"color":"#222","save":"auto"},"fancybox":false,"mediumzoom":false,"lazyload":false,"pangu":false,"comments":{"style":"tabs","active":null,"storage":true,"lazyload":false,"nav":null},"motion":{"enable":true,"async":false,"transition":{"post_block":"fadeIn","post_header":"fadeInDown","post_body":"fadeInDown","coll_header":"fadeInLeft","sidebar":"fadeInUp"}},"prism":false,"i18n":{"placeholder":"Suche...","empty":"We didn't find any results for the search: ${query}","hits_time":"${hits} results found in ${time} ms","hits":"${hits} results found"}};
  </script>
<meta name="description" content="这里是玉丸子的个人博客,与你一起发现更大的世界。">
<meta property="og:type" content="website">
<meta property="og:title" content="玉丸子 | Blog">
<!-- Kept consistent with the canonical URL (clean trailing-slash form, no index.html) -->
<meta property="og:url" content="https://suyuhuan.gitee.io/yuwanzi.io/page/6/">
<meta property="og:site_name" content="玉丸子 | Blog">
<meta property="og:description" content="这里是玉丸子的个人博客,与你一起发现更大的世界。">
<!-- og:locale requires a content value; page language is zh (Simplified Chinese) -->
<meta property="og:locale" content="zh_CN">
<meta property="article:author" content="玉丸子">
<meta name="twitter:card" content="summary">


<link rel="canonical" href="https://suyuhuan.gitee.io/yuwanzi.io/page/6/">


<script class="page-configurations">
  // https://hexo.io/docs/variables.html
  // Per-page flags consumed by NexT's client scripts: which sidebar panel to
  // show, whether this is the home page / a post page, and the page language.
  CONFIG.page = {
    sidebar: "",
    isHome : true,
    isPost : false,
    lang   : 'zh'
  };
</script>
<title>玉丸子 | Blog</title>
  




  <noscript>
  <style>
  /* No-JS fallback: NexT's motion scripts normally reveal these elements via
     entrance animations (they start hidden / offset). Without JavaScript they
     would never be shown, so force them visible and reset animated properties. */
  body { margin-top: 2rem; }

  .use-motion .menu-item,
  .use-motion .sidebar,
  .use-motion .post-block,
  .use-motion .pagination,
  .use-motion .comments,
  .use-motion .post-header,
  .use-motion .post-body,
  .use-motion .collection-header {
    visibility: visible;
  }

  .use-motion .header,
  .use-motion .site-brand-container .toggle,
  .use-motion .footer { opacity: initial; }

  .use-motion .site-title,
  .use-motion .site-subtitle,
  .use-motion .custom-logo-image {
    opacity: initial;
    top: initial;
  }

  .use-motion .logo-line {
    transform: scaleX(1);
  }

  /* Search overlay and sidebar tab nav are JS-driven; hide them and show the
     overview panel directly instead. */
  .search-pop-overlay, .sidebar-nav { display: none; }
  .sidebar-panel { display: block; }
  </style>
</noscript>

<link rel="alternate" href="/yuwanzi.io/atom.xml" title="玉丸子 | Blog" type="application/atom+xml">
</head>

<body itemscope itemtype="http://schema.org/WebPage" class="use-motion">
  <div class="headband"></div>

  <main class="main">
    <header class="header" itemscope itemtype="http://schema.org/WPHeader">
      <div class="header-inner"><div class="site-brand-container">
  <div class="site-nav-toggle">
    <div class="toggle" aria-label="Navigationsleiste an/ausschalten" role="button">
    </div>
  </div>

  <div class="site-meta">

    <a href="/yuwanzi.io/" class="brand" rel="start">
      <i class="logo-line"></i>
      <h1 class="site-title">玉丸子 | Blog</h1>
      <i class="logo-line"></i>
    </a>
  </div>

  <div class="site-nav-right">
    <div class="toggle popup-trigger">
    </div>
  </div>
</div>







</div>
        
  
  <div class="toggle sidebar-toggle" role="button">
    <span class="toggle-line"></span>
    <span class="toggle-line"></span>
    <span class="toggle-line"></span>
  </div>

  <aside class="sidebar">

    <div class="sidebar-inner sidebar-overview-active">
      <ul class="sidebar-nav">
        <li class="sidebar-nav-toc">
          Inhaltsverzeichnis
        </li>
        <li class="sidebar-nav-overview">
          Übersicht
        </li>
      </ul>

      <div class="sidebar-panel-container">
        <!--noindex-->
        <div class="post-toc-wrap sidebar-panel">
        </div>
        <!--/noindex-->

        <div class="site-overview-wrap sidebar-panel">
          <div class="site-author site-overview-item animated" itemprop="author" itemscope itemtype="http://schema.org/Person">
  <p class="site-author-name" itemprop="name">玉丸子</p>
  <div class="site-description" itemprop="description">这里是玉丸子的个人博客,与你一起发现更大的世界。</div>
</div>
<div class="site-state-wrap site-overview-item animated">
  <nav class="site-state">
      <div class="site-state-item site-state-posts">
          <a href="/yuwanzi.io/archives">
          <span class="site-state-item-count">68</span>
          <span class="site-state-item-name">Artikel</span>
        </a>
      </div>
      <div class="site-state-item site-state-categories">
            <a href="/yuwanzi.io/categories/">
        <span class="site-state-item-count">39</span>
        <span class="site-state-item-name">Kategorien</span></a>
      </div>
      <div class="site-state-item site-state-tags">
            <a href="/yuwanzi.io/tags/">
        <span class="site-state-item-count">46</span>
        <span class="site-state-item-name">schlagwörter</span></a>
      </div>
  </nav>
</div>



        </div>
      </div>
    </div>
  </aside>
  <div class="sidebar-dimmer"></div>


    </header>

    
  <div class="back-to-top" role="button">
    <i class="fa fa-arrow-up"></i>
    <span>0%</span>
  </div>

<noscript>
  <div class="noscript-warning">Theme NexT works best with JavaScript enabled</div>
</noscript>


    <div class="main-inner index posts-expand">

    


<div class="post-block">
  
  

  <article itemscope itemtype="http://schema.org/Article" class="post-content" lang="zh">
    <link itemprop="mainEntityOfPage" href="https://suyuhuan.gitee.io/yuwanzi.io/2016/07/29/2016-07-29-Spring-boot-autoconfigure/">

    <span hidden itemprop="author" itemscope itemtype="http://schema.org/Person">
      <meta itemprop="image" content="/yuwanzi.io/images/avatar.gif">
      <meta itemprop="name" content="玉丸子">
      <meta itemprop="description" content="这里是玉丸子的个人博客,与你一起发现更大的世界。">
    </span>

    <span hidden itemprop="publisher" itemscope itemtype="http://schema.org/Organization">
      <meta itemprop="name" content="玉丸子 | Blog">
    </span>
      <header class="post-header">
        <h2 class="post-title" itemprop="name headline">
          <a href="/yuwanzi.io/2016/07/29/2016-07-29-Spring-boot-autoconfigure/" class="post-title-link" itemprop="url">浅谈Spring Boot自动配置的运作原理</a>
        </h2>

        <div class="post-meta-container">
          <div class="post-meta">
    <span class="post-meta-item">
      <span class="post-meta-item-icon">
        <i class="far fa-calendar"></i>
      </span>
      <span class="post-meta-item-text">Veröffentlicht am</span>

      <time title="Erstellt: 2016-07-29 18:00:00" itemprop="dateCreated datePublished" datetime="2016-07-29T18:00:00+08:00">2016-07-29</time>
    </span>
      <span class="post-meta-item">
        <span class="post-meta-item-icon">
          <i class="far fa-calendar-check"></i>
        </span>
        <span class="post-meta-item-text">Bearbeitet am</span>
        <time title="Geändert am: 2020-11-07 08:58:17" itemprop="dateModified" datetime="2020-11-07T08:58:17+08:00">2020-11-07</time>
      </span>
    <span class="post-meta-item">
      <span class="post-meta-item-icon">
        <i class="far fa-folder"></i>
      </span>
      <span class="post-meta-item-text">in</span>
        <span itemprop="about" itemscope itemtype="http://schema.org/Thing">
          <a href="/yuwanzi.io/categories/%E5%90%8E%E7%AB%AF/" itemprop="url" rel="index"><span itemprop="name">后端</span></a>
        </span>
          . 
        <span itemprop="about" itemscope itemtype="http://schema.org/Thing">
          <a href="/yuwanzi.io/categories/%E5%90%8E%E7%AB%AF/Java/" itemprop="url" rel="index"><span itemprop="name">Java</span></a>
        </span>
          . 
        <span itemprop="about" itemscope itemtype="http://schema.org/Thing">
          <a href="/yuwanzi.io/categories/%E5%90%8E%E7%AB%AF/Java/Spring-Boot/" itemprop="url" rel="index"><span itemprop="name">Spring Boot</span></a>
        </span>
    </span>

  
</div>

        </div>
      </header>

    
    
    
    <div class="post-body" itemprop="articleBody">
          <h3 id="Spring-Boot"><a href="#Spring-Boot" class="headerlink" title="Spring Boot"></a>Spring Boot</h3><p>&nbsp;&nbsp;Spring Boot是由Pivotal团队提供的全新框架，其设计目的是用来简化新Spring应用的初始搭建以及开发过程。它使用“习惯优于配置”的理念可以让你的项目快速运行部署。使用Spring Boot可以不用或者只需要很少的Spring配置。</p>
<p>&nbsp;&nbsp;而Spring Boot核心的功能就是自动配置。它会根据在类路径中的jar、类自动配置Bean,当我们需要配置的Bean没有被Spring Boot提供支持时,也可以自定义自动配置。</p>
<h3 id="自动配置的运作原理"><a href="#自动配置的运作原理" class="headerlink" title="自动配置的运作原理"></a>自动配置的运作原理</h3><p><strong>&nbsp;&nbsp;Spring Boot自动配置其实是基于Spring 4.x提供的条件配置(Conditional)实现的。</strong></p>
<p>&nbsp;&nbsp;有关自动配置的源码在spring-boot-autoconfigure-1.x.x.jar内,如下图:</p>
<p><img src="http://ww3.sinaimg.cn/mw690/63503acbjw1f6b6vnzlfgj20co0kimyz.jpg"></p>
<h4 id="如何查看当前项目已启动和未启动的自动配置"><a href="#如何查看当前项目已启动和未启动的自动配置" class="headerlink" title="如何查看当前项目已启动和未启动的自动配置"></a>如何查看当前项目已启动和未启动的自动配置</h4><ol>
<li>在application.propertie中设置debug=true属性.</li>
<li>在运行jar时添加--debug指令.</li>
</ol>
<p>&nbsp;&nbsp;当使用以上两种任意一种方法后,启动项目会在控制台输出已启动和未启动的自动配置日志.</p>
<h4 id="SpringBootApplication"><a href="#SpringBootApplication" class="headerlink" title="@SpringBootApplication"></a>@SpringBootApplication</h4><p>&nbsp;&nbsp;生成Spring Boot项目时,会自动生成一个入口类.入口类使用了@SpringBootApplication注解,它是Spring Boot的核心注解,它是一个组合注解,核心功能由@EnableAutoConfiguration注解提供.</p>
<p><img src="http://ww1.sinaimg.cn/mw690/63503acbjw1f6b772ib2sj20he0ft77h.jpg" alt="@SpringBootApplication"></p>
<h4 id="EnableAutoConfiguration"><a href="#EnableAutoConfiguration" class="headerlink" title="@EnableAutoConfiguration"></a>@EnableAutoConfiguration</h4><p><img src="http://ww3.sinaimg.cn/mw690/63503acbjw1f6b772m676j20gq07qdhi.jpg" alt="@EnableAutoConfiguration"></p>
<ol>
<li>@Import注解提供导入配置的功能,它导入了EnableAutoConfigurationImportSelector.</li>
<li>EnableAutoConfigurationImportSelector使用函数SpringFactoriesLoader.loadFactoryNames扫描META-INF/spring.factories文件中声明的jar包.</li>
</ol>
<p><img src="http://ww1.sinaimg.cn/mw690/63503acbjw1f6b7h23prdj213c0kh7g2.jpg"></p>
<ol start="3">
<li>spring.factories文件在spring-boot-autoconfigure-1.x.x.jar中.</li>
</ol>
<p><img src="http://ww4.sinaimg.cn/mw690/63503acbjw1f6b7lru822j20ei05ggmu.jpg"></p>
<p><img src="http://ww3.sinaimg.cn/mw690/63503acbjw1f6b7lryzw2j20ut0ntaok.jpg"></p>
<ol start="4">
<li>spring.factories中声明的类基本上都使用了@Conditional注解.</li>
</ol>
<h4 id="Conditional"><a href="#Conditional" class="headerlink" title="Conditional"></a>Conditional</h4><p>&nbsp;&nbsp;Spring Boot在org.springframework.boot.autoconfigure.condition包下定义了以下注解.</p>
<table>
<thead>
<tr>
<th>注解名</th>
<th>作用</th>
</tr>
</thead>
<tbody><tr>
<td>@ConditionalOnJava</td>
<td>基于JVM版本作为判断条件.</td>
</tr>
<tr>
<td>@ConditionalOnBean</td>
<td>当容器中有指定的Bean的条件下.</td>
</tr>
<tr>
<td>@ConditionalOnClass</td>
<td>当类路径下有指定的类的条件下.</td>
</tr>
<tr>
<td>@ConditionalOnExpression</td>
<td>基于SpEL表达式作为判断条件.</td>
</tr>
<tr>
<td>@ConditionalOnJndi</td>
<td>在JNDI存在的条件下查找指定的位置.</td>
</tr>
<tr>
<td>@ConditionalOnMissingBean</td>
<td>当容器中没有指定Bean的情况下.</td>
</tr>
<tr>
<td>@ConditionalOnMissingClass</td>
<td>当类路径下没有指定的类的情况下.</td>
</tr>
<tr>
<td>@ConditionalOnNotWebApplication</td>
<td>当前项目不是web项目的条件下.</td>
</tr>
<tr>
<td>@ConditionalOnProperty</td>
<td>指定的属性是否有指定的值.</td>
</tr>
<tr>
<td>@ConditionalOnResource</td>
<td>类路径是否有指定的值.</td>
</tr>
<tr>
<td>@ConditionalOnSingleCandidate</td>
<td>当指定Bean在容器中只有一个,或者虽然有多个但是指定首选的Bean.</td>
</tr>
<tr>
<td>@ConditionalOnWebApplication</td>
<td>当前项目是web项目的条件下.</td>
</tr>
</tbody></table>
<p>&nbsp;&nbsp;以上这些注解都组合了@Conditional元注解.</p>
<h3 id="分析-ConditionalOnNotWebApplication"><a href="#分析-ConditionalOnNotWebApplication" class="headerlink" title="分析@ConditionalOnNotWebApplication"></a>分析@ConditionalOnNotWebApplication</h3><p><img src="http://ww4.sinaimg.cn/mw690/63503acbjw1f6b8n61hytj20fb04dgmt.jpg"></p>
<p>&nbsp;&nbsp;@ConditionalOnNotWebApplication使用的条件类是OnWebApplicationCondition.</p>
<figure class="highlight java"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br><span class="line">13</span><br><span class="line">14</span><br><span class="line">15</span><br><span class="line">16</span><br><span class="line">17</span><br><span class="line">18</span><br><span class="line">19</span><br><span class="line">20</span><br><span class="line">21</span><br><span class="line">22</span><br><span class="line">23</span><br><span class="line">24</span><br><span class="line">25</span><br><span class="line">26</span><br><span class="line">27</span><br><span class="line">28</span><br><span class="line">29</span><br><span class="line">30</span><br><span class="line">31</span><br><span class="line">32</span><br><span class="line">33</span><br><span class="line">34</span><br><span class="line">35</span><br><span class="line">36</span><br><span class="line">37</span><br><span class="line">38</span><br><span class="line">39</span><br><span class="line">40</span><br><span class="line">41</span><br></pre></td><td class="code"><pre><span class="line"><span class="keyword">package</span> org.springframework.boot.autoconfigure.condition;</span><br><span class="line"></span><br><span class="line"><span class="keyword">import</span> org.springframework.boot.autoconfigure.condition.ConditionOutcome;</span><br><span class="line"><span class="keyword">import</span> org.springframework.boot.autoconfigure.condition.ConditionalOnWebApplication;</span><br><span class="line"><span class="keyword">import</span> org.springframework.boot.autoconfigure.condition.SpringBootCondition;</span><br><span class="line"><span 
class="keyword">import</span> org.springframework.context.annotation.ConditionContext;</span><br><span class="line"><span class="keyword">import</span> org.springframework.core.annotation.Order;</span><br><span class="line"><span class="keyword">import</span> org.springframework.core.type.AnnotatedTypeMetadata;</span><br><span class="line"><span class="keyword">import</span> org.springframework.util.ClassUtils;</span><br><span class="line"><span class="keyword">import</span> org.springframework.util.ObjectUtils;</span><br><span class="line"><span class="keyword">import</span> org.springframework.web.context.WebApplicationContext;</span><br><span class="line"><span class="keyword">import</span> org.springframework.web.context.support.StandardServletEnvironment;</span><br><span class="line"></span><br><span class="line"><span class="meta">@Order(-2147483628)</span></span><br><span class="line"><span class="class"><span class="keyword">class</span> <span class="title">OnWebApplicationCondition</span> <span class="keyword">extends</span> <span class="title">SpringBootCondition</span> </span>&#123;</span><br><span class="line">    <span class="keyword">private</span> <span class="keyword">static</span> <span class="keyword">final</span> String WEB_CONTEXT_CLASS = <span class="string">&quot;org.springframework.web.context.support.GenericWebApplicationContext&quot;</span>;</span><br><span class="line"></span><br><span class="line">    OnWebApplicationCondition() &#123;</span><br><span class="line">    &#125;</span><br><span class="line"></span><br><span class="line">    <span class="function"><span class="keyword">public</span> ConditionOutcome <span class="title">getMatchOutcome</span><span class="params">(ConditionContext context, AnnotatedTypeMetadata metadata)</span> </span>&#123;</span><br><span class="line">        <span class="keyword">boolean</span> webApplicationRequired = metadata.isAnnotated(ConditionalOnWebApplication.class.getName());</span><br><span 
class="line">        ConditionOutcome webApplication = <span class="keyword">this</span>.isWebApplication(context, metadata);</span><br><span class="line">        <span class="keyword">return</span> webApplicationRequired &amp;&amp; !webApplication.isMatch()?ConditionOutcome.noMatch(webApplication.getMessage()):(!webApplicationRequired &amp;&amp; webApplication.isMatch()?ConditionOutcome.noMatch(webApplication.getMessage()):ConditionOutcome.match(webApplication.getMessage()));</span><br><span class="line">    &#125;</span><br><span class="line"></span><br><span class="line">    <span class="function"><span class="keyword">private</span> ConditionOutcome <span class="title">isWebApplication</span><span class="params">(ConditionContext context, AnnotatedTypeMetadata metadata)</span> </span>&#123;</span><br><span class="line">        <span class="keyword">if</span>(!ClassUtils.isPresent(<span class="string">&quot;org.springframework.web.context.support.GenericWebApplicationContext&quot;</span>, context.getClassLoader())) &#123;</span><br><span class="line">            <span class="keyword">return</span> ConditionOutcome.noMatch(<span class="string">&quot;web application classes not found&quot;</span>);</span><br><span class="line">        &#125; <span class="keyword">else</span> &#123;</span><br><span class="line">            <span class="keyword">if</span>(context.getBeanFactory() != <span class="keyword">null</span>) &#123;</span><br><span class="line">                String[] scopes = context.getBeanFactory().getRegisteredScopeNames();</span><br><span class="line">                <span class="keyword">if</span>(ObjectUtils.containsElement(scopes, <span class="string">&quot;session&quot;</span>)) &#123;</span><br><span class="line">                    <span class="keyword">return</span> ConditionOutcome.match(<span class="string">&quot;found web application \&#x27;session\&#x27; scope&quot;</span>);</span><br><span class="line">                &#125;</span><br><span 
class="line">            &#125;</span><br><span class="line"></span><br><span class="line">            <span class="keyword">return</span> context.getEnvironment() <span class="keyword">instanceof</span> StandardServletEnvironment?ConditionOutcome.match(<span class="string">&quot;found web application StandardServletEnvironment&quot;</span>):(context.getResourceLoader() <span class="keyword">instanceof</span> WebApplicationContext?ConditionOutcome.match(<span class="string">&quot;found web application WebApplicationContext&quot;</span>):ConditionOutcome.noMatch(<span class="string">&quot;not a web application&quot;</span>));</span><br><span class="line">        &#125;</span><br><span class="line">    &#125;</span><br><span class="line">&#125;</span><br></pre></td></tr></table></figure>
<p>&nbsp;&nbsp;OnWebApplicationCondition在isWebApplication函数中进行条件判断.</p>
<ol>
<li>判断GenericWebApplicationContext是否在类路径中.</li>
<li>判断容器中是否存在名为session的scope.</li>
<li>判断当前容器的Environment是否为StandardServletEnvironment.</li>
<li>判断当前的ResourceLoader是否为WebApplicationContext.</li>
<li>最后通过ConditionOutcome.isMatch函数返回布尔值确定条件.</li>
</ol>
<h3 id="分析自动配置的实现"><a href="#分析自动配置的实现" class="headerlink" title="分析自动配置的实现"></a>分析自动配置的实现</h3><p>&nbsp;&nbsp;以http编码为例,如果在常规项目中则需要在web.xml中配置一个filter.而Spring Boot内置了http编码的自动配置,无需配置filter.</p>
<h4 id="properties配置类"><a href="#properties配置类" class="headerlink" title="properties配置类"></a>properties配置类</h4><p><img src="http://ww2.sinaimg.cn/mw690/63503acbjw1f6b9s692hxj20p30geq7e.jpg"></p>
<h4 id="自动配置Bean"><a href="#自动配置Bean" class="headerlink" title="自动配置Bean"></a>自动配置Bean</h4><p><img src="http://ww1.sinaimg.cn/mw690/63503acbjw1f6b9q6z1y8j20wc0idwkd.jpg"></p>
<p>&nbsp;&nbsp;@ConditionalOnProperty:当设置spring.http.encoding=enabled的情况下,如果没有设置则默认为true,即符合条件.</p>
<p>&nbsp;&nbsp;characterEncodingFilter()返回OrderedCharacterEncodingFilter这个对象,并根据注入的HttpEncodingProperties配置类设置参数.</p>
<p>&nbsp;&nbsp; @ConditionalOnMissingBean({CharacterEncodingFilter.class}):在容器中没有这个Bean的时候则新建这个Bean.</p>
<h3 id="end"><a href="#end" class="headerlink" title="end"></a>end</h3><blockquote>
<p>资料参考于 JavaEE开发的颠覆者: Spring Boot实战</p>
</blockquote>

      
    </div>

    
    
    

    <footer class="post-footer">
        <div class="post-eof"></div>
      
    </footer>
  </article>
</div>




    


<div class="post-block">
  
  

  <article itemscope itemtype="http://schema.org/Article" class="post-content" lang="zh">
    <link itemprop="mainEntityOfPage" href="https://suyuhuan.gitee.io/yuwanzi.io/2016/07/19/2016-07-19-Hadoop06-Storm/">

    <span hidden itemprop="author" itemscope itemtype="http://schema.org/Person">
      <meta itemprop="image" content="/yuwanzi.io/images/avatar.gif">
      <meta itemprop="name" content="玉丸子">
      <meta itemprop="description" content="这里是玉丸子的个人博客,与你一起发现更大的世界。">
    </span>

    <span hidden itemprop="publisher" itemscope itemtype="http://schema.org/Organization">
      <meta itemprop="name" content="玉丸子 | Blog">
    </span>
      <header class="post-header">
        <h2 class="post-title" itemprop="name headline">
          <a href="/yuwanzi.io/2016/07/19/2016-07-19-Hadoop06-Storm/" class="post-title-link" itemprop="url">Hadoop学习笔记(6)-Storm</a>
        </h2>

        <div class="post-meta-container">
          <div class="post-meta">
    <span class="post-meta-item">
      <span class="post-meta-item-icon">
        <i class="far fa-calendar"></i>
      </span>
      <span class="post-meta-item-text">Veröffentlicht am</span>

      <time title="Erstellt: 2016-07-19 18:00:00" itemprop="dateCreated datePublished" datetime="2016-07-19T18:00:00+08:00">2016-07-19</time>
    </span>
      <span class="post-meta-item">
        <span class="post-meta-item-icon">
          <i class="far fa-calendar-check"></i>
        </span>
        <span class="post-meta-item-text">Bearbeitet am</span>
        <time title="Geändert am: 2020-11-07 08:58:17" itemprop="dateModified" datetime="2020-11-07T08:58:17+08:00">2020-11-07</time>
      </span>
    <span class="post-meta-item">
      <span class="post-meta-item-icon">
        <i class="far fa-folder"></i>
      </span>
      <span class="post-meta-item-text">in</span>
        <span itemprop="about" itemscope itemtype="http://schema.org/Thing">
          <a href="/yuwanzi.io/categories/%E5%90%8E%E7%AB%AF/" itemprop="url" rel="index"><span itemprop="name">后端</span></a>
        </span>
          . 
        <span itemprop="about" itemscope itemtype="http://schema.org/Thing">
          <a href="/yuwanzi.io/categories/%E5%90%8E%E7%AB%AF/%E5%A4%A7%E6%95%B0%E6%8D%AE/" itemprop="url" rel="index"><span itemprop="name">大数据</span></a>
        </span>
          . 
        <span itemprop="about" itemscope itemtype="http://schema.org/Thing">
          <a href="/yuwanzi.io/categories/%E5%90%8E%E7%AB%AF/%E5%A4%A7%E6%95%B0%E6%8D%AE/Storm/" itemprop="url" rel="index"><span itemprop="name">Storm</span></a>
        </span>
    </span>

  
</div>

        </div>
      </header>

    
    
    
    <div class="post-body" itemprop="articleBody">
          <p><img src="http://ww3.sinaimg.cn/mw690/63503acbjw1f5z3iwds07j207g03nmxf.jpg"></p>
<h3 id="概述"><a href="#概述" class="headerlink" title="概述"></a>概述</h3><p>&nbsp;&nbsp;Storm是一个开源的分布式实时计算系统，可以简单、可靠的处理大量的数据流。被称作“实时的hadoop”。Storm有很多使用场景：如实时分析，在线机器学习，持续计算， 分布式RPC，ETL等等。Storm支持水平扩展，具有高容错性，保证每个消息都会得到处理，而且处理速度很快。Storm的部署和运维都很便捷，而且更为重要的是可以使用任意编程语言来开发应用。</p>
<h3 id="Storm的特点"><a href="#Storm的特点" class="headerlink" title="Storm的特点"></a>Storm的特点</h3><p><strong>简单的编程模型</strong></p>
<p>&nbsp;&nbsp;在大数据处理方面相信大家对hadoop已经耳熟能详，基于Google Map/Reduce来实现的Hadoop为开发者提供了map、reduce原语，使并行批处理程序变得非常地简单和优美。</p>
<p>&nbsp;&nbsp;同样，Storm也为大数据 的实时计算提供了一些简单优美的原语，这大大降低了开发并行实时处理的任务的复杂性，帮助你快速、高效的开发应用。</p>
<p><strong>水平扩展</strong></p>
<p>&nbsp;&nbsp;在Storm集群中真正运行topology的主要有三个实体：工作进程、线程和任务。Storm集群中的每台机器上都可以运行多个工作进程，每个 工作进程又可创建多个线程，每个线程可以执行多个任务，任务是真正进行数据处理的实体，我们开发的spout、bolt就是作为一个或者多个任务的方式执行的。</p>
<p>&nbsp;&nbsp;计算任务在多个线程、进程和服务器之间并行进行，支持灵活的水平扩展。</p>
<p><strong>支持多种编程语言</strong></p>
<p>&nbsp;&nbsp;你可以在Storm之上使用各种编程语言。默认支持Clojure、Java、Ruby和Python。要增加对其他语言的支持，只需实现一个简单的Storm通信协议即可。</p>
<p><strong>高可靠性</strong></p>
<p>&nbsp;&nbsp;Storm保证每个消息至少能得到一次完整处理。任务失败时，它会负责从消息源重试消息。</p>
<p>&nbsp;&nbsp;spout发出的消息后续可能会触发产生成千上万条消息，可以形象的理解为一棵消息树，其中spout发出的消息为树根，Storm会跟踪这棵消息树的处理情况，只有当这棵消息树中的所有消息都被处理了，Storm才会认为spout发出的这个消息已经被“完全处理”。如果这棵消息树中的任何一个消息处理失败了，或者整棵消息树在限定的时间内没有“完全处理”，那么spout发出的消息就会重发。</p>
<p><strong>高容错性</strong></p>
<p>&nbsp;&nbsp;Storm会管理工作进程和节点的故障。</p>
<p>&nbsp;&nbsp;如果在消息处理过程中出了一些异常，Storm会重新安排这个出问题的处理单元。Storm保证一个处理单元永远运行（除非你显式杀掉这个处理单元）。</p>
<p>&nbsp;&nbsp;当然，如果处理单元中存储了中间状态，那么当处理单元重新被Storm启动的时候，需要应用自己处理中间状态的恢复。</p>
<p><strong>本地模式</strong></p>
<p>&nbsp;&nbsp;Storm有一个“本地模式”，可以在处理过程中完全模拟Storm集群。这让你可以快速进行开发和单元测试。</p>
<h3 id="Storm架构"><a href="#Storm架构" class="headerlink" title="Storm架构"></a>Storm架构</h3><p><img src="http://ww4.sinaimg.cn/mw690/63503acbjw1f5za3m6lqaj20go0f5gn1.jpg"></p>
<p>&nbsp;&nbsp;Storm集群由一个主节点和多个工作节点组成。主节点运行了一个名为“Nimbus”的守护进程，用于分配代码、布置任务及故障检测。每个工作节 点都运行了一个名为“Supervisor”的守护进程，用于监听工作，开始并终止工作进程。Nimbus和Supervisor都能快速失败，而且是无状态的，这样一来它们就变得十分健壮，两者的协调工作是由ApacheZooKeeper来完成的。</p>
<h4 id="Stream"><a href="#Stream" class="headerlink" title="Stream"></a>Stream</h4><p>&nbsp;&nbsp;Stream是一个数据流的抽象。这是一个没有边界的Tuple序列,而这些Tuple序列会以一种分布式的方式并行地创建和处理。</p>
<p>&nbsp;&nbsp;对消息流的定义主要就是对消息流里面的tuple 进行定义，为了更好地使用tuple，需要给tuple 里的每个字段取一个名字，并且不同的tuple 字段对应的类型要相同，即两个tuple 的第一个字段类型相同，第二个字段类型相同，但是第一个字段和第二个字段的类型可以不同。默认情况下，tuple 的字段类型可以为integer、long、short、byte、string、double、float、boolean 和byte array 等基本类型，也可以自定义类型，只需要实现相应的序列化接口。</p>
<p>&nbsp;&nbsp;每一个消息流在定义的时候需要被分配一个id，最常见的消息流是单向的消息流，在Storm 中OutputFieldsDeclarer 定义了一些方法，让你可以定义一个Stream 而不用指定这个id。在这种情况下，这个Stream 会有个默认的id: 1。</p>
<h4 id="Topologies"><a href="#Topologies" class="headerlink" title="Topologies"></a>Topologies</h4><p>&nbsp;&nbsp;<strong>Topology是由Stream Grouping连接起来的Spout和Bolt节点网络。</strong></p>
<p>&nbsp;&nbsp;在 Storm 中，一个实时计算应用程序的逻辑被封装在一个称为Topology 的对象中，也称为计算拓扑。Topology 有点类似于Hadoop 中的MapReduce Job，但是它们之间的关键区别在于，一个MapReduce Job 最终总是会结束的，然而一个Storm 的Topology 会一直运行。在逻辑上，一个Topology 是由一些Spout（消息的发送者）和Bolt（消息的处理者）组成图状结构，而链接Spouts 和Bolts 的则是Stream Groupings。</p>
<h4 id="Spouts-amp-Bolts"><a href="#Spouts-amp-Bolts" class="headerlink" title="Spouts&amp;Bolts"></a>Spouts&amp;Bolts</h4><p><img src="http://ww4.sinaimg.cn/mw690/63503acbjw1f5za9p00fyj20mo0a975d.jpg"></p>
<p><strong>Spouts</strong></p>
<p>&nbsp;&nbsp;Spouts 是Storm集群中一个计算任务（Topology）中消息流的生产者，Spouts一般是从别的数据源（例如，数据库或者文件系统）加载数据，然后向Topology中发射消息。</p>
<p>&nbsp;&nbsp;Spouts即可以是可靠的,也可以是不可靠的。</p>
<p>&nbsp;&nbsp;在一个Topology中存在两种Spouts，一种是可靠的Spouts，一种是非可靠的Spouts，可靠的Spouts 在一个tuple 没有成功处理的时候会重新发射该tuple，以保证消息被正确地处理。不可靠的Spouts 在发射一个tuple 之后，不会再重新发射该tuple，即使该tuple 处理失败。每个Spouts 都可以发射多个消息流，要实现这样的效果，可以使用OutFieldsDeclarer.declareStream 来定义多个Stream，然后使用SpoutOutputCollector 来发射指定的Stream。</p>
<p>&nbsp;&nbsp;在Storm 的编程接口中，Spout 类最重要的方法是nextTuple()方法，使用该方法可以发射一个消息tuple 到Topology 中，或者简单地直接返回，如果没有消息要发射。需要注意的是，nextTuple 方法的实现不能阻塞Spout，因为Storm在同一线程上调用Spout 的所有方法。Spout 类的另外两个重要的方法是ack()和fail()，一个tuple 被成功处理完成后，ack()方法被调用，否则就调用fail()方法。注意，只有对于可靠的Spout，才会调用ack()和fail()方法。</p>
<p><strong>Bolts</strong></p>
<p>&nbsp;&nbsp;所有消息处理的逻辑都在Bolt 中完成，在Bolt 中可以完成如过滤、分类、聚集、计算、查询数据库等操作。Bolt 可以做简单的消息处理操作，例如，Bolt 可以不做任何操作，只是将接收到的消息转发给其他的Bolt。Bolt 也可以做复杂的消息流的处理，从而需要很多个Bolt。在实际使用中，一条消息往往需要经过多个处理步骤，例如，计算一个班级中成绩在前十名的同学，首先需要对所有同学的成绩进行排序，然后在排序过的成绩中选出前十名的<br>成绩的同学。所以在一个Topology 中，往往有很多个Bolt，从而形成了复杂的流处理网络。</p>
<p>&nbsp;&nbsp;Bolts可以发射多条消息流。</p>
<ol>
<li>使用OutputFieldsDeclarer.declareStream定义Stream。</li>
<li>使用OutputCollector.emit来选择要发射的Stream。</li>
</ol>
<p>&nbsp;&nbsp;Bolts的主要方法是execute。</p>
<p>&nbsp;&nbsp;Bolts以Tuple作为输入,使用OutputCollector来发射Tuple,通过调用OutputCollector.ack()通知这个Tuple的发射者Spout。</p>
<p>&nbsp;&nbsp;Bolts一般流程。</p>
<p>&nbsp;&nbsp;处理一个输入Tuple,发射0个或多个Tuple,然后调用ack()通知Storm自己已经处理过这个Tuple了。Storm提供了一个IBasicBolt会自动调用ack()。</p>
<h4 id="Stream-Groupings"><a href="#Stream-Groupings" class="headerlink" title="Stream Groupings"></a>Stream Groupings</h4><p>&nbsp;&nbsp;定义一个 Topology 的其中一步是定义每个Bolt 接收什么样的流作为输入。Stream Grouping 就是用来定义一个Stream 应该如何分配给Bolts 上面的多个Tasks。</p>
<p>&nbsp;&nbsp;Storm里有7种类型的Stream Grouping。</p>
<ol>
<li>Shuffle Grouping 随机分组,随机派发Stream里面的Tuple,保证每个Bolt接收到的Tuple数量大致相同。</li>
<li>Fields Grouping 按字段分组,以id举例。具有相同id的Tuple会被分到相同的Bolt中的一个Task,而不同id的Tuple会被分到不同的Bolt中的Task。</li>
<li>All Grouping 广播,对于每一个Tuple,所有的Bolts都会收到。</li>
<li>Global Grouping 全局分组,这个Tuple被分配到Storm中的一个Bolt的其中一个Task。具体一点就是分配给id值最低的那个Task。</li>
<li>Non Grouping 不分组,Stream不关心到底谁会收到它的Tuple。目前这种分组和Shuffle Grouping是一样的效果,有一点不同的是Storm会把这个Bolt放到这个Bolt的订阅者同一个线程中去执行。</li>
<li>Direct Grouping 直接分组,这是一种比较特别的分组方法,用这种分组意味着消息的发送者指定由消息接收者的哪个Task处理这个消息。只有被声明为Direct Stream的消息流可以声明这种分组方法。而且这种消息Tuple必须使用emitDirect方法来发射。消息处理者可以通过TopologyContext来获取处理它的消息的Task的id(OutputCollector.emit方法也会返回Task的id)。</li>
<li>Local or Shuffle Grouping 如果目标Bolt有一个或者多个Task在同一个工作进程中,Tuple将会被随机发射给这些Tasks。否则,和普通的Shuffle Grouping行为一致。</li>
</ol>
<h4 id="Workers"><a href="#Workers" class="headerlink" title="Workers"></a>Workers</h4><p><img src="http://ww3.sinaimg.cn/mw690/63503acbjw1f5zdzh1eo9j20bq08h0t6.jpg"></p>
<ol>
<li>每个Supervisor中运行着多个Workers进程。</li>
<li>每个Worker进程中运行着多个Executor线程。</li>
<li>每个Executor线程中运行着若干个相同的Task(Spout/Bolt)。</li>
</ol>
<p>&nbsp;&nbsp;一个 Topology 可能会在一个或者多个工作进程里面执行，每个工作进程执行整个Topology 的一部分。比如，对于并行度是300 的Topology 来说，如果我们使用50 个工作进程来执行，那么每个工作进程会处理其中的6 个Tasks（其实就是每个工作进程里面分配6 个线程）。Storm 会尽量均匀地把工作分配给所有的工作进程。</p>
<h4 id="Task"><a href="#Task" class="headerlink" title="Task"></a>Task</h4><p>&nbsp;&nbsp;在 Storm 集群上，每个Spout 和Bolt 都是由很多个Task 组成的，每个Task对应一个线程，流分组策略就是定义如何从一堆Task 发送tuple 到另一堆Task。在实现自己的Topology 时可以调用TopologyBuilder.setSpout() 和TopBuilder.setBolt()方法来设置并行度，也就是有多少个Task。</p>
<h3 id="Storm安装部署"><a href="#Storm安装部署" class="headerlink" title="Storm安装部署"></a>Storm安装部署</h3><ol>
<li>安装jdk。</li>
<li>搭建Zookeeper集群。</li>
<li>下载并解压Storm。</li>
<li>修改storm.yaml配置文件。<ul>
<li>storm.zookeeper.servers: Storm集群使用的Zookeeper集群地址。例如:<br>storm.zookeeper.servers:<br>- "192.168.145.141"<br>- "192.168.145.142"</li>
<li>如果Zookeeper没有使用默认端口,那么还需要修改storm.zookeeper.port。</li>
<li>storm.local.dir Nimbus和Supervisor进程用于存储少量状态,如jars、confs等的本地磁盘目录,需要提前创建该目录并给予足够的访问权限。然后在storm.yaml中配置该目录,例如:<br>storm.local.dir: "/home/application/storm/workdir"</li>
</ul>
</li>
</ol>
<h4 id="注意事项"><a href="#注意事项" class="headerlink" title="注意事项"></a>注意事项</h4><p>&nbsp;&nbsp;启动Storm后台进程时,需要对conf/storm.yaml配置文件中设置的storm.local.dir目录具有写权限。</p>
<p>&nbsp;&nbsp;Storm后台进程被启动时,将在Storm安装目录下的logs/子目录下生成各个进程的日志文件。</p>
<p>&nbsp;&nbsp;Storm UI必须和Storm Nimbus部署在同一台机器上,否则UI无法正常工作,因为UI进程会检查本机是否存在Nimbus链接。</p>
<h3 id="常用命令"><a href="#常用命令" class="headerlink" title="常用命令"></a>常用命令</h3><table>
<thead>
<tr>
<th>命令描述</th>
<th>格式</th>
<th>例子</th>
</tr>
</thead>
<tbody><tr>
<td>启动Nimbus</td>
<td>storm nimbus</td>
<td>storm nimbus</td>
</tr>
<tr>
<td>启动Supervisor</td>
<td>storm supervisor</td>
<td>storm supervisor</td>
</tr>
<tr>
<td>启动UI</td>
<td>storm ui</td>
<td>storm ui</td>
</tr>
</tbody></table>
<h4 id="提交Topologies"><a href="#提交Topologies" class="headerlink" title="提交Topologies"></a>提交Topologies</h4><p><strong>格式</strong> </p>
<p>storm jar 【jar路径】 【拓扑包名.拓扑类名】【stormIP地址】【storm端口】【拓扑名称】【参数】</p>
<p><strong>Example</strong></p>
<figure class="highlight plain"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br></pre></td><td class="code"><pre><span class="line">storm jar &#x2F;home&#x2F;storm&#x2F;hello.jar</span><br><span class="line">storm.hello.WordCountTopology wordcountTop</span><br><span class="line">提交hello.jar到远程集群,并启动wordcountTop拓扑</span><br></pre></td></tr></table></figure>
<h4 id="停止Topologies"><a href="#停止Topologies" class="headerlink" title="停止Topologies"></a>停止Topologies</h4><p><strong>格式</strong></p>
<p>storm kill [拓扑名称]</p>
<p><strong>Example</strong></p>
<figure class="highlight plain"><table><tr><td class="gutter"><pre><span class="line">1</span><br></pre></td><td class="code"><pre><span class="line">storm kill wordcountTop</span><br></pre></td></tr></table></figure>
<h3 id="API"><a href="#API" class="headerlink" title="API"></a>API</h3><h4 id="Spouts"><a href="#Spouts" class="headerlink" title="Spouts"></a>Spouts</h4><p>&nbsp;&nbsp; Spout是Stream的消息产生源， Spout组件的实现可以通过继承BaseRichSpout类或者其他*Spout类来完成，也可以通过实现IRichSpout接口来实现。</p>
<p><strong>open</strong></p>
<p>&nbsp;&nbsp;当一个Task被初始化的时候会调用open()。一般都会在此方法中对发送Tuple的对象SpoutOutputCollector和配置对象TopologyContext初始化。</p>
<p><strong>getComponentConfiguration</strong></p>
<p>&nbsp;&nbsp;此方法用于声明针对当前组件的特殊的Configuration配置。</p>
<p><strong>nextTuple</strong></p>
<p>&nbsp;&nbsp;这是Spout类中最重要的一个方法。发射一个Tuple到Topology都是通过这个方法来实现的。</p>
<p><strong>declareOutputFields</strong></p>
<p>&nbsp;&nbsp;此方法用于声明当前Spout的Tuple发送流。Stream的定义是通过OutputFieldsDeclare.declare方法完成的,其中的参数包括了发送的Fields。</p>
<p>&nbsp;&nbsp;另外，除了上述几个方法之外，还有ack、fail和close方法等。</p>
<p>&nbsp;&nbsp;Storm在监测到一个Tuple被成功处理之后会调用ack方法，处理失败会调用fail方法。这两个方法在BaseRichSpout等类中已经被隐式的实现了。</p>
<h4 id="Bolts"><a href="#Bolts" class="headerlink" title="Bolts"></a>Bolts</h4><p>&nbsp;&nbsp; Bolt类接收由Spout或者其他上游Bolt类发来的Tuple，对其进行处理。Bolt组件的实现可以通过继承BaseRichBolt类或者实现IRichBolt接口来完成。</p>
<p><strong>prepare</strong></p>
<p>&nbsp;&nbsp;此方法与Spouts的open方法类似,为Bolt提供了OutputCollector,用来从Bolt中发射Tuple。Bolt中Tuple的发射可以在prepare中、execute中、cleanup等方法中进行,一般都是在execute中。</p>
<p><strong>getComponentConfiguration</strong></p>
<p>&nbsp;&nbsp;与Spouts类似。</p>
<p><strong>execute</strong></p>
<p>&nbsp;&nbsp;  这是Bolt中最关键的一个方法，对于Tuple的处理都可以放到此方法中进行。具体的发送也是在execute中通过调用emit方法来完成的。</p>
<p>&nbsp;&nbsp;emit有两种情况，一种是emit方法中有两个参数，另一个种是有一个参数。</p>
<ol>
<li>emit有一个参数：此唯一的参数是发送到下游Bolt的Tuple，此时，由上游发来的旧的Tuple在此隔断，新的Tuple和旧的Tuple不再属于同一棵Tuple树。新的Tuple另起一个新的Tuple树。</li>
<li>emit有两个参数：第一个参数是旧的Tuple的输入流，第二个参数是发往下游Bolt的新的Tuple流。此时，新的Tuple和旧的Tuple是仍然属于同一棵Tuple树，即，如果下游的Bolt处理Tuple失败，则会向上传递到当前Bolt，当前Bolt根据旧的Tuple流继续往上游传递，申请重发失败的Tuple。保证Tuple处理的可靠性。</li>
</ol>
<p><strong>declareOutputFields</strong></p>
<p>&nbsp;&nbsp;用于声明当前Bolt发送的Tuple中包含的字段。</p>
<h4 id="Topology-Example"><a href="#Topology-Example" class="headerlink" title="Topology Example"></a>Topology Example</h4><figure class="highlight java"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br><span class="line">13</span><br><span class="line">14</span><br><span class="line">15</span><br><span class="line">16</span><br><span class="line">17</span><br><span class="line">18</span><br><span class="line">19</span><br><span class="line">20</span><br><span class="line">21</span><br><span class="line">22</span><br><span class="line">23</span><br><span class="line">24</span><br><span class="line">25</span><br><span class="line">26</span><br><span class="line">27</span><br><span class="line">28</span><br><span class="line">29</span><br><span class="line">30</span><br><span class="line">31</span><br></pre></td><td class="code"><pre><span class="line"><span class="keyword">public</span> <span class="class"><span class="keyword">class</span> <span class="title">RandomWordSpout</span> <span class="keyword">extends</span> <span class="title">BaseRichSpout</span> </span>&#123;</span><br><span class="line"></span><br><span class="line">	<span class="comment">// 初始化数据字典</span></span><br><span class="line">	<span class="keyword">private</span> <span class="keyword">final</span> <span class="keyword">static</span> String[] words = &#123; <span class="string">&quot;java&quot;</span>, <span class="string">&quot;c&quot;</span>, <span class="string">&quot;c++&quot;</span>, <span class="string">&quot;c#&quot;</span>, <span class="string">&quot;python&quot;</span>, <span class="string">&quot;go&quot;</span>, <span 
class="string">&quot;javascript&quot;</span>,</span><br><span class="line">			<span class="string">&quot;swift&quot;</span> &#125;;</span><br><span class="line"></span><br><span class="line">	<span class="keyword">private</span> SpoutOutputCollector collector;</span><br><span class="line"></span><br><span class="line">	<span class="meta">@Override</span></span><br><span class="line">	<span class="function"><span class="keyword">public</span> <span class="keyword">void</span> <span class="title">nextTuple</span><span class="params">()</span> </span>&#123;</span><br><span class="line">		Random random = <span class="keyword">new</span> Random();</span><br><span class="line">		<span class="comment">// 获取随机的单词</span></span><br><span class="line">		String word = words[random.nextInt(words.length)];</span><br><span class="line">		<span class="comment">// 发射消息</span></span><br><span class="line">		<span class="keyword">this</span>.collector.emit(<span class="keyword">new</span> Values(word));</span><br><span class="line">		<span class="comment">// 休息2秒</span></span><br><span class="line">		Utils.sleep(<span class="number">2000</span>);</span><br><span class="line">	&#125;</span><br><span class="line"></span><br><span class="line">	<span class="meta">@Override</span></span><br><span class="line">	<span class="function"><span class="keyword">public</span> <span class="keyword">void</span> <span class="title">open</span><span class="params">(Map arg0, TopologyContext arg1, SpoutOutputCollector collector)</span> </span>&#123;</span><br><span class="line">		<span class="keyword">this</span>.collector = collector;</span><br><span class="line">	&#125;</span><br><span class="line"></span><br><span class="line">	<span class="meta">@Override</span></span><br><span class="line">	<span class="function"><span class="keyword">public</span> <span class="keyword">void</span> <span class="title">declareOutputFields</span><span class="params">(OutputFieldsDeclarer declarer)</span> 
</span>&#123;</span><br><span class="line">		<span class="comment">// 声明字段名</span></span><br><span class="line">		declarer.declare(<span class="keyword">new</span> Fields(<span class="string">&quot;initName&quot;</span>));</span><br><span class="line">	&#125;</span><br><span class="line"></span><br><span class="line">&#125;</span><br></pre></td></tr></table></figure>
<figure class="highlight java"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br><span class="line">13</span><br><span class="line">14</span><br><span class="line">15</span><br><span class="line">16</span><br><span class="line">17</span><br><span class="line">18</span><br></pre></td><td class="code"><pre><span class="line"><span class="keyword">public</span> <span class="class"><span class="keyword">class</span> <span class="title">UpperBolt</span> <span class="keyword">extends</span> <span class="title">BaseBasicBolt</span> </span>&#123;</span><br><span class="line"></span><br><span class="line">	<span class="meta">@Override</span></span><br><span class="line">	<span class="function"><span class="keyword">public</span> <span class="keyword">void</span> <span class="title">execute</span><span class="params">(Tuple tuple, BasicOutputCollector collector)</span> </span>&#123;</span><br><span class="line">		<span class="comment">// 获得上个bolt传入的initName</span></span><br><span class="line">		String initName = tuple.getString(<span class="number">0</span>);</span><br><span class="line">		<span class="comment">// 将initName转为大写</span></span><br><span class="line">		String upperCase = initName.toUpperCase();</span><br><span class="line">		<span class="comment">// 发射消息</span></span><br><span class="line">		collector.emit(<span class="keyword">new</span> Values(upperCase));</span><br><span class="line">	&#125;</span><br><span class="line"></span><br><span class="line">	<span class="meta">@Override</span></span><br><span class="line">	<span class="function"><span class="keyword">public</span> <span class="keyword">void</span> 
<span class="title">declareOutputFields</span><span class="params">(OutputFieldsDeclarer declarer)</span> </span>&#123;</span><br><span class="line">		declarer.declare(<span class="keyword">new</span> Fields(<span class="string">&quot;upperName&quot;</span>));</span><br><span class="line">	&#125;</span><br><span class="line"></span><br><span class="line">&#125;</span><br></pre></td></tr></table></figure>
<figure class="highlight java"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br><span class="line">13</span><br><span class="line">14</span><br><span class="line">15</span><br><span class="line">16</span><br><span class="line">17</span><br><span class="line">18</span><br><span class="line">19</span><br><span class="line">20</span><br><span class="line">21</span><br><span class="line">22</span><br><span class="line">23</span><br><span class="line">24</span><br><span class="line">25</span><br><span class="line">26</span><br><span class="line">27</span><br><span class="line">28</span><br><span class="line">29</span><br><span class="line">30</span><br><span class="line">31</span><br><span class="line">32</span><br><span class="line">33</span><br><span class="line">34</span><br><span class="line">35</span><br></pre></td><td class="code"><pre><span class="line"><span class="keyword">public</span> <span class="class"><span class="keyword">class</span> <span class="title">PrefixBolt</span> <span class="keyword">extends</span> <span class="title">BaseBasicBolt</span> </span>&#123;</span><br><span class="line"></span><br><span class="line">	<span class="keyword">private</span> FileWriter fileWriter;</span><br><span class="line"></span><br><span class="line">	<span class="meta">@Override</span></span><br><span class="line">	<span class="function"><span class="keyword">public</span> <span class="keyword">void</span> <span class="title">prepare</span><span class="params">(Map stormConf, TopologyContext context)</span> </span>&#123;</span><br><span class="line">		<span class="comment">// 初始化fileWriter</span></span><br><span 
class="line">		<span class="keyword">try</span> &#123;</span><br><span class="line">			<span class="keyword">this</span>.fileWriter = <span class="keyword">new</span> FileWriter(<span class="string">&quot;/home/storm/output/&quot;</span> + UUID.randomUUID());</span><br><span class="line">		&#125; <span class="keyword">catch</span> (IOException e) &#123;</span><br><span class="line">			e.printStackTrace();</span><br><span class="line">		&#125;</span><br><span class="line">	&#125;</span><br><span class="line"></span><br><span class="line">	<span class="meta">@Override</span></span><br><span class="line">	<span class="function"><span class="keyword">public</span> <span class="keyword">void</span> <span class="title">execute</span><span class="params">(Tuple tuple, BasicOutputCollector collector)</span> </span>&#123;</span><br><span class="line">		String upperName = tuple.getString(<span class="number">0</span>);</span><br><span class="line">		<span class="comment">// 添加前缀</span></span><br><span class="line">		String finalName = <span class="string">&quot;hello-&quot;</span> + upperName;</span><br><span class="line">		<span class="comment">// write</span></span><br><span class="line">		<span class="keyword">try</span> &#123;</span><br><span class="line">			<span class="keyword">this</span>.fileWriter.write(finalName);</span><br><span class="line">			<span class="keyword">this</span>.fileWriter.write(<span class="string">&quot;\n&quot;</span>);</span><br><span class="line">			<span class="keyword">this</span>.fileWriter.flush();</span><br><span class="line">		&#125; <span class="keyword">catch</span> (IOException e) &#123;</span><br><span class="line">			e.printStackTrace();</span><br><span class="line">		&#125;</span><br><span class="line">	&#125;</span><br><span class="line"></span><br><span class="line">	<span class="meta">@Override</span></span><br><span class="line">	<span class="function"><span class="keyword">public</span> <span class="keyword">void</span> <span 
class="title">declareOutputFields</span><span class="params">(OutputFieldsDeclarer declarer)</span> </span>&#123;</span><br><span class="line"></span><br><span class="line">	&#125;</span><br><span class="line"></span><br><span class="line">&#125;</span><br></pre></td></tr></table></figure>
<figure class="highlight java"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br><span class="line">13</span><br><span class="line">14</span><br><span class="line">15</span><br><span class="line">16</span><br><span class="line">17</span><br><span class="line">18</span><br><span class="line">19</span><br></pre></td><td class="code"><pre><span class="line"><span class="keyword">public</span> <span class="class"><span class="keyword">class</span> <span class="title">TopologyMain</span> </span>&#123;</span><br><span class="line"></span><br><span class="line">	<span class="function"><span class="keyword">public</span> <span class="keyword">static</span> <span class="keyword">void</span> <span class="title">main</span><span class="params">(String[] args)</span> <span class="keyword">throws</span> Exception </span>&#123;</span><br><span class="line">		TopologyBuilder topologyBuilder = <span class="keyword">new</span> TopologyBuilder();</span><br><span class="line">		<span class="comment">// 设置Spout</span></span><br><span class="line">		topologyBuilder.setSpout(<span class="string">&quot;randomWordSpout&quot;</span>, <span class="keyword">new</span> RandomWordSpout());</span><br><span class="line">		<span class="comment">// 设置Bolt</span></span><br><span class="line">		topologyBuilder.setBolt(<span class="string">&quot;upperBolt&quot;</span>, <span class="keyword">new</span> UpperBolt()).shuffleGrouping(<span class="string">&quot;randomWordSpout&quot;</span>);</span><br><span class="line">		topologyBuilder.setBolt(<span class="string">&quot;prefixBolt&quot;</span>, <span class="keyword">new</span> 
PrefixBolt()).shuffleGrouping(<span class="string">&quot;upperBolt&quot;</span>);</span><br><span class="line"></span><br><span class="line">		Config config = <span class="keyword">new</span> Config();</span><br><span class="line">		<span class="comment">// 设置Workers数量</span></span><br><span class="line">		config.setNumWorkers(<span class="number">4</span>);</span><br><span class="line">		config.setDebug(<span class="keyword">true</span>);</span><br><span class="line">		<span class="comment">// 提交Topology</span></span><br><span class="line">		StormSubmitter.submitTopology(<span class="string">&quot;randomTopo&quot;</span>, config, topologyBuilder.createTopology());</span><br><span class="line">	&#125;</span><br><span class="line"></span><br><span class="line">&#125;</span><br></pre></td></tr></table></figure>
      
    </div>

    
    
    

    <footer class="post-footer">
        <div class="post-eof"></div>
      
    </footer>
  </article>
</div>




    


<div class="post-block">
  
  

  <article itemscope itemtype="http://schema.org/Article" class="post-content" lang="zh">
    <link itemprop="mainEntityOfPage" href="https://suyuhuan.gitee.io/yuwanzi.io/2016/07/18/2016-07-18-Hadoop05-Hive/">

    <span hidden itemprop="author" itemscope itemtype="http://schema.org/Person">
      <meta itemprop="image" content="/yuwanzi.io/images/avatar.gif">
      <meta itemprop="name" content="玉丸子">
      <meta itemprop="description" content="这里是玉丸子的个人博客,与你一起发现更大的世界。">
    </span>

    <span hidden itemprop="publisher" itemscope itemtype="http://schema.org/Organization">
      <meta itemprop="name" content="玉丸子 | Blog">
    </span>
      <header class="post-header">
        <h2 class="post-title" itemprop="name headline">
          <a href="/yuwanzi.io/2016/07/18/2016-07-18-Hadoop05-Hive/" class="post-title-link" itemprop="url">Hadoop学习笔记(5)-Hive</a>
        </h2>

        <div class="post-meta-container">
          <div class="post-meta">
    <span class="post-meta-item">
      <span class="post-meta-item-icon">
        <i class="far fa-calendar"></i>
      </span>
      <span class="post-meta-item-text">Veröffentlicht am</span>

      <time title="Erstellt: 2016-07-18 18:00:00" itemprop="dateCreated datePublished" datetime="2016-07-18T18:00:00+08:00">2016-07-18</time>
    </span>
      <span class="post-meta-item">
        <span class="post-meta-item-icon">
          <i class="far fa-calendar-check"></i>
        </span>
        <span class="post-meta-item-text">Bearbeitet am</span>
        <time title="Geändert am: 2020-11-07 08:58:17" itemprop="dateModified" datetime="2020-11-07T08:58:17+08:00">2020-11-07</time>
      </span>
    <span class="post-meta-item">
      <span class="post-meta-item-icon">
        <i class="far fa-folder"></i>
      </span>
      <span class="post-meta-item-text">in</span>
        <span itemprop="about" itemscope itemtype="http://schema.org/Thing">
          <a href="/yuwanzi.io/categories/%E5%90%8E%E7%AB%AF/" itemprop="url" rel="index"><span itemprop="name">后端</span></a>
        </span>
          . 
        <span itemprop="about" itemscope itemtype="http://schema.org/Thing">
          <a href="/yuwanzi.io/categories/%E5%90%8E%E7%AB%AF/%E5%A4%A7%E6%95%B0%E6%8D%AE/" itemprop="url" rel="index"><span itemprop="name">大数据</span></a>
        </span>
          . 
        <span itemprop="about" itemscope itemtype="http://schema.org/Thing">
          <a href="/yuwanzi.io/categories/%E5%90%8E%E7%AB%AF/%E5%A4%A7%E6%95%B0%E6%8D%AE/Hadoop/" itemprop="url" rel="index"><span itemprop="name">Hadoop</span></a>
        </span>
    </span>

  
</div>

        </div>
      </header>

    
    
    
    <div class="post-body" itemprop="articleBody">
          <p><img src="http://ww2.sinaimg.cn/mw690/63503acbjw1f5x3hcttd1j20ai0513yq.jpg"></p>
<h3 id="概述"><a href="#概述" class="headerlink" title="概述"></a>概述</h3><p>&nbsp;&nbsp;Hive是建立在Hadoop上的数据仓库基础架构。它提供了一系列的工具，用来进行数据提取、转换、加载，这是一种可以存储、查询和分析存储在Hadoop中的大规模数据机制。可以把Hadoop下结构化数据文件映射为一张Hive中的表，并提供类sql查询功能，除了不支持更新、索引和事务，sql其它功能都支持。可以将sql语句转换为MapReduce任务进行运行，作为sql到MapReduce的映射器。提供shell、JDBC/ODBC、Thrift、Web等接口。</p>
<p>&nbsp;&nbsp;Hive 并不适合那些需要低延迟的应用，例如，联机事务处理（OLTP）。Hive 查询操作过程严格遵守Hadoop MapReduce 的作业执行模型，Hive 将用户的HiveQL 语句通过解释器转换为MapReduce 作业提交到Hadoop 集群上，Hadoop 监控作业执行过程，然后返回作业执行结果给用户。Hive 并非为联机事务处理而设计，Hive 并不提供实时的查询和基于行级的数据更新操作。Hive 的最佳使用场合是大数据集的批处理作业，例如，网络日志分析。</p>
<h3 id="元数据存储"><a href="#元数据存储" class="headerlink" title="元数据存储"></a>元数据存储</h3><p>&nbsp;&nbsp;Hive将元数据存储在RDBMS中，有三种方式可以连接到数据库。</p>
<ol>
<li>内嵌模式：元数据保持在内嵌数据库的Derby，一般用于单元测试，只允许一个会话连接。</li>
<li>多用户模式：在本地安装Mysql，把元数据放到Mysql内。</li>
<li>远程模式：元数据放置在远程的Mysql数据库。</li>
</ol>
<h3 id="数据存储"><a href="#数据存储" class="headerlink" title="数据存储"></a>数据存储</h3><p> &nbsp;&nbsp;Hive没有专门的数据存储格式，也没有为数据建立索引，用户可以非常自由地组织Hive中的表，只需要在创建表的时候告诉Hive数据中的列分隔符和行分隔符。</p>
<p> &nbsp;&nbsp;Hive中所有的数据都存储在HDFS中，Hive中包含4种数据模型：Table、ExternalTable、Partition、Bucket。</p>
<p> <strong>Table</strong></p>
<p> &nbsp;&nbsp;类似于传统数据库中的Table，每一个Table在Hive中都有一个相应的目录来存储数据。例如：一个表zz，它在HDFS中的路径为：/wh/zz，其中wh是在hive-site.xml中由${hive.metastore.warehouse.dir}指定的数据仓库的目录，所有的Table数据（不含External Table）都保存在这个目录中。</p>
<p> <strong>Partition</strong></p>
<p> &nbsp;&nbsp;类似于传统数据库中划分列的索引。在Hive中，表中的一个Partition对应于表下的一个目录，所有的Partition数据都存储在对应的目录中。例如：zz表中包含ds和city两个Partition，则对应于ds=20140214，city=beijing的HDFS子目录为：/wh/zz/ds=20140214/city=Beijing。</p>
<p> <strong>ExternalTable</strong></p>
<p> &nbsp;&nbsp;指向已存在HDFS中的数据，可创建Partition。和Table在元数据组织结构相同，在实际存储上有较大差异。Table创建和数据加载过程，可以用统一语句实现，实际数据被转移到数据仓库目录中，之后对数据的访问将会直接在数据仓库的目录中完成。删除表时，表中的数据和元数据都会删除。ExternalTable只有一个过程，因为加载数据和创建表是同时完成。实际数据是存储在Location后面指定的HDFS路径中的，并不会移动到数据仓库中。</p>
<p> <strong>Bucket</strong></p>
<p> &nbsp;&nbsp;对指定列计算的hash，根据hash值切分数据，目的是为了便于并行，每一个Buckets对应一个文件。将user列分散至32个Bucket上，首先对user列的值计算hash，比如，对应hash=0的HDFS目录为：/wh/zz/ds=20140214/city=Beijing/part-00000;对应hash=20的，目录为：/wh/zz/ds=20140214/city=Beijing/part-00020。</p>
<h3 id="Hive常用优化方法"><a href="#Hive常用优化方法" class="headerlink" title="Hive常用优化方法"></a>Hive常用优化方法</h3><ol>
<li>join连接时的优化：当三个或多个以上的表进行join操作时，如果每个on使用相同的字段连接时只会产生一个mapreduce。</li>
<li>join连接时的优化：当多个表进行查询时，从左到右表的大小顺序应该是从小到大。原因：hive在对每行记录操作时会把其他表先缓存起来，直到扫描最后的表进行计算。</li>
<li>在where字句中增加分区过滤器。</li>
<li>当可以使用left semi join 语法时不要使用inner join，前者效率更高。原因：对于左表中指定的一条记录，一旦在右表中找到立即停止扫描。</li>
<li>如果所有表中有一张表足够小，则可置于内存中，这样在和其他表进行连接的时候就能完成匹配，省略掉reduce过程。设置属性即可实现，set hive.auto.convert.join=true; 用户可以配置希望被优化的小表的大小 set hive.mapjoin.smalltable.size=2500000; 如果需要使用这两个配置可置入$HOME/.hiverc文件中。</li>
<li>同一种数据的多种处理：从一个数据源产生的多个数据聚合，无需每次聚合都需要重新扫描一次。<br>例如:insert overwrite table student select * from employee; insert overwrite table person select * from employee;<br>可以优化成 from employee insert overwrite table student select * insert overwrite table person select *</li>
<li>limit调优：limit语句通常是执行整个语句后返回部分结果。set hive.limit.optimize.enable=true;</li>
<li>开启并发执行。某个job任务中可能包含众多的阶段，其中某些阶段没有依赖关系可以并发执行，开启并发执行后job任务可以更快的完成。设置属性：set hive.exec.parallel=true;</li>
<li>hive提供的严格模式，禁止3种情况下的查询模式。<ul>
<li>当表为分区表时，where字句后没有分区字段和限制时，不允许执行。</li>
<li>当使用order by语句时，必须使用limit字段，因为order by 只会产生一个reduce任务。</li>
<li>限制笛卡尔积的查询。</li>
</ul>
</li>
<li>合理的设置map和reduce数量。</li>
<li>jvm重用。可在hadoop的mapred-site.xml中设置jvm被重用的次数。</li>
</ol>
<h3 id="安装Hive"><a href="#安装Hive" class="headerlink" title="安装Hive"></a>安装Hive</h3><ol>
<li>解压Hive。</li>
<li>将mysql的驱动jar包copy到${HIVE_HOME}/lib目录下。</li>
<li>cp hive-default.xml.template  hive-site.xml。</li>
<li>配置hive-site.xml。<figure class="highlight plain"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br><span class="line">13</span><br><span class="line">14</span><br><span class="line">15</span><br><span class="line">16</span><br><span class="line">17</span><br><span class="line">18</span><br><span class="line">19</span><br></pre></td><td class="code"><pre><span class="line"> &lt;configuration&gt;</span><br><span class="line">	&lt;!-- 指定数据库URL --&gt;</span><br><span class="line">	&lt;property&gt;</span><br><span class="line">		&lt;name&gt;javax.jdo.option.ConnectionURL&lt;&#x2F;name&gt;</span><br><span class="line">		&lt;value&gt;jdbc:mysql:&#x2F;&#x2F;192.168.145.148:3306&#x2F;hive?createDatabaseIfNotExist&#x3D;true&lt;&#x2F;value&gt;</span><br><span class="line">	&lt;&#x2F;property&gt;</span><br><span class="line">	&lt;property&gt;</span><br><span class="line">		&lt;name&gt;javax.jdo.option.ConnectionDriverName&lt;&#x2F;name&gt;</span><br><span class="line">		&lt;value&gt;com.mysql.jdbc.Driver&lt;&#x2F;value&gt;</span><br><span class="line">	&lt;&#x2F;property&gt;</span><br><span class="line">	&lt;property&gt;</span><br><span class="line">		&lt;name&gt;javax.jdo.option.ConnectionUserName&lt;&#x2F;name&gt;</span><br><span class="line">		&lt;value&gt;root&lt;&#x2F;value&gt;</span><br><span class="line">	&lt;&#x2F;property&gt;</span><br><span class="line">	&lt;property&gt;</span><br><span class="line">		&lt;name&gt;javax.jdo.option.ConnectionPassword&lt;&#x2F;name&gt;</span><br><span class="line">		&lt;value&gt;root&lt;&#x2F;value&gt;</span><br><span class="line">	&lt;&#x2F;property&gt;</span><br><span 
class="line">&lt;&#x2F;configuration&gt;</span><br></pre></td></tr></table></figure>

</li>
</ol>
<h3 id="Hive-QL"><a href="#Hive-QL" class="headerlink" title="Hive QL"></a>Hive QL</h3><h4 id="文件格式"><a href="#文件格式" class="headerlink" title="文件格式"></a>文件格式</h4><p>&nbsp;&nbsp;Hive创建表可以指定四种文件格式。</p>
<ol>
<li>文本格式的数据是Hadoop中经常碰到的。如TextFile 、XML和JSON。 文本格式除了会占用更多磁盘资源外，对它的解析开销一般会比二进制格式高几十倍以上，尤其是XML 和JSON，它们的解析开销比Textfile 还要大，因此强烈不建议在生产系统中使用这些格式进行储存。如果需要输出这些格式，请在客户端做相应的转换操作。 文本格式经常会用于日志收集，数据库导入，Hive默认配置也是使用文本格式，而且常常容易忘了压缩，所以请确保使用了正确的格式。另外文本格式的一个缺点是它不具备类型和模式，比如销售金额、利润这类数值数据或者日期时间类型的数据，如果使用文本格式保存，由于它们本身的字符串类型的长短不一，或者含有负数，导致MR没有办法排序，所以往往需要将它们预处理成含有模式的二进制格式，这又导致了不必要的预处理步骤的开销和储存资源的浪费。</li>
<li>SequenceFile是Hadoop API 提供的一种二进制文件，它将数据以&lt;key,value&gt;的形式序列化到文件中。这种二进制文件内部使用Hadoop 的标准的Writable 接口实现序列化和反序列化。它与Hadoop API中的MapFile 是互相兼容的。Hive 中的SequenceFile 继承自Hadoop API 的SequenceFile，不过它的key为空，使用value 存放实际的值， 这样是为了避免MR 在运行map 阶段的排序过程。如果你用Java API 编写SequenceFile，并让Hive 读取的话，请确保使用value字段存放数据，否则你需要自定义读取这种SequenceFile 的InputFormat class 和OutputFormat class。</li>
<li>RCFile是Hive推出的一种专门面向列的数据格式。 它遵循“先按列划分，再垂直划分”的设计理念。当查询过程中，针对它并不关心的列时，它会在IO上跳过这些列。需要说明的是，RCFile在map阶段从远端拷贝仍然是拷贝整个数据块，并且拷贝到本地目录后RCFile并不是真正直接跳过不需要的列，并跳到需要读取的列， 而是通过扫描每一个row group的头部定义来实现的，但是在整个HDFS Block 级别的头部并没有定义每个列从哪个row group起始到哪个row group结束。所以在读取所有列的情况下，RCFile的性能反而没有SequenceFile高。</li>
<li>Avro是一种用于支持数据密集型的二进制文件格式。它的文件格式更为紧凑，若要读取大量数据时，Avro能够提供更好的序列化和反序列化性能。并且Avro数据文件天生是带Schema定义的，所以它不需要开发者在API 级别实现自己的Writable对象。</li>
<li>其他格式:Hadoop实际上支持任意文件格式，只要能够实现对应的RecordWriter和RecordReader即可。其中数据库格式也是会经常储存在Hadoop中，比如Hbase，Mysql，Cassandra，MongoDB。 这些格式一般是为了避免大量的数据移动和快速装载的需求而用的。他们的序列化和反序列化都是由这些数据库格式的客户端完成，并且文件的储存位置和数据布局(Data Layout)不由Hadoop控制，他们的文件切分也不是按HDFS的块大小（blocksize）进行切割。</li>
</ol>
<h4 id="create-table"><a href="#create-table" class="headerlink" title="create table"></a>create table</h4><figure class="highlight plain"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br></pre></td><td class="code"><pre><span class="line">create table test_user(id int,name string) </span><br><span class="line">    &#x2F;&#x2F; 注释</span><br><span class="line">    comment &#39;This is the test table&#39;</span><br><span class="line">    row format delimited</span><br><span class="line">    &#x2F;&#x2F; 指定切分格式规则</span><br><span class="line">    fields terminated by &#39;,&#39;</span><br><span class="line">    &#x2F;&#x2F; 指定文件格式</span><br><span class="line">    stored as textfile; </span><br></pre></td></tr></table></figure>
<h4 id="insert-select"><a href="#insert-select" class="headerlink" title="insert select"></a>insert select</h4><figure class="highlight plain"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br></pre></td><td class="code"><pre><span class="line">&#x2F;&#x2F;使用select语句来批量插入数据</span><br><span class="line">insert overwrite table test_user select * from tab_user;</span><br></pre></td></tr></table></figure>
<h4 id="load-data"><a href="#load-data" class="headerlink" title="load data"></a>load data</h4><figure class="highlight plain"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br></pre></td><td class="code"><pre><span class="line">&#x2F;&#x2F;从本地导入数据到hive的表中（实质就是将文件上传到hdfs中hive管理目录下）</span><br><span class="line">load data local inpath &#39;&#x2F;home&#x2F;hadoop&#x2F;test.txt&#39; into table test_user;</span><br><span class="line"></span><br><span class="line">&#x2F;&#x2F;从hdfs上导入数据到hive表中（实质就是将文件从原始目录移动到hive管理的目录下）</span><br><span class="line">load data inpath &#39;hdfs:&#x2F;&#x2F;ns1&#x2F;data.log&#39; into table test_user;</span><br></pre></td></tr></table></figure>
<h4 id="external-table"><a href="#external-table" class="headerlink" title="external table"></a>external table</h4><figure class="highlight plain"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br></pre></td><td class="code"><pre><span class="line">&#x2F;&#x2F;LOCATION指定的是hdfs路径</span><br><span class="line">&#x2F;&#x2F;如果LOCATION路径有数据,则可以直接映射数据建表</span><br><span class="line">CREATE EXTERNAL TABLE test_user_external(id int, name string)</span><br><span class="line"> ROW FORMAT DELIMITED</span><br><span class="line"> FIELDS TERMINATED BY &#39;,&#39;</span><br><span class="line"> STORED AS TEXTFILE</span><br><span class="line"> LOCATION &#39;&#x2F;external&#x2F;user&#39;;</span><br></pre></td></tr></table></figure>
<h4 id="CTAS"><a href="#CTAS" class="headerlink" title="CTAS"></a>CTAS</h4><figure class="highlight plain"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br></pre></td><td class="code"><pre><span class="line">&#x2F;&#x2F;CTAS是通过查询,然后根据查询的结果来建立表格的一种方式。</span><br><span class="line">&#x2F;&#x2F;CTAS会根据SELECT语句创建表结构,并把数据一并复制过来。</span><br><span class="line">CREATE TABLE test_user_ctas</span><br><span class="line">   AS</span><br><span class="line">SELECT id new_id, name new_name</span><br><span class="line">FROM test_user</span><br><span class="line">SORT BY new_id;</span><br></pre></td></tr></table></figure>
<h4 id="Partition"><a href="#Partition" class="headerlink" title="Partition"></a>Partition</h4><figure class="highlight plain"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br></pre></td><td class="code"><pre><span class="line">&#x2F;&#x2F;创建一个分区表,以year年份作为分区字段</span><br><span class="line">create table test_user_part(id int,name string) </span><br><span class="line">    partitioned by (year string)</span><br><span class="line">    row format delimited fields terminated by &#39;,&#39;;</span><br><span class="line"></span><br><span class="line">&#x2F;&#x2F;将data.log导入到test_user_part表中,并设置分区为1990    </span><br><span class="line">load data local inpath &#39;&#x2F;home&#x2F;hadoop&#x2F;data.log&#39; overwrite into table test_user_part</span><br><span class="line">     partition(year&#x3D;&#39;1990&#39;);</span><br><span class="line">    </span><br><span class="line">    </span><br><span class="line">load data local inpath &#39;&#x2F;home&#x2F;hadoop&#x2F;data2.log&#39; overwrite into table test_user_part</span><br><span class="line">     partition(year&#x3D;&#39;2000&#39;);</span><br></pre></td></tr></table></figure>
<h4 id="Array-amp-amp-Map"><a href="#Array-amp-amp-Map" class="headerlink" title="Array&amp;&amp;Map"></a>Array&amp;&amp;Map</h4><p>&nbsp;&nbsp;hive中的列支持使用struct、map和array集合数据类型。大多数关系型数据库中不支持这些集合数据类型，因为它们会破坏标准格式。关系型数据库中为实现集合数据类型是由多个表之间建立合适的外键关联来实现。在大数据系统中，使用集合类型的数据的好处在于提高数据的吞吐量，减少寻址次数来提高查询速度。</p>
<figure class="highlight plain"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br><span class="line">13</span><br><span class="line">14</span><br><span class="line">15</span><br><span class="line">16</span><br><span class="line">17</span><br><span class="line">18</span><br><span class="line">19</span><br></pre></td><td class="code"><pre><span class="line">&#x2F;&#x2F;array </span><br><span class="line">create table tab_array(a array&lt;int&gt;,b array&lt;string&gt;)</span><br><span class="line">row format delimited</span><br><span class="line">fields terminated by &#39;\t&#39;</span><br><span class="line">collection items terminated by &#39;,&#39;;</span><br><span class="line"></span><br><span class="line">select a[0] from tab_array;</span><br><span class="line">select * from tab_array where array_contains(b,&#39;word&#39;);</span><br><span class="line">insert into table tab_array select array(0),array(name,ip) from tab_ext t; </span><br><span class="line"></span><br><span class="line">&#x2F;&#x2F;map</span><br><span class="line">create table tab_map(name string,info map&lt;string,string&gt;)</span><br><span class="line">row format delimited</span><br><span class="line">fields terminated by &#39;\t&#39;</span><br><span class="line">collection items terminated by &#39;,&#39;</span><br><span class="line">map keys terminated by &#39;:&#39;;</span><br><span class="line"></span><br><span class="line">load data local inpath &#39;&#x2F;home&#x2F;hadoop&#x2F;hivetemp&#x2F;tab_map.txt&#39; overwrite into table tab_map;</span><br><span class="line">insert into table tab_map select 
name,map(&#39;name&#39;,name,&#39;ip&#39;,ip) from tab_ext; </span><br></pre></td></tr></table></figure>
<h3 id="UDF"><a href="#UDF" class="headerlink" title="UDF"></a>UDF</h3><p>&nbsp;&nbsp;UDF即用户自定义函数(User Defined Function),Hive支持UDF进行自定义函数的编写。</p>
<p>&nbsp;&nbsp;需要先使用Java代码开发UDF,然后再把jar包导入到Hive中。</p>
<figure class="highlight java"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br><span class="line">13</span><br><span class="line">14</span><br><span class="line">15</span><br><span class="line">16</span><br><span class="line">17</span><br><span class="line">18</span><br><span class="line">19</span><br><span class="line">20</span><br><span class="line">21</span><br></pre></td><td class="code"><pre><span class="line"><span class="keyword">public</span> <span class="class"><span class="keyword">class</span> <span class="title">FindRegionByPhone</span> <span class="keyword">extends</span> <span class="title">UDF</span> </span>&#123;</span><br><span class="line">	</span><br><span class="line">	<span class="comment">//使用map模拟数据库</span></span><br><span class="line">	<span class="keyword">private</span> <span class="keyword">static</span> HashMap&lt;String,String&gt;  dataDictionary = <span class="keyword">new</span> HashMap&lt;String,String&gt;();</span><br><span class="line">	</span><br><span class="line">	<span class="keyword">static</span>&#123;</span><br><span class="line">		dataDictionary.put(<span class="string">&quot;136&quot;</span>,<span class="string">&quot;beijing&quot;</span>);</span><br><span class="line">		dataDictionary.put(<span class="string">&quot;137&quot;</span>,<span class="string">&quot;guangzhou&quot;</span>);</span><br><span class="line">		dataDictionary.put(<span class="string">&quot;138&quot;</span>,<span class="string">&quot;shenzhen&quot;</span>);</span><br><span class="line">		dataDictionary.put(<span class="string">&quot;139&quot;</span>,<span 
class="string">&quot;shanghai&quot;</span>);</span><br><span class="line">	&#125;</span><br><span class="line">    </span><br><span class="line">    <span class="function"><span class="keyword">public</span> String <span class="title">evaluate</span><span class="params">(String phone)</span> </span>&#123;</span><br><span class="line">        </span><br><span class="line">        <span class="comment">// 如果没有匹配到对应的区域则返回&quot;other&quot;</span></span><br><span class="line">		<span class="keyword">return</span> areaMap.get(phone.substring(<span class="number">0</span>, <span class="number">3</span>)) == <span class="keyword">null</span> ? <span class="string">&quot;other&quot;</span> : areaMap</span><br><span class="line">				.get(phone.substring(<span class="number">0</span>, <span class="number">3</span>));</span><br><span class="line"></span><br><span class="line">	&#125;	</span><br><span class="line">	</span><br><span class="line">&#125;</span><br></pre></td></tr></table></figure>
<h4 id="添加jar包到Hive"><a href="#添加jar包到Hive" class="headerlink" title="添加jar包到Hive"></a>添加jar包到Hive</h4><figure class="highlight plain"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br></pre></td><td class="code"><pre><span class="line">方式1:添加到hive</span><br><span class="line">hive&gt; add jar &#x2F;root&#x2F;MyUDF.jar;</span><br><span class="line"></span><br><span class="line">方式2:添加到hdfs,调用时需要指定jar包地址</span><br><span class="line">hdfs dfs -put MyUDF.jar &#39;hdfs:&#x2F;&#x2F;&#x2F;user&#x2F;hadoop&#x2F;hiveUDF&#39;</span><br></pre></td></tr></table></figure>
<h4 id="创建临时函数"><a href="#创建临时函数" class="headerlink" title="创建临时函数"></a>创建临时函数</h4><p>&nbsp;&nbsp;临时函数只在当前session中有效,临时函数不能指定库。</p>
<figure class="highlight plain"><table><tr><td class="gutter"><pre><span class="line">1</span><br></pre></td><td class="code"><pre><span class="line">create temporary function testUDF as &#39;cn.sylvanas.hive.udf.FindRegionByPhone&#39; using jar &#39;hdfs:&#x2F;&#x2F;&#x2F;user&#x2F;hadoop&#x2F;hiveUDF&#x2F;MyUDF.jar&#39;</span><br></pre></td></tr></table></figure>
<h4 id="创建永久函数"><a href="#创建永久函数" class="headerlink" title="创建永久函数"></a>创建永久函数</h4><p><strong>格式</strong></p>
<p>CREATE FUNCTION [db_name.]function_name AS class_name [USING JAR|FILE|ARCHIVE ‘file_uri’ [, JAR|FILE|ARCHIVE ‘file_uri’] ];</p>
<p><strong>例如</strong></p>
<figure class="highlight plain"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br></pre></td><td class="code"><pre><span class="line">create function test.testUDF as &#39;cn.sylvanas.hive.udf.FindRegionByPhone&#39; using jar &#39;hdfs:&#x2F;&#x2F;&#x2F;user&#x2F;hadoop&#x2F;hiveUDF&#x2F;MyUDF.jar&#39;</span><br><span class="line"></span><br><span class="line">函数需要属于某个库,如这里是’test’,当其他库调用时,需要加上库名,如’test.testUDF’.</span><br><span class="line"></span><br></pre></td></tr></table></figure>


      
    </div>

    
    
    

    <footer class="post-footer">
        <div class="post-eof"></div>
      
    </footer>
  </article>
</div>




    


<div class="post-block">
  
  

  <article itemscope itemtype="http://schema.org/Article" class="post-content" lang="zh">
    <link itemprop="mainEntityOfPage" href="https://suyuhuan.gitee.io/yuwanzi.io/2016/07/17/2016-07-17-Hadoop04-HBase/">

    <span hidden itemprop="author" itemscope itemtype="http://schema.org/Person">
      <meta itemprop="image" content="/yuwanzi.io/images/avatar.gif">
      <meta itemprop="name" content="玉丸子">
      <meta itemprop="description" content="这里是玉丸子的个人博客,与你一起发现更大的世界。">
    </span>

    <span hidden itemprop="publisher" itemscope itemtype="http://schema.org/Organization">
      <meta itemprop="name" content="玉丸子 | Blog">
    </span>
      <header class="post-header">
        <h2 class="post-title" itemprop="name headline">
          <a href="/yuwanzi.io/2016/07/17/2016-07-17-Hadoop04-HBase/" class="post-title-link" itemprop="url">Hadoop学习笔记(4)-HBase</a>
        </h2>

        <div class="post-meta-container">
          <div class="post-meta">
    <span class="post-meta-item">
      <span class="post-meta-item-icon">
        <i class="far fa-calendar"></i>
      </span>
      <span class="post-meta-item-text">Veröffentlicht am</span>

      <time title="Erstellt: 2016-07-17 18:00:00" itemprop="dateCreated datePublished" datetime="2016-07-17T18:00:00+08:00">2016-07-17</time>
    </span>
      <span class="post-meta-item">
        <span class="post-meta-item-icon">
          <i class="far fa-calendar-check"></i>
        </span>
        <span class="post-meta-item-text">Bearbeitet am</span>
        <time title="Geändert am: 2020-11-07 08:58:17" itemprop="dateModified" datetime="2020-11-07T08:58:17+08:00">2020-11-07</time>
      </span>
    <span class="post-meta-item">
      <span class="post-meta-item-icon">
        <i class="far fa-folder"></i>
      </span>
      <span class="post-meta-item-text">in</span>
        <span itemprop="about" itemscope itemtype="http://schema.org/Thing">
          <a href="/yuwanzi.io/categories/%E5%90%8E%E7%AB%AF/" itemprop="url" rel="index"><span itemprop="name">后端</span></a>
        </span>
          . 
        <span itemprop="about" itemscope itemtype="http://schema.org/Thing">
          <a href="/yuwanzi.io/categories/%E5%90%8E%E7%AB%AF/%E5%A4%A7%E6%95%B0%E6%8D%AE/" itemprop="url" rel="index"><span itemprop="name">大数据</span></a>
        </span>
          . 
        <span itemprop="about" itemscope itemtype="http://schema.org/Thing">
          <a href="/yuwanzi.io/categories/%E5%90%8E%E7%AB%AF/%E5%A4%A7%E6%95%B0%E6%8D%AE/Hadoop/" itemprop="url" rel="index"><span itemprop="name">Hadoop</span></a>
        </span>
    </span>

  
</div>

        </div>
      </header>

    
    
    
    <div class="post-body" itemprop="articleBody">
          <p><img src="http://ww1.sinaimg.cn/mw690/63503acbjw1f5wxvstywwj20dx03odg2.jpg"></p>
<h3 id="概述"><a href="#概述" class="headerlink" title="概述"></a>概述</h3><p>&nbsp;&nbsp;HBase是一个分布式的、面向列的开源数据库，该技术来源于 Fay Chang 所撰写的Google论文“Bigtable：一个结构化数据的分布式存储系统”。就像Bigtable利用了Google文件系统（File System）所提供的分布式数据存储一样，HBase在Hadoop之上提供了类似于Bigtable的能力。HBase是Apache的Hadoop项目的子项目。HBase不同于一般的关系数据库，它是一个适合于非结构化数据存储的数据库。另一个不同的是HBase基于列的而不是基于行的模式。</p>
<p>&nbsp;&nbsp;与FUJITSU Cliq等商用大数据产品不同，HBase是Google Bigtable的开源实现，类似Google Bigtable利用GFS作为其文件存储系统，HBase利用Hadoop HDFS作为其文件存储系统；Google运行MapReduce来处理Bigtable中的海量数据，HBase同样利用Hadoop MapReduce来处理HBase中的海量数据；Google Bigtable利用 Chubby作为协同服务，HBase利用Zookeeper作为对应。</p>
<h3 id="HBase的特点"><a href="#HBase的特点" class="headerlink" title="HBase的特点"></a>HBase的特点</h3><ol>
<li>Hbase可以往数据里面insert，也可以update一些数据，但update的实际上也是insert，只是插入一个新的时间戳的一行。delete数据，也是insert，只是insert一行带有delete标记的一行。Hbase的所有操作都是追加插入操作。Hbase是一种日志集数据库。它的存储方式，像是日志文件一样。它是批量大量的往硬盘中写，通常都是以文件形式的读写。这个读写速度，取决于硬盘与机器之间的传输有多快。</li>
<li>Hbase中数据可以保存许多不同时间戳的版本（即同一数据可以复制许多不同的版本，准许数据冗余，也是优势）。数据按时间排序，因此Hbase特别适合寻找按照时间排序寻找Top n的场景。找出某个人最近浏览的消息，最近写的N篇博客，N种行为等等，因此Hbase在互联网应用非常多。</li>
<li>Hbase只有主键索引，因此在建模的时候会遇到问题。例如，在一张表中，很多的列我都想做某种条件的查询。但却只能在主键上建快速查询。</li>
<li>Hbase是列式数据库,列式数据库的优势在于数据分析。</li>
<li>Hbase中的数据都是字符串，没有其他类型。</li>
</ol>
<h3 id="行式数据库与列式数据库的区别"><a href="#行式数据库与列式数据库的区别" class="headerlink" title="行式数据库与列式数据库的区别"></a>行式数据库与列式数据库的区别</h3><p> <strong>行式数据库</strong></p>
<p> &nbsp;&nbsp;以Oracle为例，数据文件的基本组成单位：块/页。块中数据是按照一行行写入的。这就存在一个问题，当我们要读一个块中的某些列的时候，不能只读这些列，必须把这个块整个的读入内存中，再把这些列的内容读出来。换句话就是：为了读表中的某些列，必须要把整个表的行全部读完，才能读到这些列。这就是行数据库最糟糕的地方。</p>
<p> <strong>列式数据库</strong> </p>
<p> &nbsp;&nbsp;列式数据库是以列作为元素存储的。同一个列的元素会挤在一个块。当要读某些列，只需要把相关的列块读到内存中，这样读的IO量就会少很多。通常，同一个列的数据元素通常格式都是相近的。这就意味着，当数据格式相近的时候，数据就可以做大幅度的压缩。所以，列式数据库在数据压缩方面有很大的优势，压缩不仅节省了存储空间，同时也节省了IO。（这一点，可利用在当数据达到百万、千万级别以后，数据查询之间的优化，提高性能，视场景而定）</p>
<h3 id="HBase架构"><a href="#HBase架构" class="headerlink" title="HBase架构"></a>HBase架构</h3><p><img src="http://ww4.sinaimg.cn/mw690/63503acbjw1f5x1207zfmj20gr0atq4h.jpg"></p>
<p>&nbsp;&nbsp;HBase采用Master/Slave架构搭建集群，它隶属于Hadoop生态系统，由以下类型节点组成：HMaster节点、HRegionServer节点、ZooKeeper集群，而在底层，它将数据存储于HDFS中，因而涉及到HDFS的NameNode、DataNode等节点。</p>
<p><strong>Zookeeper</strong></p>
<p>&nbsp;&nbsp;Zookeeper Quorum存储-ROOT-表地址、HMaster地址。HRegionServer把自己以Ephemeral方式注册到Zookeeper中，HMaster随时感知各个HRegionServer的健康状况。</p>
<p><strong>HMaster</strong></p>
<p>&nbsp;&nbsp;HMaster没有单点问题,HBase中可以启动多个HMaster，通过Zookeeper的Master Election机制保证总有一个Master在运行。</p>
<p>&nbsp;&nbsp;HMaster主要负责Table和Region的管理工作</p>
<ol>
<li>实现DDL操作（Data Definition Language，namespace和table的增删改，column familiy的增删改等）。</li>
<li>管理HRegionServer的负载均衡，调整Region分布。</li>
<li>管理和分配HRegion，比如在HRegion split时分配新的HRegion；在HRegionServer退出时迁移其内的HRegion到其他HRegionServer上。</li>
<li>权限控制（ACL）。</li>
</ol>
<p><strong>HRegionServer</strong></p>
<p>&nbsp;&nbsp;HBase中最核心的模块，主要负责响应用户I/O请求，向HDFS文件系统中读写数据。</p>
<ol>
<li>存放和管理本地HRegion。</li>
<li>读写HDFS，管理Table中的数据。</li>
<li>Client直接通过HRegionServer读写数据（从HMaster中获取元数据，找到RowKey所在的HRegion/HRegionServer后）。</li>
</ol>
<p><strong>HRegion</strong></p>
<p><img src="http://ww2.sinaimg.cn/mw690/63503acbjw1f5x2cuji5gj20k409ddho.jpg"></p>
<p>&nbsp;&nbsp;HBase使用RowKey将表水平切割成多个HRegion，从HMaster的角度，每个HRegion都纪录了它的StartKey和EndKey（第一个HRegion的StartKey为空，最后一个HRegion的EndKey为空），由于RowKey是排序的，因而Client可以通过HMaster快速的定位每个RowKey在哪个HRegion中。HRegion由HMaster分配到相应的HRegionServer中，然后由HRegionServer负责HRegion的启动和管理，和Client的通信，负责数据的读(使用HDFS)。</p>
<h3 id="HBase集群搭建"><a href="#HBase集群搭建" class="headerlink" title="HBase集群搭建"></a>HBase集群搭建</h3><p>&nbsp;&nbsp;如果HDFS是HA集群,需要把HDFS的core-site.xml和hdfs-site.xml copy到conf下。</p>
<h4 id="hbase-env-sh"><a href="#hbase-env-sh" class="headerlink" title="hbase-env.sh"></a>hbase-env.sh</h4><figure class="highlight plain"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br></pre></td><td class="code"><pre><span class="line">&#x2F;&#x2F;告诉hbase使用外部的zookeeper</span><br><span class="line">export HBASE_MANAGES_ZK&#x3D;false</span><br></pre></td></tr></table></figure>
<h4 id="hbase-site-xml"><a href="#hbase-site-xml" class="headerlink" title="hbase-site.xml"></a>hbase-site.xml</h4><figure class="highlight plain"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br><span class="line">13</span><br><span class="line">14</span><br><span class="line">15</span><br><span class="line">16</span><br><span class="line">17</span><br><span class="line">18</span><br><span class="line">19</span><br><span class="line">20</span><br><span class="line">21</span><br><span class="line">22</span><br></pre></td><td class="code"><pre><span class="line">&lt;configuration&gt;</span><br><span class="line">	&lt;!-- 指定hbase在HDFS上存储的路径 --&gt;</span><br><span class="line">    &lt;property&gt;</span><br><span class="line">        &lt;name&gt;hbase.rootdir&lt;&#x2F;name&gt;</span><br><span class="line">        &lt;value&gt;hdfs:&#x2F;&#x2F;ns1&#x2F;hbase&lt;&#x2F;value&gt;</span><br><span class="line">    &lt;&#x2F;property&gt;</span><br><span class="line">	&lt;!-- 指定hbase是分布式的 --&gt;</span><br><span class="line">    &lt;property&gt;</span><br><span class="line">        &lt;name&gt;hbase.cluster.distributed&lt;&#x2F;name&gt;</span><br><span class="line">        &lt;value&gt;true&lt;&#x2F;value&gt;</span><br><span class="line">    &lt;&#x2F;property&gt;</span><br><span class="line">	&lt;!-- 指定zk的地址，多个用“,”分割 --&gt;</span><br><span class="line">    &lt;property&gt;</span><br><span class="line">        &lt;name&gt;hbase.zookeeper.quorum&lt;&#x2F;name&gt;</span><br><span class="line">        &lt;value&gt;datanode01:2181,datanode02:2181,datanode03:2181&lt;&#x2F;value&gt;</span><br><span class="line">    
&lt;&#x2F;property&gt;</span><br><span class="line">    &lt;property&gt;</span><br><span class="line">        &lt;name&gt;hbase.master.maxclockskew&lt;&#x2F;name&gt;</span><br><span class="line">        &lt;value&gt;180000&lt;&#x2F;value&gt;</span><br><span class="line">        &lt;description&gt;Time difference of regionserver from master&lt;&#x2F;description&gt;</span><br><span class="line"> 	&lt;&#x2F;property&gt;</span><br><span class="line">&lt;&#x2F;configuration&gt;</span><br></pre></td></tr></table></figure>
<h4 id="regionservers"><a href="#regionservers" class="headerlink" title="regionservers"></a>regionservers</h4><p>&nbsp;&nbsp;配置regionserver的节点,为了尽量实现数据本地化,可以与DataNode在同一个节点上。</p>
<h3 id="HBase常用Shell命令"><a href="#HBase常用Shell命令" class="headerlink" title="HBase常用Shell命令"></a>HBase常用Shell命令</h3><figure class="highlight plain"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br><span class="line">13</span><br><span class="line">14</span><br><span class="line">15</span><br><span class="line">16</span><br><span class="line">17</span><br><span class="line">18</span><br><span class="line">19</span><br><span class="line">20</span><br><span class="line">21</span><br><span class="line">22</span><br><span class="line">23</span><br><span class="line">24</span><br><span class="line">25</span><br><span class="line">26</span><br><span class="line">27</span><br><span class="line">28</span><br><span class="line">29</span><br><span class="line">30</span><br><span class="line">31</span><br><span class="line">32</span><br><span class="line">33</span><br><span class="line">34</span><br><span class="line">35</span><br><span class="line">36</span><br><span class="line">37</span><br><span class="line">38</span><br><span class="line">39</span><br><span class="line">40</span><br><span class="line">41</span><br><span class="line">42</span><br><span class="line">43</span><br><span class="line">44</span><br><span class="line">45</span><br><span class="line">46</span><br><span class="line">47</span><br><span class="line">48</span><br><span class="line">49</span><br><span class="line">50</span><br><span class="line">51</span><br><span class="line">52</span><br><span class="line">53</span><br><span class="line">54</span><br><span class="line">55</span><br><span class="line">56</span><br><span 
class="line">57</span><br><span class="line">58</span><br><span class="line">59</span><br><span class="line">60</span><br><span class="line">61</span><br><span class="line">62</span><br><span class="line">63</span><br><span class="line">64</span><br><span class="line">65</span><br><span class="line">66</span><br><span class="line">67</span><br><span class="line">68</span><br><span class="line">69</span><br><span class="line">70</span><br><span class="line">71</span><br><span class="line">72</span><br><span class="line">73</span><br><span class="line">74</span><br><span class="line">75</span><br><span class="line">76</span><br><span class="line">77</span><br><span class="line">78</span><br><span class="line">79</span><br><span class="line">80</span><br><span class="line">81</span><br><span class="line">82</span><br><span class="line">83</span><br><span class="line">84</span><br><span class="line">85</span><br><span class="line">86</span><br><span class="line">87</span><br><span class="line">88</span><br><span class="line">89</span><br><span class="line">90</span><br><span class="line">91</span><br><span class="line">92</span><br><span class="line">93</span><br><span class="line">94</span><br><span class="line">95</span><br><span class="line">96</span><br><span class="line">97</span><br><span class="line">98</span><br><span class="line">99</span><br><span class="line">100</span><br><span class="line">101</span><br><span class="line">102</span><br><span class="line">103</span><br><span class="line">104</span><br><span class="line">105</span><br><span class="line">106</span><br><span class="line">107</span><br><span class="line">108</span><br><span class="line">109</span><br><span class="line">110</span><br><span class="line">111</span><br><span class="line">112</span><br><span class="line">113</span><br><span class="line">114</span><br><span class="line">115</span><br><span class="line">116</span><br><span class="line">117</span><br><span 
class="line">118</span><br><span class="line">119</span><br><span class="line">120</span><br><span class="line">121</span><br><span class="line">122</span><br><span class="line">123</span><br><span class="line">124</span><br><span class="line">125</span><br><span class="line">126</span><br><span class="line">127</span><br><span class="line">128</span><br><span class="line">129</span><br><span class="line">130</span><br><span class="line">131</span><br><span class="line">132</span><br><span class="line">133</span><br><span class="line">134</span><br><span class="line">135</span><br><span class="line">136</span><br><span class="line">137</span><br><span class="line">138</span><br><span class="line">139</span><br></pre></td><td class="code"><pre><span class="line">进入hbase命令行</span><br><span class="line">.&#x2F;hbase shell</span><br><span class="line"></span><br><span class="line">显示hbase中的表</span><br><span class="line">list</span><br><span class="line"></span><br><span class="line">创建user表，包含info、data两个列族</span><br><span class="line">create &#39;user&#39;, &#39;info1&#39;, &#39;data1&#39;</span><br><span class="line">create &#39;user&#39;, &#123;NAME &#x3D;&gt; &#39;info&#39;, VERSIONS &#x3D;&gt; &#39;3&#39;&#125;</span><br><span class="line"></span><br><span class="line">向user表中插入信息，row key为rk0001，列族info中添加name列标示符，值为zhangsan</span><br><span class="line">put &#39;user&#39;, &#39;rk0001&#39;, &#39;info:name&#39;, &#39;zhangsan&#39;</span><br><span class="line"></span><br><span class="line">向user表中插入信息，row key为rk0001，列族info中添加gender列标示符，值为female</span><br><span class="line">put &#39;user&#39;, &#39;rk0001&#39;, &#39;info:gender&#39;, &#39;female&#39;</span><br><span class="line"></span><br><span class="line">向user表中插入信息，row key为rk0001，列族info中添加age列标示符，值为20</span><br><span class="line">put &#39;user&#39;, &#39;rk0001&#39;, &#39;info:age&#39;, 20</span><br><span class="line"></span><br><span class="line">向user表中插入信息，row 
key为rk0001，列族data中添加pic列标示符，值为picture</span><br><span class="line">put &#39;user&#39;, &#39;rk0001&#39;, &#39;data:pic&#39;, &#39;picture&#39;</span><br><span class="line"></span><br><span class="line">获取user表中row key为rk0001的所有信息</span><br><span class="line">get &#39;user&#39;, &#39;rk0001&#39;</span><br><span class="line"></span><br><span class="line">获取user表中row key为rk0001，info列族的所有信息</span><br><span class="line">get &#39;user&#39;, &#39;rk0001&#39;, &#39;info&#39;</span><br><span class="line"></span><br><span class="line">获取user表中row key为rk0001，info列族的name、age列标示符的信息</span><br><span class="line">get &#39;user&#39;, &#39;rk0001&#39;, &#39;info:name&#39;, &#39;info:age&#39;</span><br><span class="line"></span><br><span class="line">获取user表中row key为rk0001，info、data列族的信息</span><br><span class="line">get &#39;user&#39;, &#39;rk0001&#39;, &#39;info&#39;, &#39;data&#39;</span><br><span class="line">get &#39;user&#39;, &#39;rk0001&#39;, &#123;COLUMN &#x3D;&gt; [&#39;info&#39;, &#39;data&#39;]&#125;</span><br><span class="line"></span><br><span class="line">get &#39;user&#39;, &#39;rk0001&#39;, &#123;COLUMN &#x3D;&gt; [&#39;info:name&#39;, &#39;data:pic&#39;]&#125;</span><br><span class="line"></span><br><span class="line">获取user表中row key为rk0001，列族为info，版本号最新5个的信息</span><br><span class="line">get &#39;user&#39;, &#39;rk0001&#39;, &#123;COLUMN &#x3D;&gt; &#39;info&#39;, VERSIONS &#x3D;&gt; 2&#125;</span><br><span class="line">get &#39;user&#39;, &#39;rk0001&#39;, &#123;COLUMN &#x3D;&gt; &#39;info:name&#39;, VERSIONS &#x3D;&gt; 5&#125;</span><br><span class="line">get &#39;user&#39;, &#39;rk0001&#39;, &#123;COLUMN &#x3D;&gt; &#39;info:name&#39;, VERSIONS &#x3D;&gt; 5, TIMERANGE &#x3D;&gt; [1392368783980, 1392380169184]&#125;</span><br><span class="line"></span><br><span class="line">获取user表中row key为rk0001，cell的值为zhangsan的信息</span><br><span class="line">get &#39;people&#39;, &#39;rk0001&#39;, &#123;FILTER &#x3D;&gt; &quot;ValueFilter(&#x3D;, 
&#39;binary:图片&#39;)&quot;&#125;</span><br><span class="line"></span><br><span class="line">获取user表中row key为rk0001，列标示符中含有a的信息</span><br><span class="line">get &#39;people&#39;, &#39;rk0001&#39;, &#123;FILTER &#x3D;&gt; &quot;(QualifierFilter(&#x3D;,&#39;substring:a&#39;))&quot;&#125;</span><br><span class="line"></span><br><span class="line">put &#39;user&#39;, &#39;rk0002&#39;, &#39;info:name&#39;, &#39;fanbingbing&#39;</span><br><span class="line">put &#39;user&#39;, &#39;rk0002&#39;, &#39;info:gender&#39;, &#39;female&#39;</span><br><span class="line">put &#39;user&#39;, &#39;rk0002&#39;, &#39;info:nationality&#39;, &#39;中国&#39;</span><br><span class="line">get &#39;user&#39;, &#39;rk0002&#39;, &#123;FILTER &#x3D;&gt; &quot;ValueFilter(&#x3D;, &#39;binary:中国&#39;)&quot;&#125;</span><br><span class="line"></span><br><span class="line"></span><br><span class="line">查询user表中的所有信息</span><br><span class="line">scan &#39;user&#39;</span><br><span class="line"></span><br><span class="line">查询user表中列族为info的信息</span><br><span class="line">scan &#39;user&#39;, &#123;COLUMNS &#x3D;&gt; &#39;info&#39;&#125;</span><br><span class="line">scan &#39;user&#39;, &#123;COLUMNS &#x3D;&gt; &#39;info&#39;, RAW &#x3D;&gt; true, VERSIONS &#x3D;&gt; 5&#125;</span><br><span class="line">scan &#39;persion&#39;, &#123;COLUMNS &#x3D;&gt; &#39;info&#39;, RAW &#x3D;&gt; true, VERSIONS &#x3D;&gt; 3&#125;</span><br><span class="line">查询user表中列族为info和data的信息</span><br><span class="line">scan &#39;user&#39;, &#123;COLUMNS &#x3D;&gt; [&#39;info&#39;, &#39;data&#39;]&#125;</span><br><span class="line">scan &#39;user&#39;, &#123;COLUMNS &#x3D;&gt; [&#39;info:name&#39;, &#39;data:pic&#39;]&#125;</span><br><span class="line"></span><br><span class="line"></span><br><span class="line">查询user表中列族为info、列标示符为name的信息</span><br><span class="line">scan &#39;user&#39;, &#123;COLUMNS &#x3D;&gt; &#39;info:name&#39;&#125;</span><br><span class="line"></span><br><span 
class="line">查询user表中列族为info、列标示符为name的信息,并且版本最新的5个</span><br><span class="line">scan &#39;user&#39;, &#123;COLUMNS &#x3D;&gt; &#39;info:name&#39;, VERSIONS &#x3D;&gt; 5&#125;</span><br><span class="line"></span><br><span class="line">查询user表中列族为info和data且列标示符中含有a字符的信息</span><br><span class="line">scan &#39;user&#39;, &#123;COLUMNS &#x3D;&gt; [&#39;info&#39;, &#39;data&#39;], FILTER &#x3D;&gt; &quot;(QualifierFilter(&#x3D;,&#39;substring:a&#39;))&quot;&#125;</span><br><span class="line"></span><br><span class="line">查询user表中列族为info，rk范围是[rk0001, rk0003)的数据</span><br><span class="line">scan &#39;people&#39;, &#123;COLUMNS &#x3D;&gt; &#39;info&#39;, STARTROW &#x3D;&gt; &#39;rk0001&#39;, ENDROW &#x3D;&gt; &#39;rk0003&#39;&#125;</span><br><span class="line"></span><br><span class="line">查询user表中row key以rk字符开头的</span><br><span class="line">scan &#39;user&#39;,&#123;FILTER&#x3D;&gt;&quot;PrefixFilter(&#39;rk&#39;)&quot;&#125;</span><br><span class="line"></span><br><span class="line">查询user表中指定范围的数据</span><br><span class="line">scan &#39;user&#39;, &#123;TIMERANGE &#x3D;&gt; [1392368783980, 1392380169184]&#125;</span><br><span class="line"></span><br><span class="line">删除数据</span><br><span class="line">删除user表row key为rk0001，列标示符为info:name的数据</span><br><span class="line">delete &#39;people&#39;, &#39;rk0001&#39;, &#39;info:name&#39;</span><br><span class="line">删除user表row key为rk0001，列标示符为info:name，timestamp为1392383705316的数据</span><br><span class="line">delete &#39;user&#39;, &#39;rk0001&#39;, &#39;info:name&#39;, 1392383705316</span><br><span class="line"></span><br><span class="line"></span><br><span class="line">清空user表中的数据</span><br><span class="line">truncate &#39;people&#39;</span><br><span class="line"></span><br><span class="line"></span><br><span class="line">修改表结构</span><br><span class="line">首先停用user表（新版本不用）</span><br><span class="line">disable &#39;user&#39;</span><br><span class="line"></span><br><span class="line">添加两个列族f1和f2</span><br><span 
class="line">alter &#39;people&#39;, NAME &#x3D;&gt; &#39;f1&#39;</span><br><span class="line">alter &#39;user&#39;, NAME &#x3D;&gt; &#39;f2&#39;</span><br><span class="line">启用表</span><br><span class="line">enable &#39;user&#39;</span><br><span class="line"></span><br><span class="line"></span><br><span class="line">###disable &#39;user&#39;(新版本不用)</span><br><span class="line">删除一个列族：</span><br><span class="line">alter &#39;user&#39;, NAME &#x3D;&gt; &#39;f1&#39;, METHOD &#x3D;&gt; &#39;delete&#39; 或 alter &#39;user&#39;, &#39;delete&#39; &#x3D;&gt; &#39;f1&#39;</span><br><span class="line"></span><br><span class="line">添加列族f1同时删除列族f2</span><br><span class="line">alter &#39;user&#39;, &#123;NAME &#x3D;&gt; &#39;f1&#39;&#125;, &#123;NAME &#x3D;&gt; &#39;f2&#39;, METHOD &#x3D;&gt; &#39;delete&#39;&#125;</span><br><span class="line"></span><br><span class="line">将user表的f1列族版本号改为5</span><br><span class="line">alter &#39;people&#39;, NAME &#x3D;&gt; &#39;info&#39;, VERSIONS &#x3D;&gt; 5</span><br><span class="line">启用表</span><br><span class="line">enable &#39;user&#39;</span><br><span class="line"></span><br><span class="line"></span><br><span class="line">删除表</span><br><span class="line">disable &#39;user&#39;</span><br><span class="line">drop &#39;user&#39;</span><br><span class="line"></span><br><span class="line"></span><br><span class="line">get &#39;person&#39;, &#39;rk0001&#39;, &#123;FILTER &#x3D;&gt; &quot;ValueFilter(&#x3D;, &#39;binary:中国&#39;)&quot;&#125;</span><br><span class="line">get &#39;person&#39;, &#39;rk0001&#39;, &#123;FILTER &#x3D;&gt; &quot;(QualifierFilter(&#x3D;,&#39;substring:a&#39;))&quot;&#125;</span><br><span class="line">scan &#39;person&#39;, &#123;COLUMNS &#x3D;&gt; &#39;info:name&#39;&#125;</span><br><span class="line">scan &#39;person&#39;, &#123;COLUMNS &#x3D;&gt; [&#39;info&#39;, &#39;data&#39;], FILTER &#x3D;&gt; &quot;(QualifierFilter(&#x3D;,&#39;substring:a&#39;))&quot;&#125;</span><br><span class="line">scan &#39;person&#39;, 
&#123;COLUMNS &#x3D;&gt; &#39;info&#39;, STARTROW &#x3D;&gt; &#39;rk0001&#39;, ENDROW &#x3D;&gt; &#39;rk0003&#39;&#125;</span><br><span class="line"></span><br><span class="line">scan &#39;person&#39;, &#123;COLUMNS &#x3D;&gt; &#39;info&#39;, STARTROW &#x3D;&gt; &#39;20140201&#39;, ENDROW &#x3D;&gt; &#39;20140301&#39;&#125;</span><br><span class="line">scan &#39;person&#39;, &#123;COLUMNS &#x3D;&gt; &#39;info:name&#39;, TIMERANGE &#x3D;&gt; [1395978233636, 1395987769587]&#125;</span><br><span class="line">delete &#39;person&#39;, &#39;rk0001&#39;, &#39;info:name&#39;</span><br><span class="line"></span><br><span class="line">alter &#39;person&#39;, NAME &#x3D;&gt; &#39;ffff&#39;</span><br><span class="line">alter &#39;person&#39;, NAME &#x3D;&gt; &#39;info&#39;, VERSIONS &#x3D;&gt; 10</span><br><span class="line"></span><br><span class="line"></span><br><span class="line">get &#39;user&#39;, &#39;rk0002&#39;, &#123;COLUMN &#x3D;&gt; [&#39;info:name&#39;, &#39;data:pic&#39;]&#125;</span><br></pre></td></tr></table></figure>
      
    </div>

    
    
    

    <footer class="post-footer">
        <div class="post-eof"></div>
      
    </footer>
  </article>
</div>




    


<div class="post-block">
  
  

  <article itemscope itemtype="http://schema.org/Article" class="post-content" lang="zh">
    <link itemprop="mainEntityOfPage" href="https://suyuhuan.gitee.io/yuwanzi.io/2016/07/15/2016-07-15-Hadoop03-HA/">

    <span hidden itemprop="author" itemscope itemtype="http://schema.org/Person">
      <meta itemprop="image" content="/yuwanzi.io/images/avatar.gif">
      <meta itemprop="name" content="玉丸子">
      <meta itemprop="description" content="这里是玉丸子的个人博客,与你一起发现更大的世界。">
    </span>

    <span hidden itemprop="publisher" itemscope itemtype="http://schema.org/Organization">
      <meta itemprop="name" content="玉丸子 | Blog">
    </span>
      <header class="post-header">
        <h2 class="post-title" itemprop="name headline">
          <a href="/yuwanzi.io/2016/07/15/2016-07-15-Hadoop03-HA/" class="post-title-link" itemprop="url">Hadoop学习笔记(3)-HA高可用集群搭建</a>
        </h2>

        <div class="post-meta-container">
          <div class="post-meta">
    <span class="post-meta-item">
      <span class="post-meta-item-icon">
        <i class="far fa-calendar"></i>
      </span>
      <span class="post-meta-item-text">Veröffentlicht am</span>

      <time title="Erstellt: 2016-07-15 18:00:00" itemprop="dateCreated datePublished" datetime="2016-07-15T18:00:00+08:00">2016-07-15</time>
    </span>
      <span class="post-meta-item">
        <span class="post-meta-item-icon">
          <i class="far fa-calendar-check"></i>
        </span>
        <span class="post-meta-item-text">Bearbeitet am</span>
        <time title="Geändert am: 2020-11-07 08:58:17" itemprop="dateModified" datetime="2020-11-07T08:58:17+08:00">2020-11-07</time>
      </span>
    <span class="post-meta-item">
      <span class="post-meta-item-icon">
        <i class="far fa-folder"></i>
      </span>
      <span class="post-meta-item-text">in</span>
        <span itemprop="about" itemscope itemtype="http://schema.org/Thing">
          <a href="/yuwanzi.io/categories/%E5%90%8E%E7%AB%AF/" itemprop="url" rel="index"><span itemprop="name">后端</span></a>
        </span>
          . 
        <span itemprop="about" itemscope itemtype="http://schema.org/Thing">
          <a href="/yuwanzi.io/categories/%E5%90%8E%E7%AB%AF/%E5%A4%A7%E6%95%B0%E6%8D%AE/" itemprop="url" rel="index"><span itemprop="name">大数据</span></a>
        </span>
          . 
        <span itemprop="about" itemscope itemtype="http://schema.org/Thing">
          <a href="/yuwanzi.io/categories/%E5%90%8E%E7%AB%AF/%E5%A4%A7%E6%95%B0%E6%8D%AE/Hadoop/" itemprop="url" rel="index"><span itemprop="name">Hadoop</span></a>
        </span>
    </span>

  
</div>

        </div>
      </header>

    
    
    
    <div class="post-body" itemprop="articleBody">
          <p><img src="http://ww4.sinaimg.cn/mw690/63503acbjw1f5w446w8pcj20bp08rweu.jpg"></p>
<h3 id="概述"><a href="#概述" class="headerlink" title="概述"></a>概述</h3><p>&nbsp;&nbsp;HA(High Available), 高可用性集群，是保证业务连续性的有效解决方案，一般有两个或两个以上的节点，且分为活动节点及备用节点。通常把正在执行业务的称为活动节点，而作为活动节点的一个备份的则称为备用节点。当活动节点出现问题，导致正在运行的业务（任务）不能正常运行时，备用节点此时就会侦测到，并立即接续活动节点来执行业务。从而实现业务的不中断或短暂中断。</p>
<p>&nbsp;&nbsp;在hadoop2.0之前,每个集群只有一个NameNode,如果那台机器坏掉,集群作为一个整体将不可用,所以为了解决这个问题Hadoop2.0引入了HA机制,可以通过在同一集群上配置运行两个冗余的NameNodes，做到主动/被动的热备份。这将允许当一个机器宕机时，快速转移到一个新的NameNode，或管理员进行利用故障转移达到优雅的系统升级的目的。</p>
<p>&nbsp;&nbsp;HA一共有两种解决方案，一种是NFS（Network File System）方式，另外一种是QJM（Quorum Journal Manager）方式。</p>
<h3 id="HA架构"><a href="#HA架构" class="headerlink" title="HA架构"></a>HA架构</h3><p><img src="http://ww2.sinaimg.cn/mw690/63503acbjw1f5w61d75ixj20p40jidhc.jpg"></p>
<p>&nbsp;&nbsp;一个典型的HA集群，NameNode会被配置在两台独立的机器上.在任何的时间上，一个NameNode处于活动状态，而另一个在备份状态，活动状态的NameNode会响应集群中所有的客户端，同时备份的只是作为一个副本，保证在必要的时候提供一个快速的转移。</p>
<p>&nbsp;&nbsp;为了使备份的节点和活动的节点保持一致，两个节点通过一个特殊的守护线程相连，这个线程叫做“JournalNodes”（JNs）。当活动状态的节点修改任何的命名空间，它都会通过这些JNs记录日志，备用的节点可以监控edit日志的变化，并且通过JNs读取到变化。备用节点会将读取到的edits应用到自己的namespace。在故障转移的时候备份节点将在切换至活动状态前确认它从JNs读取到的所有edits。这个确认的目的是为了保证Namespace的状态和迁移之前是完全同步的。</p>
<p>&nbsp;&nbsp;为了提供一个快速的转移，备份NameNode要求保存着最新的block在集群当中的信息。为了能够得到这个，DataNode都被配置了所有的NameNode的地址，并且发送block的地址信息和心跳给两个node。</p>
<p>&nbsp;&nbsp;保证只有一个活跃的NameNode在集群当中是一个十分重要的一步。否则namespace状态在两个节点间不同会导致数据丢失或者其他一些不正确的结果。为了确保这个,防止所谓split-brain场景,JournalNodes将只允许一个NameNode进行写操作。故障转移期间,NameNode成为活跃状态的时候会接管JournalNodes的写权限,这会有效防止其他NameNode持续处于活跃状态,允许新的活动节点安全进行故障转移。</p>
<h3 id="搭建HA集群"><a href="#搭建HA集群" class="headerlink" title="搭建HA集群"></a>搭建HA集群</h3><p>&nbsp;&nbsp;hadoop-2.2.0中依然存在一个问题，就是ResourceManager只有一个，存在单点故障，hadoop-2.4.1解决了这个问题，可以有两个ResourceManager，一个是Active，一个是Standby，状态由zookeeper进行协调。</p>
<h4 id="测试集群规划"><a href="#测试集群规划" class="headerlink" title="测试集群规划"></a>测试集群规划</h4><p>&nbsp;&nbsp;实验使用7台虚拟机,规划如下:</p>
<table>
<thead>
<tr>
<th>HostName</th>
<th>IP</th>
<th>Software</th>
<th>Process</th>
</tr>
</thead>
<tbody><tr>
<td>datanode01</td>
<td>192.168.145.140</td>
<td>jdk、hadoop、zookeeper</td>
<td>DataNode、NodeManager、JournalNode、QuorumPeerMain</td>
</tr>
<tr>
<td>datanode02</td>
<td>192.168.145.141</td>
<td>jdk、hadoop、zookeeper</td>
<td>DataNode、NodeManager、JournalNode、QuorumPeerMain</td>
</tr>
<tr>
<td>datanode03</td>
<td>192.168.145.142</td>
<td>jdk、hadoop、zookeeper</td>
<td>DataNode、NodeManager、JournalNode、QuorumPeerMain</td>
</tr>
<tr>
<td>namenode01</td>
<td>192.168.145.143</td>
<td>jdk、hadoop</td>
<td>NameNode、DFSZKFailoverController(ZKFC)</td>
</tr>
<tr>
<td>namenode02</td>
<td>192.168.145.144</td>
<td>jdk、hadoop</td>
<td>NameNode、DFSZKFailoverController(ZKFC)</td>
</tr>
<tr>
<td>yarn01</td>
<td>192.168.145.145</td>
<td>jdk、hadoop</td>
<td>ResourceManager</td>
</tr>
<tr>
<td>yarn02</td>
<td>192.168.145.146</td>
<td>jdk、hadoop</td>
<td>ResourceManager</td>
</tr>
</tbody></table>
<h4 id="ssh免密登陆"><a href="#ssh免密登陆" class="headerlink" title="ssh免密登陆"></a>ssh免密登陆</h4><ul>
<li>namenode01需要配置所有datanode、yarn、namenode的免密登陆。</li>
<li>namenode02需要配置namenode01的免密登陆。</li>
<li>yarn01需要配置所有nodemanager与resourcemanager的免密登陆。</li>
</ul>
<figure class="highlight plain"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br><span class="line">13</span><br><span class="line">14</span><br><span class="line">15</span><br><span class="line">16</span><br><span class="line">17</span><br><span class="line">18</span><br><span class="line">19</span><br><span class="line">20</span><br><span class="line">21</span><br><span class="line">22</span><br><span class="line">23</span><br></pre></td><td class="code"><pre><span class="line">#在namenode01上生成密匙</span><br><span class="line">ssh-keygen</span><br><span class="line">#namenode01拷贝密匙(包括自己)</span><br><span class="line">ssh-copy-id namenode01</span><br><span class="line">ssh-copy-id namenode02</span><br><span class="line">ssh-copy-id datanode01</span><br><span class="line">ssh-copy-id datanode02</span><br><span class="line">ssh-copy-id datanode03</span><br><span class="line">ssh-copy-id yarn01</span><br><span class="line">ssh-copy-id yarn02</span><br><span class="line"></span><br><span class="line">#在namenode02上生成密匙</span><br><span class="line">ssh-keygen</span><br><span class="line">ssh-copy-id namenode01</span><br><span class="line">ssh-copy-id namenode02</span><br><span class="line"></span><br><span class="line">#在yarn01上生成密匙</span><br><span class="line">ssh-keygen</span><br><span class="line">#yarn01拷贝密匙</span><br><span class="line">ssh-copy-id yarn02</span><br><span class="line">ssh-copy-id datanode01</span><br><span class="line">ssh-copy-id datanode02</span><br><span class="line">ssh-copy-id datanode03</span><br></pre></td></tr></table></figure>
<h4 id="安装zookeeper"><a href="#安装zookeeper" class="headerlink" title="安装zookeeper"></a>安装zookeeper</h4><ol>
<li>解压zookeeper</li>
<li>重命名zookeeper/conf下的zoo_sample.cfg为zoo.cfg : mv zoo_sample.cfg zoo.cfg</li>
<li>在zoo.cfg中修改dataDir=$ZOOKEEPERHOME/data 这个文件需要自己创建<br>例如:dataDir=/home/application/zookeeper-3.4.5/data</li>
<li>在zoo.cfg中最后添加 server.id=ip:2888:3888<br>例如:<pre><code>server.1=datanode01:2888:3888
server.2=datanode02:2888:3888
server.3=datanode03:2888:3888</code></pre>
</li>
<li>在$ZOOKEEPERHOME/data目录中创建一个myid文件并写入id。<br>例如: echo 1 &gt;/home/application/zookeeper-3.4.5/data/myid<br>id需要跟zoo.cfg中配置的一致。</li>
</ol>
<h4 id="core-site-xml"><a href="#core-site-xml" class="headerlink" title="core-site.xml"></a>core-site.xml</h4><figure class="highlight plain"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br><span class="line">13</span><br><span class="line">14</span><br><span class="line">15</span><br><span class="line">16</span><br><span class="line">17</span><br></pre></td><td class="code"><pre><span class="line">&lt;configuration&gt;</span><br><span class="line">	&lt;!-- 指定hdfs的nameservice为ns1 --&gt;</span><br><span class="line">	&lt;property&gt;</span><br><span class="line">		&lt;name&gt;fs.defaultFS&lt;&#x2F;name&gt;</span><br><span class="line">		&lt;value&gt;hdfs:&#x2F;&#x2F;ns1&#x2F;&lt;&#x2F;value&gt;</span><br><span class="line">	&lt;&#x2F;property&gt;</span><br><span class="line">	&lt;!-- 指定hadoop临时目录 --&gt;</span><br><span class="line">	&lt;property&gt;</span><br><span class="line">		&lt;name&gt;hadoop.tmp.dir&lt;&#x2F;name&gt;</span><br><span class="line">		&lt;value&gt;&#x2F;home&#x2F;application&#x2F;hadoop-2.6.0&#x2F;tmp&lt;&#x2F;value&gt;</span><br><span class="line">	&lt;&#x2F;property&gt;				</span><br><span class="line">	&lt;!-- 指定zookeeper地址 --&gt;</span><br><span class="line">	&lt;property&gt;</span><br><span class="line">		&lt;name&gt;ha.zookeeper.quorum&lt;&#x2F;name&gt;</span><br><span class="line">		&lt;value&gt;datanode01:2181,datanode02:2181,datanode03:2181&lt;&#x2F;value&gt;</span><br><span class="line">	&lt;&#x2F;property&gt;</span><br><span class="line">&lt;&#x2F;configuration&gt;</span><br></pre></td></tr></table></figure>
<h4 id="hdfs-site-xml"><a href="#hdfs-site-xml" class="headerlink" title="hdfs-site.xml"></a>hdfs-site.xml</h4><figure class="highlight plain"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br><span class="line">13</span><br><span class="line">14</span><br><span class="line">15</span><br><span class="line">16</span><br><span class="line">17</span><br><span class="line">18</span><br><span class="line">19</span><br><span class="line">20</span><br><span class="line">21</span><br><span class="line">22</span><br><span class="line">23</span><br><span class="line">24</span><br><span class="line">25</span><br><span class="line">26</span><br><span class="line">27</span><br><span class="line">28</span><br><span class="line">29</span><br><span class="line">30</span><br><span class="line">31</span><br><span class="line">32</span><br><span class="line">33</span><br><span class="line">34</span><br><span class="line">35</span><br><span class="line">36</span><br><span class="line">37</span><br><span class="line">38</span><br><span class="line">39</span><br><span class="line">40</span><br><span class="line">41</span><br><span class="line">42</span><br><span class="line">43</span><br><span class="line">44</span><br><span class="line">45</span><br><span class="line">46</span><br><span class="line">47</span><br><span class="line">48</span><br><span class="line">49</span><br><span class="line">50</span><br><span class="line">51</span><br><span class="line">52</span><br><span class="line">53</span><br><span class="line">54</span><br><span class="line">55</span><br><span class="line">56</span><br><span class="line">57</span><br><span 
class="line">58</span><br><span class="line">59</span><br><span class="line">60</span><br><span class="line">61</span><br><span class="line">62</span><br><span class="line">63</span><br><span class="line">64</span><br><span class="line">65</span><br><span class="line">66</span><br><span class="line">67</span><br><span class="line">68</span><br><span class="line">69</span><br><span class="line">70</span><br><span class="line">71</span><br></pre></td><td class="code"><pre><span class="line">&lt;configuration&gt;</span><br><span class="line">	&lt;!--指定hdfs的nameservice为ns1，需要和core-site.xml中的保持一致.</span><br><span class="line">	  这个名字是逻辑名字,可以是任意的,它将被用来配置在集群中作为HDFS的绝对路径组件。--&gt;</span><br><span class="line">	&lt;property&gt;</span><br><span class="line">		&lt;name&gt;dfs.nameservices&lt;&#x2F;name&gt;</span><br><span class="line">		&lt;value&gt;ns1&lt;&#x2F;value&gt;</span><br><span class="line">	&lt;&#x2F;property&gt;</span><br><span class="line">	&lt;!-- ns1下面有两个NameNode，分别是nn1，nn2 --&gt;</span><br><span class="line">	&lt;property&gt;</span><br><span class="line">		&lt;name&gt;dfs.ha.namenodes.ns1&lt;&#x2F;name&gt;</span><br><span class="line">		&lt;value&gt;nn1,nn2&lt;&#x2F;value&gt;</span><br><span class="line">	&lt;&#x2F;property&gt;</span><br><span class="line">	&lt;!-- nn1的RPC通信地址 --&gt;</span><br><span class="line">	&lt;property&gt;</span><br><span class="line">		&lt;name&gt;dfs.namenode.rpc-address.ns1.nn1&lt;&#x2F;name&gt;</span><br><span class="line">		&lt;value&gt;namenode01:9000&lt;&#x2F;value&gt;</span><br><span class="line">	&lt;&#x2F;property&gt;</span><br><span class="line">	&lt;!-- nn1的http通信地址 --&gt;</span><br><span class="line">	&lt;property&gt;</span><br><span class="line">		&lt;name&gt;dfs.namenode.http-address.ns1.nn1&lt;&#x2F;name&gt;</span><br><span class="line">		&lt;value&gt;namenode01:50070&lt;&#x2F;value&gt;</span><br><span class="line">	&lt;&#x2F;property&gt;</span><br><span class="line">	&lt;!-- nn2的RPC通信地址 --&gt;</span><br><span 
class="line">	&lt;property&gt;</span><br><span class="line">		&lt;name&gt;dfs.namenode.rpc-address.ns1.nn2&lt;&#x2F;name&gt;</span><br><span class="line">		&lt;value&gt;namenode02:9000&lt;&#x2F;value&gt;</span><br><span class="line">	&lt;&#x2F;property&gt;</span><br><span class="line">	&lt;!-- nn2的http通信地址 --&gt;</span><br><span class="line">	&lt;property&gt;</span><br><span class="line">		&lt;name&gt;dfs.namenode.http-address.ns1.nn2&lt;&#x2F;name&gt;</span><br><span class="line">		&lt;value&gt;namenode02:50070&lt;&#x2F;value&gt;</span><br><span class="line">	&lt;&#x2F;property&gt;</span><br><span class="line">	&lt;!-- 指定NameNode的元数据在JournalNode上的存放位置 --&gt;</span><br><span class="line">	&lt;property&gt;</span><br><span class="line">		&lt;name&gt;dfs.namenode.shared.edits.dir&lt;&#x2F;name&gt;</span><br><span class="line">		&lt;value&gt;qjournal:&#x2F;&#x2F;datanode01:8485;datanode02:8485;datanode03:8485&#x2F;ns1&lt;&#x2F;value&gt;</span><br><span class="line">	&lt;&#x2F;property&gt;</span><br><span class="line">	&lt;!-- 指定JournalNode在本地磁盘存放数据的位置 --&gt;</span><br><span class="line">	&lt;property&gt;</span><br><span class="line">		&lt;name&gt;dfs.journalnode.edits.dir&lt;&#x2F;name&gt;</span><br><span class="line">		&lt;value&gt;&#x2F;home&#x2F;application&#x2F;hadoop-2.6.0&#x2F;journaldata&lt;&#x2F;value&gt;</span><br><span class="line">	&lt;&#x2F;property&gt;</span><br><span class="line">	&lt;!-- 开启NameNode失败自动切换 --&gt;</span><br><span class="line">	&lt;property&gt;</span><br><span class="line">		&lt;name&gt;dfs.ha.automatic-failover.enabled&lt;&#x2F;name&gt;</span><br><span class="line">		&lt;value&gt;true&lt;&#x2F;value&gt;</span><br><span class="line">	&lt;&#x2F;property&gt;</span><br><span class="line">	&lt;!-- 配置失败自动切换实现方式 --&gt;</span><br><span class="line">	&lt;property&gt;</span><br><span class="line">		&lt;name&gt;dfs.client.failover.proxy.provider.ns1&lt;&#x2F;name&gt;</span><br><span class="line">		
&lt;value&gt;org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider&lt;&#x2F;value&gt;</span><br><span class="line">	&lt;&#x2F;property&gt;</span><br><span class="line">	&lt;!-- 配置隔离机制方法，多个机制用换行分割，即每个机制暂用一行--&gt;</span><br><span class="line">	&lt;property&gt;</span><br><span class="line">		&lt;name&gt;dfs.ha.fencing.methods&lt;&#x2F;name&gt;</span><br><span class="line">		&lt;value&gt;</span><br><span class="line">			sshfence</span><br><span class="line">			shell(&#x2F;bin&#x2F;true)</span><br><span class="line">		&lt;&#x2F;value&gt;</span><br><span class="line">	&lt;&#x2F;property&gt;</span><br><span class="line">	&lt;!-- 使用sshfence隔离机制时需要ssh免登陆 --&gt;</span><br><span class="line">	&lt;property&gt;</span><br><span class="line">		&lt;name&gt;dfs.ha.fencing.ssh.private-key-files&lt;&#x2F;name&gt;</span><br><span class="line">		&lt;value&gt;&#x2F;root&#x2F;.ssh&#x2F;id_rsa&lt;&#x2F;value&gt;</span><br><span class="line">	&lt;&#x2F;property&gt;</span><br><span class="line">	&lt;!-- 配置sshfence隔离机制超时时间 --&gt;</span><br><span class="line">	&lt;property&gt;</span><br><span class="line">		&lt;name&gt;dfs.ha.fencing.ssh.connect-timeout&lt;&#x2F;name&gt;</span><br><span class="line">		&lt;value&gt;30000&lt;&#x2F;value&gt;</span><br><span class="line">	&lt;&#x2F;property&gt;</span><br><span class="line">&lt;&#x2F;configuration&gt;</span><br></pre></td></tr></table></figure>
<h4 id="mapred-site-xml"><a href="#mapred-site-xml" class="headerlink" title="mapred-site.xml"></a>mapred-site.xml</h4><figure class="highlight plain"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br></pre></td><td class="code"><pre><span class="line">&lt;configuration&gt;</span><br><span class="line">	&lt;!-- 指定mr框架为yarn方式 --&gt;</span><br><span class="line">	&lt;property&gt;</span><br><span class="line">		&lt;name&gt;mapreduce.framework.name&lt;&#x2F;name&gt;</span><br><span class="line">		&lt;value&gt;yarn&lt;&#x2F;value&gt;</span><br><span class="line">	&lt;&#x2F;property&gt;</span><br><span class="line">&lt;&#x2F;configuration&gt;</span><br></pre></td></tr></table></figure>
<h4 id="yarn-site-xml"><a href="#yarn-site-xml" class="headerlink" title="yarn-site.xml"></a>yarn-site.xml</h4><figure class="highlight plain"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br><span class="line">13</span><br><span class="line">14</span><br><span class="line">15</span><br><span class="line">16</span><br><span class="line">17</span><br><span class="line">18</span><br><span class="line">19</span><br><span class="line">20</span><br><span class="line">21</span><br><span class="line">22</span><br><span class="line">23</span><br><span class="line">24</span><br><span class="line">25</span><br><span class="line">26</span><br><span class="line">27</span><br><span class="line">28</span><br><span class="line">29</span><br><span class="line">30</span><br><span class="line">31</span><br><span class="line">32</span><br><span class="line">33</span><br><span class="line">34</span><br><span class="line">35</span><br></pre></td><td class="code"><pre><span class="line">&lt;configuration&gt;</span><br><span class="line">	&lt;!-- 开启RM高可用 --&gt;</span><br><span class="line">	&lt;property&gt;</span><br><span class="line">		&lt;name&gt;yarn.resourcemanager.ha.enabled&lt;&#x2F;name&gt;</span><br><span class="line">		&lt;value&gt;true&lt;&#x2F;value&gt;</span><br><span class="line">	&lt;&#x2F;property&gt;</span><br><span class="line">	&lt;!-- 指定RM的cluster id 这是一个逻辑名称,可以是任意的 --&gt;</span><br><span class="line">	&lt;property&gt;</span><br><span class="line">		&lt;name&gt;yarn.resourcemanager.cluster-id&lt;&#x2F;name&gt;</span><br><span class="line">		&lt;value&gt;yarncluster&lt;&#x2F;value&gt;</span><br><span class="line">	
&lt;&#x2F;property&gt;</span><br><span class="line">	&lt;!-- 指定RM的名字 --&gt;</span><br><span class="line">	&lt;property&gt;</span><br><span class="line">		&lt;name&gt;yarn.resourcemanager.ha.rm-ids&lt;&#x2F;name&gt;</span><br><span class="line">		&lt;value&gt;rm1,rm2&lt;&#x2F;value&gt;</span><br><span class="line">	&lt;&#x2F;property&gt;</span><br><span class="line">	&lt;!-- 分别指定RM的地址 --&gt;</span><br><span class="line">	&lt;property&gt;</span><br><span class="line">		&lt;name&gt;yarn.resourcemanager.hostname.rm1&lt;&#x2F;name&gt;</span><br><span class="line">		&lt;value&gt;yarn01&lt;&#x2F;value&gt;</span><br><span class="line">	&lt;&#x2F;property&gt;</span><br><span class="line">	&lt;property&gt;</span><br><span class="line">		&lt;name&gt;yarn.resourcemanager.hostname.rm2&lt;&#x2F;name&gt;</span><br><span class="line">		&lt;value&gt;yarn02&lt;&#x2F;value&gt;</span><br><span class="line">	&lt;&#x2F;property&gt;</span><br><span class="line">	&lt;!-- 指定zk集群地址 --&gt;</span><br><span class="line">	&lt;property&gt;</span><br><span class="line">		&lt;name&gt;yarn.resourcemanager.zk-address&lt;&#x2F;name&gt;</span><br><span class="line">		&lt;value&gt;datanode01:2181,datanode02:2181,datanode03:2181&lt;&#x2F;value&gt;</span><br><span class="line">	&lt;&#x2F;property&gt;</span><br><span class="line">	&lt;property&gt;</span><br><span class="line">		&lt;name&gt;yarn.nodemanager.aux-services&lt;&#x2F;name&gt;</span><br><span class="line">		&lt;value&gt;mapreduce_shuffle&lt;&#x2F;value&gt;</span><br><span class="line">	&lt;&#x2F;property&gt;</span><br><span class="line">&lt;&#x2F;configuration&gt;</span><br></pre></td></tr></table></figure>
<h4 id="slaves"><a href="#slaves" class="headerlink" title="slaves"></a>slaves</h4><p>&nbsp;&nbsp;slaves指定子节点(DataNode)位置,因为yarn与HDFS分开启动,所以在yarn01中slaves指定的是NodeManager的位置。</p>
<h3 id="启动HA集群"><a href="#启动HA集群" class="headerlink" title="启动HA集群"></a>启动HA集群</h3><ol>
<li><p>启动zookeeper集群</p>
</li>
<li><p>启动JournalNode,一旦JNs启动，必须进行一次初始化同步在两个HA的NameNode，主要是为了元数据。<br>sbin/hadoop-daemon.sh start journalnode</p>
</li>
<li><p>格式化HDFS。<br>在namenode01上执行命令 hdfs namenode -format<br>格式化后会根据core-site.xml中的hadoop.tmp.dir配置生成tmp文件夹,为了同步元数据,需要将该tmp文件夹copy到namenode02上。<br>scp -r tmp/ namenode02:/home/application/hadoop-2.6.0/<br>也可以使用命令 hdfs namenode -bootstrapStandby</p>
</li>
<li><p>格式化ZKFC<br>在namenode01上执行命令 hdfs zkfc -formatZK</p>
</li>
<li><p>启动HDFS<br>在namenode01上执行命令 sbin/start-dfs.sh</p>
</li>
<li><p>启动Yarn<br>在yarn01上执行命令 sbin/start-yarn.sh。</p>
</li>
<li><p>因为自带的start-yarn.sh脚本并不会远程启动第二个RM,所以需要在yarn02上单独启动一个RM。<br>在yarn02上执行命令 sbin/yarn-daemon.sh start resourcemanager</p>
</li>
</ol>
<h3 id="管理命令"><a href="#管理命令" class="headerlink" title="管理命令"></a>管理命令</h3><figure class="highlight plain"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br></pre></td><td class="code"><pre><span class="line">Usage: DFSHAAdmin [-ns &lt;nameserviceId&gt;]</span><br><span class="line">    [-transitionToActive &lt;serviceId&gt;]</span><br><span class="line">    [-transitionToStandby &lt;serviceId&gt;]</span><br><span class="line">    [-failover [--forcefence] [--forceactive] &lt;serviceId&gt; &lt;serviceId&gt;]</span><br><span class="line">    [-getServiceState &lt;serviceId&gt;]</span><br><span class="line">    [-checkHealth &lt;serviceId&gt;]</span><br><span class="line">    [-help &lt;command&gt;]</span><br></pre></td></tr></table></figure>
<p>&nbsp;&nbsp;描述了常用的命令，每个子命令的详细信息你应该运行“hdfs haadmin -help &lt;command&gt;”。</p>
<p><strong>transitionToActive &amp;&amp; transitionToStandby</strong> </p>
<p>&nbsp;&nbsp;切换NameNode的状态（Active或者Standby),这些子命令会使NameNode分别转换状态。</p>
<p><strong>failover</strong></p>
<p>&nbsp;&nbsp;启动两个NameNode之间的故障迁移。</p>
<p>&nbsp;&nbsp;这个子命令会从第一个NameNode迁移到第二个，如果第一个NameNode处于备用状态,这个命令只是没有错误的转换第二个节点到活动状态。如果第一个NameNode处于活跃状态,会试图将其优雅地转换到备用状态。如果失败,隔离方法(如由dfs.ha.fencing.methods配置)将依次尝试隔离直到成功。只有在这个过程之后第二个NameNode才会转换为活动状态，如果没有隔离方法成功，第二个NameNode将不会活动并返回一个错误。</p>
<p><strong>getServiceState</strong></p>
<p>&nbsp;&nbsp;连接到NameNode，判断其当前的状态并将“standby”或者“active”打印到标准输出。这个子命令可以被cron jobs或者是监控脚本使用，为了针对不同状态的NameNode采用不同的行为。</p>
<p><strong>checkHealth</strong></p>
<p>&nbsp;&nbsp;连接NameNode检查健康，NameNode能够执行一些诊断,包括检查内部服务是否正在运行。如果返回0表明NameNode健康，否则返回非0。可以使用此命令用于监测目的。</p>
<p>&nbsp;&nbsp;注意：这个功能实现的不完整，目前除了NameNode完全的关闭，其他全部返回成功。</p>

      
    </div>

    
    
    

    <footer class="post-footer">
        <div class="post-eof"></div>
      
    </footer>
  </article>
</div>




    


<div class="post-block">
  
  

  <article itemscope itemtype="http://schema.org/Article" class="post-content" lang="zh">
    <link itemprop="mainEntityOfPage" href="https://suyuhuan.gitee.io/yuwanzi.io/2016/07/14/2016-07-14-Hadoop02-MapReduce/">

    <span hidden itemprop="author" itemscope itemtype="http://schema.org/Person">
      <meta itemprop="image" content="/yuwanzi.io/images/avatar.gif">
      <meta itemprop="name" content="玉丸子">
      <meta itemprop="description" content="这里是玉丸子的个人博客,与你一起发现更大的世界。">
    </span>

    <span hidden itemprop="publisher" itemscope itemtype="http://schema.org/Organization">
      <meta itemprop="name" content="玉丸子 | Blog">
    </span>
      <header class="post-header">
        <h2 class="post-title" itemprop="name headline">
          <a href="/yuwanzi.io/2016/07/14/2016-07-14-Hadoop02-MapReduce/" class="post-title-link" itemprop="url">Hadoop学习笔记(2)-Mapreduce</a>
        </h2>

        <div class="post-meta-container">
          <div class="post-meta">
    <span class="post-meta-item">
      <span class="post-meta-item-icon">
        <i class="far fa-calendar"></i>
      </span>
      <span class="post-meta-item-text">Veröffentlicht am</span>

      <time title="Erstellt: 2016-07-14 18:00:00" itemprop="dateCreated datePublished" datetime="2016-07-14T18:00:00+08:00">2016-07-14</time>
    </span>
      <span class="post-meta-item">
        <span class="post-meta-item-icon">
          <i class="far fa-calendar-check"></i>
        </span>
        <span class="post-meta-item-text">Bearbeitet am</span>
        <time title="Geändert am: 2020-11-07 08:58:17" itemprop="dateModified" datetime="2020-11-07T08:58:17+08:00">2020-11-07</time>
      </span>
    <span class="post-meta-item">
      <span class="post-meta-item-icon">
        <i class="far fa-folder"></i>
      </span>
      <span class="post-meta-item-text">in</span>
        <span itemprop="about" itemscope itemtype="http://schema.org/Thing">
          <a href="/yuwanzi.io/categories/%E5%90%8E%E7%AB%AF/" itemprop="url" rel="index"><span itemprop="name">后端</span></a>
        </span>
          . 
        <span itemprop="about" itemscope itemtype="http://schema.org/Thing">
          <a href="/yuwanzi.io/categories/%E5%90%8E%E7%AB%AF/%E5%A4%A7%E6%95%B0%E6%8D%AE/" itemprop="url" rel="index"><span itemprop="name">大数据</span></a>
        </span>
          . 
        <span itemprop="about" itemscope itemtype="http://schema.org/Thing">
          <a href="/yuwanzi.io/categories/%E5%90%8E%E7%AB%AF/%E5%A4%A7%E6%95%B0%E6%8D%AE/Hadoop/" itemprop="url" rel="index"><span itemprop="name">Hadoop</span></a>
        </span>
    </span>

  
</div>

        </div>
      </header>

    
    
    
    <div class="post-body" itemprop="articleBody">
          <p><img src="http://ww4.sinaimg.cn/mw690/63503acbjw1f5w446w8pcj20bp08rweu.jpg"></p>
<h3 id="什么是MapReduce"><a href="#什么是MapReduce" class="headerlink" title="什么是MapReduce"></a>什么是MapReduce</h3><p>&nbsp;&nbsp;MapReduce是一种分布式计算模型，由Google提出，主要用于搜索领域，解决海量数据的计算问题。</p>
<p>&nbsp;&nbsp;MapReduce是处理大量半结构化数据集合的编程模型。编程模型是一种处理并结构化特定问题的方式。例如，在一个关系数据库中，使用一种集合语言执行查询，如SQL。告诉语言想要的结果，并将它提交给系统来计算出如何产生计算。还可以用更传统的语言(C++，Java)，一步步地来解决问题。这是两种不同的编程模型，MapReduce就是另外一种。</p>
<p>&nbsp;&nbsp;MapReduce和Hadoop是相互独立的，实际上又能相互配合工作得很好。</p>
<h3 id="Yarn概述"><a href="#Yarn概述" class="headerlink" title="Yarn概述"></a>Yarn概述</h3><p>&nbsp;&nbsp;Yarn是一个分布式的资源管理系统，用以提高分布式的集群环境下的资源利用率，这些资源包括内存、IO、网络、磁盘等。其产生的原因是为了解决原MapReduce框架的不足。最初MapReduce的committer们还可以周期性的在已有的代码上进行修改，可是随着代码的增加以及原MapReduce框架设计的不足，在原MapReduce框架上进行修改变得越来越困难，所以MapReduce的committer们决定从架构上重新设计MapReduce,使下一代的MapReduce(MRv2/Yarn)框架具有更好的扩展性、可用性、可靠性、向后兼容性和更高的资源利用率以及能支持除了MapReduce计算框架外的更多的计算框架。</p>
<h3 id="原MapReduce架构的不足"><a href="#原MapReduce架构的不足" class="headerlink" title="原MapReduce架构的不足"></a>原MapReduce架构的不足</h3><p><img src="http://ww4.sinaimg.cn/mw690/63503acbjw1f5w4495s9rj20sa0foac1.jpg"></p>
<ul>
<li>JobTracker是集群事务的集中处理点，存在单点故障。</li>
<li>JobTracker需要完成的任务太多，既要维护job的状态又要维护job的task的状态，造成过多的资源消耗。</li>
<li>在taskTracker端，用map/reduce task作为资源的表示过于简单，没有考虑到CPU、内存等资源情况，当把两个需要消耗大内存的task调度到一起，很容易出现OOM(Out Of Memory内存不足)。</li>
<li>把资源强制划分为map/reduce slot,当只有map task时，reduce slot不能用；当只有reduce task时，map slot不能用，容易造成资源利用不足。</li>
</ul>
<h3 id="MRv2-Yarn工作流程"><a href="#MRv2-Yarn工作流程" class="headerlink" title="MRv2/Yarn工作流程"></a>MRv2/Yarn工作流程</h3><h4 id="Yarn架构"><a href="#Yarn架构" class="headerlink" title="Yarn架构"></a>Yarn架构</h4><p>&nbsp;&nbsp;Yarn/MRv2最基本的想法是将原JobTracker主要的资源管理和job调度/监视功能分开作为两个单独的守护进程。</p>
<p>&nbsp;&nbsp;有一个全局的ResourceManager(RM)和每个Application有一个ApplicationMaster(AM)，Application相当于map-reduce job或者DAG jobs。</p>
<p>&nbsp;&nbsp;ResourceManager和NodeManager(NM)组成了基本的数据计算框架。ResourceManager协调集群的资源利用，任何client或者运行着的applicatitonMaster想要运行job或者task都得向RM申请一定的资源。ApplicatonMaster是一个框架特殊的库，对于MapReduce框架而言有它自己的AM实现，用户也可以实现自己的AM，在运行的时候，AM会与NM一起来启动和监视tasks。 </p>
<p><strong>ResourceManager</strong></p>
<p>ResourceManager作为资源的协调者有两个主要的组件：Scheduler和ApplicationsManager(AsM)。</p>
<p>Scheduler负责分配最少但满足application运行所需的资源量给Application。Scheduler只是基于资源的使用情况进行调度，并不负责监视/跟踪application的状态，当然也不会处理失败的task。RM使用resource container概念来管理集群的资源，resource container是资源的抽象，每个container包括一定的内存、IO、网络等资源，不过目前的实现只包括内存一种资源。</p>
<p>ApplicationsManager负责处理client提交的job以及协商第一个container以供applicationMaster运行，并且在applicationMaster失败的时候会重新启动applicationMaster。下面阐述RM具体完成的一些功能。</p>
<ol>
<li><p>资源调度：Scheduler从所有运行着的application收到资源请求后构建一个全局的资源分配计划，然后根据application特殊的限制以及全局的一些限制条件分配资源。</p>
</li>
<li><p>资源监视：Scheduler会周期性的接收来自NM的资源使用率的监控信息，另外applicationMaster可以从Scheduler得到属于它的已完成的container的状态信息。</p>
</li>
<li><p>Application提交：</p>
<ul>
<li>client向AsM获得一个applicationID.</li>
<li>client将application定义以及需要的jar包文件等上传到hdfs的指定目录，由yarn-site.xml的yarn.app.mapreduce.am.staging-dir指定.</li>
<li>client构造资源请求的对象以及application的提交context发送给AsM.</li>
<li>AsM接收application的提交context.</li>
<li>AsM根据application的信息向Scheduler协商一个Container供applicationMaster运行，然后启动applicationMaster.</li>
<li>向该container所属的NM发送launchContainer信息启动该container,也即启动applicationMaster、AsM向client提供运行着的AM的状态信息.</li>
</ul>
</li>
<li><p> AM的生命周期：AsM负责系统中所有AM的生命周期的管理。AsM负责AM的启动，当AM启动后，AM会周期性的向AsM发送heartbeat，默认是1s，AsM据此了解AM的存活情况，并且在AM fail时负责重启AM，若是一定时间过后(默认10分钟)没有收到AM的heartbeat，AsM就认为该AM已经fail。</p>
</li>
</ol>
<p><strong>NodeManager</strong></p>
<p>&nbsp;&nbsp;NM主要负责启动RM分配给AM的container以及代表AM的container，并且会监视container的运行情况。在启动container的时候，NM会设置一些必要的环境变量以及将container运行所需的jar包、文件等从hdfs下载到本地，也就是所谓的资源本地化；当所有准备工作做好后，才会启动代表该container的脚本将程序启动起来。启动起来后，NM会周期性的监视该container运行占用的资源情况，若是超过了该container所声明的资源量，则会kill掉该container所代表的进程。</p>
<p>&nbsp;&nbsp;NM还提供了一个简单的服务以管理它所在机器的本地目录。Applications可以继续访问本地目录即使那台机器上已经没有了属于它的container在运行。例如，Map-Reduce应用程序使用这个服务存储map output并且shuffle它们给相应的reduce task。</p>
<p>&nbsp;&nbsp;NM上还可以扩展自己的服务，yarn提供了一个yarn.nodemanager.aux-services的配置项，通过该配置，用户可以自定义一些服务，例如Map-Reduce的shuffle功能就是采用这种方式实现的。</p>
<p>NM在本地为每个运行着的application生成如下的目录结构：</p>
<p><img src="http://ww3.sinaimg.cn/mw690/63503acbjw1f5w44b7lxfj208u05dt9v.jpg"></p>
<p>Container目录下的目录结构如下： </p>
<p><img src="http://ww1.sinaimg.cn/mw690/63503acbjw1f5w44bjrc6j206m05jq3d.jpg"></p>
<p>&nbsp;&nbsp;在启动一个container的时候，NM就执行该container的default_container_executor.sh，该脚本内部会执行launch_container.sh。launch_container.sh会先设置一些环境变量，最后启动执行程序的命令。对于MapReduce而言，启动AM就执行org.apache.hadoop.mapreduce.v2.app.MRAppMaster；启动map/reduce task就执行org.apache.hadoop.mapred.YarnChild。 </p>
<p><strong>ApplicationMaster</strong></p>
<p>&nbsp;&nbsp;ApplicationMaster是一个框架特殊的库，对于Map-Reduce计算模型而言有它自己的ApplicationMaster实现，对于其他的想要运行在yarn上的计算模型而言，必须得实现针对该计算模型的ApplicationMaster用以向RM申请资源运行task，比如运行在yarn上的spark框架也有对应的ApplicationMaster实现，归根结底，yarn是一个资源管理的框架，并不是一个计算框架，要想在yarn上运行应用程序，还得有特定的计算框架的实现。</p>
<h4 id="工作流程"><a href="#工作流程" class="headerlink" title="工作流程"></a>工作流程</h4><p><img src="http://ww1.sinaimg.cn/mw690/63503acbjw1f5w48pm9byj20ui0kl0uy.jpg"></p>
<ol>
<li>JobClient向ResourceManager(AsM)申请提交一个job。</li>
<li>RM返回jobId和job提交路径。</li>
<li>JobClient提交job相关的文件。</li>
<li>向RM汇报提交完成。</li>
<li>RM将job写入Job Queue。</li>
<li>NodeManager(NM)向Job Queue领取任务。</li>
<li>ApplicationMaster(AM)启动,向RM进行注册。</li>
<li>RM向AM返回资源信息。</li>
<li>AM启动map。</li>
<li>当所有map任务完成后,AM启动reduce。 </li>
<li>AM监视运行着的task直到完成,当task失败时,申请新的container运行失败的task。</li>
<li>当每个map/reduce task完成后,AM运行MR OutputCommitter的cleanup 代码，进行一些收尾工作。</li>
<li>当所有的map/reduce完成后,AM运行OutputCommitter的必要的job commit或者abort APIs。</li>
<li>AM注销自己。</li>
</ol>
<h3 id="Shuffle过程"><a href="#Shuffle过程" class="headerlink" title="Shuffle过程"></a>Shuffle过程</h3><p><img src="http://ww3.sinaimg.cn/mw690/63503acbjw1f5w449wf5hj20mv0aodh4.jpg"></p>
<p><img src="http://ww4.sinaimg.cn/mw690/63503acbjw1f5w44an439j20k10g5mzg.jpg"></p>
<h4 id="Map"><a href="#Map" class="headerlink" title="Map"></a>Map</h4><ol>
<li><p>每个输入分片会让一个map任务来处理，默认情况下，以HDFS的一个块的大小（默认为64M）为一个分片，当然我们也可以设置块的大小。map输出的结果会暂且放在一个环形内存缓冲区中（该缓冲区的大小默认为100M，由io.sort.mb属性控制），当该缓冲区快要溢出时（默认为缓冲区大小的80%，由io.sort.spill.percent属性控制），会在本地文件系统中创建一个溢出文件，将该缓冲区中的数据写入这个文件。</p>
</li>
<li><p>在写入磁盘之前，线程首先根据reduce任务的数目将数据划分为相同数目的分区，也就是一个reduce任务对应一个分区的数据。这样做是为了避免有些reduce任务分配到大量数据，而有些reduce任务却分到很少数据，甚至没有分到数据的尴尬局面。其实分区就是对数据进行hash的过程。然后对每个分区中的数据进行排序，如果此时设置了Combiner，将排序后的结果进行Combine操作，这样做的目的是让尽可能少的数据写入到磁盘。</p>
</li>
<li><p>当map任务输出最后一个记录时，可能会有很多的溢出文件，这时需要将这些文件合并。合并的过程中会不断地进行排序和combine操作，目的有两个：</p>
<ul>
<li>尽量减少每次写入磁盘的数据量；</li>
<li>尽量减少下一复制阶段网络传输的数据量。最后合并成了一个已分区且已排序的文件。</li>
</ul>
<p>为了减少网络传输的数据量，这里可以将数据压缩，只要将mapred.compress.map.out设置为true就可以了。</p>
</li>
<li><p>将分区中的数据拷贝给相对应的reduce Task。有人可能会问：分区中的数据怎么知道它对应的reduce是哪个呢？其实map任务一直和其父TaskTracker保持联系，而TaskTracker又一直和JobTracker保持心跳。所以JobTracker中保存了整个集群中的宏观信息。只要reduce任务向JobTracker获取对应的map输出位置就ok了哦。</p>
</li>
</ol>
<h4 id="Reduce"><a href="#Reduce" class="headerlink" title="Reduce"></a>Reduce</h4><ol>
<li><p>Reduce会接收到不同map任务传来的数据，并且每个map传来的数据都是有序的。如果reduce端接受的数据量相当小，则直接存储在内存中（缓冲区大小由mapred.job.shuffle.input.buffer.percent属性控制，表示用作此用途的堆空间的百分比），如果数据量超过了该缓冲区大小的一定比例（由mapred.job.shuffle.merge.percent决定），则对数据合并后溢写到磁盘中。</p>
</li>
<li><p>随着溢写文件的增多，后台线程会将它们合并成一个更大的有序的文件，这样做是为了给后面的合并节省时间。其实不管在map端还是reduce端，MapReduce都是反复地执行排序，合并操作。</p>
</li>
<li><p>合并的过程中会产生许多的中间文件（写入磁盘了），但MapReduce会让写入磁盘的数据尽可能地少，并且最后一次合并的结果并没有写入磁盘，而是直接输入到reduce函数。</p>
</li>
</ol>
<h3 id="WordCount案例"><a href="#WordCount案例" class="headerlink" title="WordCount案例"></a>WordCount案例</h3><h4 id="Mapper"><a href="#Mapper" class="headerlink" title="Mapper"></a>Mapper</h4><figure class="highlight java"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br><span class="line">13</span><br><span class="line">14</span><br></pre></td><td class="code"><pre><span class="line"><span class="keyword">public</span> <span class="class"><span class="keyword">class</span> <span class="title">MyWordCountMapper</span> <span class="keyword">extends</span> <span class="title">Mapper</span>&lt;<span class="title">LongWritable</span>, <span class="title">Text</span>, <span class="title">Text</span>, <span class="title">LongWritable</span>&gt; </span>&#123;</span><br><span class="line"></span><br><span class="line">	<span class="meta">@Override</span></span><br><span class="line">	<span class="function"><span class="keyword">protected</span> <span class="keyword">void</span> <span class="title">map</span><span class="params">(LongWritable key, Text value, Context context)</span> <span class="keyword">throws</span> IOException, InterruptedException </span>&#123;</span><br><span class="line">		<span class="comment">// 读取一行的value</span></span><br><span class="line">		String line = value.toString();</span><br><span class="line">		<span class="comment">// 按照规则切分</span></span><br><span class="line">		String[] words = line.split(<span class="string">&quot; &quot;</span>);</span><br><span class="line">		<span class="comment">// 按照&lt;单词,1&gt;的格式输出</span></span><br><span class="line">		<span class="keyword">for</span> (String word 
: words) &#123;</span><br><span class="line">			context.write(<span class="keyword">new</span> Text(word), <span class="keyword">new</span> LongWritable(<span class="number">1</span>));</span><br><span class="line">		&#125;</span><br><span class="line">	&#125;</span><br><span class="line">&#125;</span><br></pre></td></tr></table></figure>
<h4 id="Reducer"><a href="#Reducer" class="headerlink" title="Reducer"></a>Reducer</h4><figure class="highlight java"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br><span class="line">13</span><br><span class="line">14</span><br><span class="line">15</span><br></pre></td><td class="code"><pre><span class="line"><span class="keyword">public</span> <span class="class"><span class="keyword">class</span> <span class="title">MyWordCountReducer</span> <span class="keyword">extends</span> <span class="title">Reducer</span>&lt;<span class="title">Text</span>, <span class="title">LongWritable</span>, <span class="title">Text</span>, <span class="title">LongWritable</span>&gt; </span>&#123;</span><br><span class="line"></span><br><span class="line">	<span class="meta">@Override</span></span><br><span class="line">	<span class="function"><span class="keyword">protected</span> <span class="keyword">void</span> <span class="title">reduce</span><span class="params">(Text key, Iterable&lt;LongWritable&gt; values, Context context)</span></span></span><br><span class="line"><span class="function">			<span class="keyword">throws</span> IOException, InterruptedException </span>&#123;</span><br><span class="line">		<span class="comment">// 初始化计数器</span></span><br><span class="line">		<span class="keyword">long</span> count = <span class="number">0</span>;</span><br><span class="line">		<span class="comment">// 迭代values,累加计数器计算出总次数</span></span><br><span class="line">		<span class="keyword">for</span> (LongWritable value : values) &#123;</span><br><span class="line">			count += value.get();</span><br><span class="line">		
&#125;</span><br><span class="line">		<span class="comment">// 输出&lt;单词,总次数&gt;</span></span><br><span class="line">		context.write(key, <span class="keyword">new</span> LongWritable(count));</span><br><span class="line">	&#125;</span><br><span class="line">&#125;</span><br></pre></td></tr></table></figure>
<h4 id="Main"><a href="#Main" class="headerlink" title="Main"></a>Main</h4><figure class="highlight java"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br><span class="line">13</span><br><span class="line">14</span><br><span class="line">15</span><br><span class="line">16</span><br><span class="line">17</span><br><span class="line">18</span><br><span class="line">19</span><br><span class="line">20</span><br><span class="line">21</span><br><span class="line">22</span><br><span class="line">23</span><br><span class="line">24</span><br><span class="line">25</span><br><span class="line">26</span><br><span class="line">27</span><br><span class="line">28</span><br><span class="line">29</span><br><span class="line">30</span><br><span class="line">31</span><br><span class="line">32</span><br></pre></td><td class="code"><pre><span class="line"><span class="keyword">public</span> <span class="class"><span class="keyword">class</span> <span class="title">MyWordCountDriver</span> </span>&#123;</span><br><span class="line"></span><br><span class="line">	<span class="function"><span class="keyword">public</span> <span class="keyword">static</span> <span class="keyword">void</span> <span class="title">main</span><span class="params">(String[] args)</span></span></span><br><span class="line"><span class="function">	    <span class="keyword">throws</span> IOException, ClassNotFoundException, InterruptedException </span>&#123;</span><br><span class="line">		Configuration conf = <span class="keyword">new</span> Configuration();</span><br><span class="line">		<span class="comment">// 构造一个job对象</span></span><br><span class="line">		
Job wordCountJob = Job.getInstance(conf);</span><br><span class="line"></span><br><span class="line">		<span class="comment">// 指定job用到的jar包位置,这里使用当前类</span></span><br><span class="line">		wordCountJob.setJarByClass(MyWordCountDriver.class);</span><br><span class="line"></span><br><span class="line">		<span class="comment">// 指定mapper</span></span><br><span class="line">		wordCountJob.setMapperClass(MyWordCountMapper.class);</span><br><span class="line">		<span class="comment">// 指定reducer</span></span><br><span class="line">		wordCountJob.setReducerClass(MyWordCountReducer.class);</span><br><span class="line"></span><br><span class="line">		<span class="comment">// 指定mapper输出key/value的类型</span></span><br><span class="line">		wordCountJob.setMapOutputKeyClass(Text.class);</span><br><span class="line">		wordCountJob.setMapOutputValueClass(LongWritable.class);</span><br><span class="line"></span><br><span class="line">		<span class="comment">// 指定reducer输出key/value的类型</span></span><br><span class="line">		wordCountJob.setOutputKeyClass(Text.class);</span><br><span class="line">		wordCountJob.setOutputValueClass(LongWritable.class);</span><br><span class="line"></span><br><span class="line">		<span class="comment">// 指定输入数据的路径</span></span><br><span class="line">		FileInputFormat.setInputPaths(wordCountJob, <span class="keyword">new</span> Path(args[<span class="number">0</span>]));</span><br><span class="line">		<span class="comment">// 指定输出结果的路径</span></span><br><span class="line">		FileOutputFormat.setOutputPath(wordCountJob, <span class="keyword">new</span> Path(args[<span class="number">1</span>]));</span><br><span class="line">		<span class="comment">// 通过yarn客户端进行提交,参数2为是否打印到控制台</span></span><br><span class="line">		wordCountJob.waitForCompletion(<span class="keyword">true</span>);</span><br><span class="line">	&#125;</span><br><span class="line">&#125;</span><br></pre></td></tr></table></figure>
<h4 id="启动MapReduce"><a href="#启动MapReduce" class="headerlink" title="启动MapReduce"></a>启动MapReduce</h4><p>&nbsp;&nbsp;<strong>方式1:</strong> 将程序打成jar包,上传到hadoop中执行。hadoop jar <jar> [mainClass] args…</p>
<p>&nbsp;&nbsp;<strong>方式2:</strong> 将程序打成jar包,在本地IDE上直接运行(需要代码指定jar)。</p>
<h3 id="自定义Sort"><a href="#自定义Sort" class="headerlink" title="自定义Sort"></a>自定义Sort</h3><h4 id="bean"><a href="#bean" class="headerlink" title="bean"></a>bean</h4><figure class="highlight java"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br><span class="line">13</span><br><span class="line">14</span><br><span class="line">15</span><br><span class="line">16</span><br><span class="line">17</span><br><span class="line">18</span><br><span class="line">19</span><br><span class="line">20</span><br><span class="line">21</span><br><span class="line">22</span><br><span class="line">23</span><br><span class="line">24</span><br><span class="line">25</span><br><span class="line">26</span><br><span class="line">27</span><br><span class="line">28</span><br><span class="line">29</span><br><span class="line">30</span><br><span class="line">31</span><br><span class="line">32</span><br><span class="line">33</span><br><span class="line">34</span><br><span class="line">35</span><br><span class="line">36</span><br><span class="line">37</span><br><span class="line">38</span><br><span class="line">39</span><br><span class="line">40</span><br><span class="line">41</span><br><span class="line">42</span><br><span class="line">43</span><br><span class="line">44</span><br><span class="line">45</span><br><span class="line">46</span><br><span class="line">47</span><br><span class="line">48</span><br><span class="line">49</span><br><span class="line">50</span><br><span class="line">51</span><br><span class="line">52</span><br><span class="line">53</span><br><span class="line">54</span><br><span class="line">55</span><br><span 
class="line">56</span><br><span class="line">57</span><br><span class="line">58</span><br><span class="line">59</span><br><span class="line">60</span><br><span class="line">61</span><br><span class="line">62</span><br><span class="line">63</span><br><span class="line">64</span><br><span class="line">65</span><br><span class="line">66</span><br><span class="line">67</span><br><span class="line">68</span><br><span class="line">69</span><br><span class="line">70</span><br><span class="line">71</span><br><span class="line">72</span><br></pre></td><td class="code"><pre><span class="line"><span class="keyword">public</span> <span class="class"><span class="keyword">class</span> <span class="title">FlowBean</span> <span class="keyword">implements</span> <span class="title">WritableComparable</span>&lt;<span class="title">FlowBean</span>&gt; </span>&#123;</span><br><span class="line"></span><br><span class="line">	<span class="keyword">private</span> Long upFlow;</span><br><span class="line">	<span class="keyword">private</span> Long downFlow;</span><br><span class="line">	<span class="keyword">private</span> Long sumFlow;</span><br><span class="line"></span><br><span class="line">	<span class="function"><span class="keyword">public</span> <span class="keyword">void</span> <span class="title">setAll</span><span class="params">(Long upFlow, Long downFlow)</span> </span>&#123;</span><br><span class="line">		<span class="keyword">this</span>.upFlow = upFlow;</span><br><span class="line">		<span class="keyword">this</span>.downFlow = downFlow;</span><br><span class="line">		<span class="keyword">this</span>.sumFlow = upFlow + downFlow;</span><br><span class="line">	&#125;</span><br><span class="line"></span><br><span class="line">	<span class="function"><span class="keyword">public</span> Long <span class="title">getUpFlow</span><span class="params">()</span> </span>&#123;</span><br><span class="line">		<span class="keyword">return</span> upFlow;</span><br><span class="line">	
&#125;</span><br><span class="line"></span><br><span class="line">	<span class="function"><span class="keyword">public</span> <span class="keyword">void</span> <span class="title">setUpFlow</span><span class="params">(Long upFlow)</span> </span>&#123;</span><br><span class="line">		<span class="keyword">this</span>.upFlow = upFlow;</span><br><span class="line">	&#125;</span><br><span class="line"></span><br><span class="line">	<span class="function"><span class="keyword">public</span> Long <span class="title">getDownFlow</span><span class="params">()</span> </span>&#123;</span><br><span class="line">		<span class="keyword">return</span> downFlow;</span><br><span class="line">	&#125;</span><br><span class="line"></span><br><span class="line">	<span class="function"><span class="keyword">public</span> <span class="keyword">void</span> <span class="title">setDownFlow</span><span class="params">(Long downFlow)</span> </span>&#123;</span><br><span class="line">		<span class="keyword">this</span>.downFlow = downFlow;</span><br><span class="line">	&#125;</span><br><span class="line"></span><br><span class="line">	<span class="function"><span class="keyword">public</span> Long <span class="title">getSumFlow</span><span class="params">()</span> </span>&#123;</span><br><span class="line">		<span class="keyword">return</span> sumFlow;</span><br><span class="line">	&#125;</span><br><span class="line"></span><br><span class="line">	<span class="function"><span class="keyword">public</span> <span class="keyword">void</span> <span class="title">setSumFlow</span><span class="params">(Long sumFlow)</span> </span>&#123;</span><br><span class="line">		<span class="keyword">this</span>.sumFlow = sumFlow;</span><br><span class="line">	&#125;</span><br><span class="line"></span><br><span class="line">	<span class="meta">@Override</span></span><br><span class="line">	<span class="function"><span class="keyword">public</span> String <span class="title">toString</span><span 
class="params">()</span> </span>&#123;</span><br><span class="line">		<span class="keyword">return</span> <span class="string">&quot;FlowBean [upFlow=&quot;</span> + upFlow + <span class="string">&quot;, downFlow=&quot;</span> + downFlow</span><br><span class="line">				+ <span class="string">&quot;, sumFlow=&quot;</span> + sumFlow + <span class="string">&quot;]&quot;</span>;</span><br><span class="line">	&#125;</span><br><span class="line"></span><br><span class="line">	<span class="comment">/**</span></span><br><span class="line"><span class="comment">	 * 序列化</span></span><br><span class="line"><span class="comment">	 */</span></span><br><span class="line">	<span class="meta">@Override</span></span><br><span class="line">	<span class="function"><span class="keyword">public</span> <span class="keyword">void</span> <span class="title">write</span><span class="params">(DataOutput out)</span> <span class="keyword">throws</span> IOException </span>&#123;</span><br><span class="line">		out.writeLong(upFlow);</span><br><span class="line">		out.writeLong(downFlow);</span><br><span class="line">		out.writeLong(sumFlow);</span><br><span class="line">	&#125;</span><br><span class="line"></span><br><span class="line">	<span class="comment">/**</span></span><br><span class="line"><span class="comment">	 * 反序列化</span></span><br><span class="line"><span class="comment">	 */</span></span><br><span class="line">	<span class="meta">@Override</span></span><br><span class="line">	<span class="function"><span class="keyword">public</span> <span class="keyword">void</span> <span class="title">readFields</span><span class="params">(DataInput in)</span> <span class="keyword">throws</span> IOException </span>&#123;</span><br><span class="line">		upFlow = in.readLong();</span><br><span class="line">		downFlow = in.readLong();</span><br><span class="line">		sumFlow = in.readLong();</span><br><span class="line">	&#125;</span><br><span class="line"></span><br><span class="line">	<span 
class="comment">/**</span></span><br><span class="line"><span class="comment">	 * 降序排序 -1 大于 0 等于 1小于</span></span><br><span class="line"><span class="comment">	 */</span></span><br><span class="line">	<span class="meta">@Override</span></span><br><span class="line">	<span class="function"><span class="keyword">public</span> <span class="keyword">int</span> <span class="title">compareTo</span><span class="params">(FlowBean o)</span> </span>&#123;</span><br><span class="line">		<span class="comment">// 如果当前类的总和大于其他类的总和 则返回-1(大于) false 1(小于)</span></span><br><span class="line">		<span class="keyword">return</span> <span class="keyword">this</span>.sumFlow &gt; o.getSumFlow() ? -<span class="number">1</span> : <span class="number">1</span>;</span><br><span class="line">	&#125;</span><br><span class="line"></span><br><span class="line">&#125;</span><br></pre></td></tr></table></figure>
<h4 id="main"><a href="#main" class="headerlink" title="main"></a>main</h4><figure class="highlight java"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br><span class="line">13</span><br><span class="line">14</span><br><span class="line">15</span><br><span class="line">16</span><br><span class="line">17</span><br><span class="line">18</span><br><span class="line">19</span><br><span class="line">20</span><br><span class="line">21</span><br><span class="line">22</span><br><span class="line">23</span><br><span class="line">24</span><br><span class="line">25</span><br><span class="line">26</span><br><span class="line">27</span><br><span class="line">28</span><br><span class="line">29</span><br><span class="line">30</span><br><span class="line">31</span><br><span class="line">32</span><br><span class="line">33</span><br><span class="line">34</span><br><span class="line">35</span><br><span class="line">36</span><br><span class="line">37</span><br><span class="line">38</span><br><span class="line">39</span><br><span class="line">40</span><br><span class="line">41</span><br><span class="line">42</span><br><span class="line">43</span><br><span class="line">44</span><br><span class="line">45</span><br><span class="line">46</span><br><span class="line">47</span><br><span class="line">48</span><br><span class="line">49</span><br><span class="line">50</span><br><span class="line">51</span><br><span class="line">52</span><br><span class="line">53</span><br><span class="line">54</span><br><span class="line">55</span><br><span class="line">56</span><br><span class="line">57</span><br><span class="line">58</span><br><span 
class="line">59</span><br><span class="line">60</span><br><span class="line">61</span><br><span class="line">62</span><br><span class="line">63</span><br><span class="line">64</span><br><span class="line">65</span><br><span class="line">66</span><br><span class="line">67</span><br><span class="line">68</span><br></pre></td><td class="code"><pre><span class="line"><span class="keyword">public</span> <span class="class"><span class="keyword">class</span> <span class="title">FlowSummarySort</span> </span>&#123;</span><br><span class="line"></span><br><span class="line">	<span class="comment">/**</span></span><br><span class="line"><span class="comment">	 * 因为只有key才能进行排序,所以输出key为FlowBean</span></span><br><span class="line"><span class="comment">	 * </span></span><br><span class="line"><span class="comment">	 * <span class="doctag">@author</span> sylvanasp</span></span><br><span class="line"><span class="comment">	 * <span class="doctag">@version</span> 1.0</span></span><br><span class="line"><span class="comment">	 */</span></span><br><span class="line">	<span class="keyword">public</span> <span class="keyword">static</span> <span class="class"><span class="keyword">class</span> <span class="title">FlowSummarySortMapper</span></span></span><br><span class="line"><span class="class">			<span class="keyword">extends</span> <span class="title">Mapper</span>&lt;<span class="title">LongWritable</span>, <span class="title">Text</span>, <span class="title">FlowBean</span>, <span class="title">Text</span>&gt; </span>&#123;</span><br><span class="line"></span><br><span class="line">		<span class="meta">@Override</span></span><br><span class="line">		<span class="function"><span class="keyword">protected</span> <span class="keyword">void</span> <span class="title">map</span><span class="params">(LongWritable key, Text value, Context context)</span></span></span><br><span class="line"><span class="function">				<span class="keyword">throws</span> IOException, InterruptedException 
</span>&#123;</span><br><span class="line">			String line = value.toString();</span><br><span class="line">			String[] fields = StringUtils.split(line, <span class="string">&quot;\t&quot;</span>);</span><br><span class="line">			String phoneNum = fields[<span class="number">0</span>];</span><br><span class="line">			Long upFlow = Long.parseLong(fields[<span class="number">1</span>]);</span><br><span class="line">			Long downFlow = Long.parseLong(fields[<span class="number">2</span>]);</span><br><span class="line"></span><br><span class="line">			FlowBean flowBean = <span class="keyword">new</span> FlowBean();</span><br><span class="line">			flowBean.setAll(upFlow, downFlow);</span><br><span class="line">			context.write(flowBean, <span class="keyword">new</span> Text(phoneNum));</span><br><span class="line">		&#125;</span><br><span class="line"></span><br><span class="line">	&#125;</span><br><span class="line"></span><br><span class="line">	<span class="comment">/**</span></span><br><span class="line"><span class="comment">	 * 因为在mapper中已经完成了排序,所以reducer中需要将phoneNum重新设置为key</span></span><br><span class="line"><span class="comment">	 * </span></span><br><span class="line"><span class="comment">	 * <span class="doctag">@author</span> sylvanasp</span></span><br><span class="line"><span class="comment">	 * <span class="doctag">@version</span> 1.0</span></span><br><span class="line"><span class="comment">	 */</span></span><br><span class="line">	<span class="keyword">public</span> <span class="keyword">static</span> <span class="class"><span class="keyword">class</span> <span class="title">FlowSummarySortReducer</span></span></span><br><span class="line"><span class="class">			<span class="keyword">extends</span> <span class="title">Reducer</span>&lt;<span class="title">FlowBean</span>, <span class="title">Text</span>, <span class="title">Text</span>, <span class="title">FlowBean</span>&gt; </span>&#123;</span><br><span class="line"></span><br><span class="line">		<span 
class="meta">@Override</span></span><br><span class="line">		<span class="function"><span class="keyword">protected</span> <span class="keyword">void</span> <span class="title">reduce</span><span class="params">(FlowBean bean, Iterable&lt;Text&gt; phoneNum,</span></span></span><br><span class="line"><span class="function"><span class="params">				Context context)</span> <span class="keyword">throws</span> IOException, InterruptedException </span>&#123;</span><br><span class="line">			<span class="comment">// 因为每个bean都是完全独立的,所以Iterable中只有一个数据</span></span><br><span class="line">			<span class="keyword">for</span> (Text phoneNumKey : phoneNum) &#123;</span><br><span class="line">				context.write(phoneNumKey, bean);</span><br><span class="line">			&#125;</span><br><span class="line">		&#125;</span><br><span class="line"></span><br><span class="line">	&#125;</span><br><span class="line"></span><br><span class="line">	<span class="function"><span class="keyword">public</span> <span class="keyword">static</span> <span class="keyword">void</span> <span class="title">main</span><span class="params">(String[] args)</span> <span class="keyword">throws</span> Exception </span>&#123;</span><br><span class="line">		Configuration conf = <span class="keyword">new</span> Configuration();</span><br><span class="line">		Job job = Job.getInstance(conf);</span><br><span class="line">		job.setJarByClass(FlowSummarySort.class);</span><br><span class="line">		job.setMapperClass(FlowSummarySortMapper.class);</span><br><span class="line">		job.setReducerClass(FlowSummarySortReducer.class);</span><br><span class="line"></span><br><span class="line">		job.setMapOutputKeyClass(FlowBean.class);</span><br><span class="line">		job.setMapOutputValueClass(Text.class);</span><br><span class="line"></span><br><span class="line">		job.setOutputKeyClass(Text.class);</span><br><span class="line">		job.setOutputValueClass(FlowBean.class);</span><br><span class="line"></span><br><span class="line">		
FileInputFormat.setInputPaths(job, <span class="keyword">new</span> Path(args[<span class="number">0</span>]));</span><br><span class="line">		FileOutputFormat.setOutputPath(job, <span class="keyword">new</span> Path(args[<span class="number">1</span>]));</span><br><span class="line"></span><br><span class="line">		<span class="keyword">int</span> result = job.waitForCompletion(<span class="keyword">true</span>) ? <span class="number">0</span> : <span class="number">1</span>;</span><br><span class="line">		System.exit(result);</span><br><span class="line">	&#125;</span><br><span class="line"></span><br><span class="line">&#125;</span><br></pre></td></tr></table></figure>
<h3 id="自定义Partition"><a href="#自定义Partition" class="headerlink" title="自定义Partition"></a>自定义Partition</h3><h4 id="partitioner"><a href="#partitioner" class="headerlink" title="partitioner"></a>partitioner</h4><figure class="highlight java"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br><span class="line">13</span><br><span class="line">14</span><br><span class="line">15</span><br><span class="line">16</span><br><span class="line">17</span><br><span class="line">18</span><br><span class="line">19</span><br><span class="line">20</span><br><span class="line">21</span><br><span class="line">22</span><br><span class="line">23</span><br><span class="line">24</span><br></pre></td><td class="code"><pre><span class="line"><span class="keyword">public</span> <span class="class"><span class="keyword">class</span> <span class="title">MyPartitioner</span> <span class="keyword">extends</span> <span class="title">Partitioner</span>&lt;<span class="title">Text</span>, <span class="title">FlowBean</span>&gt; </span>&#123;</span><br><span class="line"></span><br><span class="line">	<span class="comment">// 使用map模拟数据库</span></span><br><span class="line">	<span class="keyword">private</span> <span class="keyword">static</span> HashMap&lt;String, Integer&gt; map = <span class="keyword">new</span> HashMap&lt;String, Integer&gt;();</span><br><span class="line"></span><br><span class="line">	<span class="comment">// 初始化分区规则</span></span><br><span class="line">	<span class="keyword">static</span> &#123;</span><br><span class="line">		map.put(<span class="string">&quot;136&quot;</span>, <span class="number">0</span>);</span><br><span 
class="line">		map.put(<span class="string">&quot;137&quot;</span>, <span class="number">1</span>);</span><br><span class="line">		map.put(<span class="string">&quot;138&quot;</span>, <span class="number">2</span>);</span><br><span class="line">		map.put(<span class="string">&quot;139&quot;</span>, <span class="number">3</span>);</span><br><span class="line">	&#125;</span><br><span class="line"></span><br><span class="line">	<span class="meta">@Override</span></span><br><span class="line">	<span class="function"><span class="keyword">public</span> <span class="keyword">int</span> <span class="title">getPartition</span><span class="params">(Text key, FlowBean value, <span class="keyword">int</span> numPartitions)</span> </span>&#123;</span><br><span class="line">		<span class="comment">// 获取手机号前3位</span></span><br><span class="line">		String phonePrefix = key.toString().substring(<span class="number">0</span>, <span class="number">3</span>);</span><br><span class="line">		<span class="comment">// 根据手机号前缀获得对应的分区编号</span></span><br><span class="line">		Integer partitionId = map.get(phonePrefix);</span><br><span class="line">		<span class="comment">// 如果手机号不在分区规则内,则分配到分区4。</span></span><br><span class="line">		<span class="keyword">return</span> partitionId == <span class="keyword">null</span> ? <span class="number">4</span> : partitionId;</span><br><span class="line">	&#125;</span><br><span class="line"></span><br><span class="line">&#125;</span><br></pre></td></tr></table></figure>
<h4 id="main-1"><a href="#main-1" class="headerlink" title="main"></a>main</h4><figure class="highlight java"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br><span class="line">13</span><br><span class="line">14</span><br><span class="line">15</span><br><span class="line">16</span><br><span class="line">17</span><br><span class="line">18</span><br><span class="line">19</span><br><span class="line">20</span><br><span class="line">21</span><br><span class="line">22</span><br><span class="line">23</span><br><span class="line">24</span><br><span class="line">25</span><br><span class="line">26</span><br><span class="line">27</span><br><span class="line">28</span><br><span class="line">29</span><br><span class="line">30</span><br><span class="line">31</span><br><span class="line">32</span><br><span class="line">33</span><br><span class="line">34</span><br><span class="line">35</span><br><span class="line">36</span><br><span class="line">37</span><br><span class="line">38</span><br><span class="line">39</span><br><span class="line">40</span><br><span class="line">41</span><br><span class="line">42</span><br><span class="line">43</span><br><span class="line">44</span><br><span class="line">45</span><br><span class="line">46</span><br><span class="line">47</span><br><span class="line">48</span><br><span class="line">49</span><br><span class="line">50</span><br><span class="line">51</span><br><span class="line">52</span><br><span class="line">53</span><br><span class="line">54</span><br><span class="line">55</span><br><span class="line">56</span><br><span class="line">57</span><br><span class="line">58</span><br><span 
class="line">59</span><br><span class="line">60</span><br><span class="line">61</span><br><span class="line">62</span><br><span class="line">63</span><br><span class="line">64</span><br><span class="line">65</span><br><span class="line">66</span><br></pre></td><td class="code"><pre><span class="line"><span class="keyword">public</span> <span class="class"><span class="keyword">class</span> <span class="title">FlowSummaryPartition</span> </span>&#123;</span><br><span class="line"></span><br><span class="line">	<span class="keyword">public</span> <span class="keyword">static</span> <span class="class"><span class="keyword">class</span> <span class="title">FlowSummaryPartitionMapper</span></span></span><br><span class="line"><span class="class">			<span class="keyword">extends</span> <span class="title">Mapper</span>&lt;<span class="title">LongWritable</span>, <span class="title">Text</span>, <span class="title">Text</span>, <span class="title">FlowBean</span>&gt; </span>&#123;</span><br><span class="line"></span><br><span class="line">		<span class="meta">@Override</span></span><br><span class="line">		<span class="function"><span class="keyword">protected</span> <span class="keyword">void</span> <span class="title">map</span><span class="params">(LongWritable key, Text value, Context context)</span></span></span><br><span class="line"><span class="function">				<span class="keyword">throws</span> IOException, InterruptedException </span>&#123;</span><br><span class="line">			String line = value.toString();</span><br><span class="line">			String[] fields = StringUtils.split(line, <span class="string">&quot;\t&quot;</span>);</span><br><span class="line"></span><br><span class="line">			String phoneNum = fields[<span class="number">1</span>];</span><br><span class="line">			Long upFlow = Long.parseLong(fields[fields.length - <span class="number">3</span>]);</span><br><span class="line">			Long downFlow = Long.parseLong(fields[fields.length - <span 
class="number">2</span>]);</span><br><span class="line"></span><br><span class="line">			FlowBean flowBean = <span class="keyword">new</span> FlowBean();</span><br><span class="line">			flowBean.setAll(upFlow, downFlow);</span><br><span class="line">			context.write(<span class="keyword">new</span> Text(phoneNum), flowBean);</span><br><span class="line">		&#125;</span><br><span class="line"></span><br><span class="line">	&#125;</span><br><span class="line"></span><br><span class="line">	<span class="keyword">public</span> <span class="keyword">static</span> <span class="class"><span class="keyword">class</span> <span class="title">FlowSummaryPartitionReducer</span></span></span><br><span class="line"><span class="class">			<span class="keyword">extends</span> <span class="title">Reducer</span>&lt;<span class="title">Text</span>, <span class="title">FlowBean</span>, <span class="title">Text</span>, <span class="title">FlowBean</span>&gt; </span>&#123;</span><br><span class="line"></span><br><span class="line">		<span class="meta">@Override</span></span><br><span class="line">		<span class="function"><span class="keyword">protected</span> <span class="keyword">void</span> <span class="title">reduce</span><span class="params">(Text key, Iterable&lt;FlowBean&gt; beans,</span></span></span><br><span class="line"><span class="function"><span class="params">				Context context)</span> <span class="keyword">throws</span> IOException, InterruptedException </span>&#123;</span><br><span class="line">			<span class="keyword">long</span> upSum = <span class="number">0</span>;</span><br><span class="line">			<span class="keyword">long</span> downSum = <span class="number">0</span>;</span><br><span class="line"></span><br><span class="line">			<span class="keyword">for</span> (FlowBean bean : beans) &#123;</span><br><span class="line">				upSum += bean.getUpFlow();</span><br><span class="line">				downSum += bean.getDownFlow();</span><br><span class="line">			
&#125;</span><br><span class="line">			FlowBean flowBean = <span class="keyword">new</span> FlowBean();</span><br><span class="line">			flowBean.setAll(upSum, downSum);</span><br><span class="line">			context.write(key, flowBean);</span><br><span class="line">		&#125;</span><br><span class="line"></span><br><span class="line">	&#125;</span><br><span class="line"></span><br><span class="line">	<span class="function"><span class="keyword">public</span> <span class="keyword">static</span> <span class="keyword">void</span> <span class="title">main</span><span class="params">(String[] args)</span> <span class="keyword">throws</span> Exception </span>&#123;</span><br><span class="line">		Configuration conf = <span class="keyword">new</span> Configuration();</span><br><span class="line">		Job job = Job.getInstance(conf);</span><br><span class="line"></span><br><span class="line">		job.setJarByClass(FlowSummaryPartition.class);</span><br><span class="line">		job.setMapperClass(FlowSummaryPartitionMapper.class);</span><br><span class="line">		job.setReducerClass(FlowSummaryPartitionReducer.class);</span><br><span class="line"></span><br><span class="line">		job.setOutputKeyClass(Text.class);</span><br><span class="line">		job.setOutputValueClass(FlowBean.class);</span><br><span class="line"></span><br><span class="line">		<span class="comment">// 设置分区器</span></span><br><span class="line">		job.setPartitionerClass(MyPartitioner.class);</span><br><span class="line">		<span class="comment">// 设置Reducer Task 实例数量 (与分区数一致)</span></span><br><span class="line">		job.setNumReduceTasks(<span class="number">5</span>);</span><br><span class="line"></span><br><span class="line">		FileInputFormat.setInputPaths(job, <span class="keyword">new</span> Path(args[<span class="number">0</span>]));</span><br><span class="line">		FileOutputFormat.setOutputPath(job, <span class="keyword">new</span> Path(args[<span class="number">1</span>]));</span><br><span class="line"></span><br><span 
class="line">		<span class="keyword">int</span> result = job.waitForCompletion(<span class="keyword">true</span>) ? <span class="number">0</span> : <span class="number">1</span>;</span><br><span class="line">		System.exit(result);</span><br><span class="line">	&#125;</span><br><span class="line"></span><br><span class="line">&#125;</span><br></pre></td></tr></table></figure>
<h3 id="END"><a href="#END" class="headerlink" title="END"></a>END</h3><blockquote>
<p>部分资料来源于<a target="_blank" rel="noopener" href="http://blog.sina.com.cn/s/blog_829a682d0101lc9d.html">http://blog.sina.com.cn/s/blog_829a682d0101lc9d.html</a> 和 <a target="_blank" rel="noopener" href="http://weixiaolu.iteye.com/blog/1474172">http://weixiaolu.iteye.com/blog/1474172</a></p>
</blockquote>

      
    </div>

    
    
    

    <footer class="post-footer">
        <div class="post-eof"></div>
      
    </footer>
  </article>
</div>




    


<div class="post-block">
  
  

  <article itemscope itemtype="http://schema.org/Article" class="post-content" lang="zh">
    <link itemprop="mainEntityOfPage" href="https://suyuhuan.gitee.io/yuwanzi.io/2016/07/12/2016-07-12-Hadoop-HDFS/">

    <span hidden itemprop="author" itemscope itemtype="http://schema.org/Person">
      <meta itemprop="image" content="/yuwanzi.io/images/avatar.gif">
      <meta itemprop="name" content="玉丸子">
      <meta itemprop="description" content="这里是玉丸子的个人博客,与你一起发现更大的世界。">
    </span>

    <span hidden itemprop="publisher" itemscope itemtype="http://schema.org/Organization">
      <meta itemprop="name" content="玉丸子 | Blog">
    </span>
      <header class="post-header">
        <h2 class="post-title" itemprop="name headline">
          <a href="/yuwanzi.io/2016/07/12/2016-07-12-Hadoop-HDFS/" class="post-title-link" itemprop="url">Hadoop学习笔记(1)-HDFS</a>
        </h2>

        <div class="post-meta-container">
          <div class="post-meta">
    <span class="post-meta-item">
      <span class="post-meta-item-icon">
        <i class="far fa-calendar"></i>
      </span>
      <span class="post-meta-item-text">Veröffentlicht am</span>

      <time title="Erstellt: 2016-07-12 18:00:00" itemprop="dateCreated datePublished" datetime="2016-07-12T18:00:00+08:00">2016-07-12</time>
    </span>
      <span class="post-meta-item">
        <span class="post-meta-item-icon">
          <i class="far fa-calendar-check"></i>
        </span>
        <span class="post-meta-item-text">Bearbeitet am</span>
        <time title="Geändert am: 2020-11-07 08:58:17" itemprop="dateModified" datetime="2020-11-07T08:58:17+08:00">2020-11-07</time>
      </span>
    <span class="post-meta-item">
      <span class="post-meta-item-icon">
        <i class="far fa-folder"></i>
      </span>
      <span class="post-meta-item-text">in</span>
        <span itemprop="about" itemscope itemtype="http://schema.org/Thing">
          <a href="/yuwanzi.io/categories/%E5%90%8E%E7%AB%AF/" itemprop="url" rel="index"><span itemprop="name">后端</span></a>
        </span>
          . 
        <span itemprop="about" itemscope itemtype="http://schema.org/Thing">
          <a href="/yuwanzi.io/categories/%E5%90%8E%E7%AB%AF/%E5%A4%A7%E6%95%B0%E6%8D%AE/" itemprop="url" rel="index"><span itemprop="name">大数据</span></a>
        </span>
          . 
        <span itemprop="about" itemscope itemtype="http://schema.org/Thing">
          <a href="/yuwanzi.io/categories/%E5%90%8E%E7%AB%AF/%E5%A4%A7%E6%95%B0%E6%8D%AE/Hadoop/" itemprop="url" rel="index"><span itemprop="name">Hadoop</span></a>
        </span>
    </span>

  
</div>

        </div>
      </header>

    
    
    
    <div class="post-body" itemprop="articleBody">
          <p><img src="http://ww4.sinaimg.cn/mw690/63503acbjw1f5w446w8pcj20bp08rweu.jpg" alt=""></p>
<h3 id="Hadoop概述"><a href="#Hadoop概述" class="headerlink" title="Hadoop概述"></a>Hadoop概述</h3><p>Hadoop是一个由Apache基金会所开发的分布式系统基础架构。</p>
<p><img src="http://ww4.sinaimg.cn/mw690/63503acbjw1f5w4478njsj20as05wwep.jpg" alt="Hadoop 组成结构示意图"></p>
<p>&nbsp;&nbsp;Hadoop 由许多元素构成。其最底部是 Hadoop Distributed File System（HDFS），它存储 Hadoop 集群中所有存储节点上的文件。HDFS的上一层是MapReduce 引擎，该引擎由 JobTrackers 和 TaskTrackers 组成。</p>
<p><strong>HDFS</strong></p>
<p>&nbsp;&nbsp;对外部客户机而言，HDFS就像一个传统的分级文件系统。可以创建、删除、移动或重命名文件，等等。但是 HDFS 的架构是基于一组特定的节点构建的，这是由它自身的特点决定的。这些节点包括 NameNode（仅一个），它在 HDFS 内部提供元数据服务；DataNode，它为 HDFS 提供存储块。由于仅存在一个 NameNode，因此这是 HDFS 的一个缺点（单点失效）。</p>
<p>&nbsp;&nbsp;存储在 HDFS 中的文件被分成块，然后将这些块复制到多个计算机中（DataNode）。这与传统的 RAID 架构大不相同。块的大小（通常为 64MB）和复制的块数量在创建文件时由客户机决定。NameNode 可以控制所有文件操作。HDFS 内部的所有通信都基于标准的 TCP/IP 协议。</p>
<p>&nbsp;&nbsp;HDFS和现有的分布式文件系统有很多共同点。但同时，它和其他的分布式文件系统的区别也是很明显的。HDFS是一个高度容错性的系统，适合部署在廉价的机器上。HDFS能提供高吞吐量的数据访问，非常适合大规模数据集上的应用。HDFS放宽了一部分POSIX约束，来实现流式读取文件系统数据的目的。HDFS在最开始是作为Apache Nutch搜索引擎项目的基础架构而开发的。HDFS是Apache Hadoop Core项目的一部分。</p>
<p><strong>NameNode</strong></p>
<p>&nbsp;&nbsp;NameNode 是一个通常在HDFS实例中的单独机器上运行的软件。它负责管理文件系统名称空间和控制外部客户机的访问。NameNode 决定是否将文件映射到 DataNode 上的复制块上。对于最常见的 3 个复制块，第一个复制块存储在同一机架的不同节点上，最后一个复制块存储在不同机架的某个节点上。</p>
<p>&nbsp;&nbsp;实际的 I/O事务并没有经过 NameNode，只有表示 DataNode 和块的文件映射的元数据经过 NameNode。当外部客户机发送请求要求创建文件时，NameNode 会以块标识和该块的第一个副本的 DataNode IP 地址作为响应。这个 NameNode 还会通知其他将要接收该块的副本的 DataNode。</p>
<p>&nbsp;&nbsp;NameNode 在一个称为FsImage的文件中存储所有关于文件系统名称空间的信息。这个文件和一个包含所有事务的记录文件（这里是 EditLog）将存储在 NameNode 的本地文件系统上。FsImage 和 EditLog 文件也需要复制副本，以防文件损坏或 NameNode 系统丢失。</p>
<p>&nbsp;&nbsp;NameNode本身不可避免地具有SPOF（Single Point Of Failure）单点失效的风险，主备模式并不能解决这个问题，通过Hadoop Non-stop namenode才能实现100% uptime可用时间。</p>
<p><strong>DataNode</strong></p>
<p>&nbsp;&nbsp;DataNode 也是一个通常在 HDFS实例中的单独机器上运行的软件。Hadoop 集群包含一个 NameNode 和大量 DataNode。DataNode通常以机架的形式组织，机架通过一个交换机将所有系统连接起来。Hadoop 的一个假设是：机架内部节点之间的传输速度快于机架间节点的传输速度。</p>
<p>&nbsp;&nbsp;DataNode 响应来自 HDFS 客户机的读写请求。它们还响应来自 NameNode 的创建、删除和复制块的命令。NameNode 依赖来自每个 DataNode 的定期心跳（heartbeat）消息。每条消息都包含一个块报告，NameNode 可以根据这个报告验证块映射和其他文件系统元数据。如果 DataNode 不能发送心跳消息，NameNode 将采取修复措施，重新复制在该节点上丢失的块。</p>
<h3 id="HDFS体系结构"><a href="#HDFS体系结构" class="headerlink" title="HDFS体系结构"></a>HDFS体系结构</h3><p><img src="http://ww2.sinaimg.cn/mw690/63503acbjw1f5w447vawzj20hq0aptar.jpg" alt="HDFS 体系结构图"></p>
<ul>
<li><strong>NameNode</strong>:唯一的master节点,管理HDFS的名称空间和数据块映射信息、配置副本策略和处理客户端请求。</li>
<li><strong>Secondary NameNode</strong>:辅助NameNode，分担NameNode工作，定期合并fsimage和edits并推送给NameNode，紧急情况下可辅助恢复NameNode。</li>
<li><strong>DataNode</strong>:Slave节点，实际存储数据、执行数据块的读写并汇报存储信息给NameNode。</li>
<li><strong>FSImage</strong>:元数据镜像文件。</li>
<li><strong>Edits</strong>:元数据的操作日志。</li>
</ul>
<h3 id="HDFSWriteOperation"><a href="#HDFSWriteOperation" class="headerlink" title="HDFSWriteOperation"></a>HDFSWriteOperation</h3><p><img src="http://ww3.sinaimg.cn/mw690/63503acbjw1f5w48p90chj20k20bzdgr.jpg" alt="HDFS 写入操作示意图"></p>
<p>&nbsp;&nbsp;在分布式文件系统中，需要确保数据的一致性。对于HDFS来说，直到所有要保存数据的DataNodes确认它们都有文件的副本时，数据才被认为写入完成。因此，数据一致性是在写的阶段完成的。一个客户端无论选择从哪个DataNode读取，都将得到相同的数据。</p>
<ol>
<li>客户端请求NameNode,表示写入文件。</li>
<li>NameNode响应客户端,并告诉客户端将文件保存到DataNodeA、B、D。</li>
<li>客户端连接DataNodeA写入文件,DataNode集群内完成复制。</li>
<li>DataNodeA将文件副本发送给DataNodeB。</li>
<li>DataNodeB将文件副本发送给DataNodeD。</li>
<li>DataNodeD返回确认消息给DataNodeB。</li>
<li>DataNodeB返回确认消息给DataNodeA。</li>
<li>DataNodeA返回确认消息给客户端,写入完成。</li>
</ol>
<p><img src="http://ww3.sinaimg.cn/mw690/63503acbjw1f5w448fsbnj20gn0a6acc.jpg" alt="HDFS 写入流程图"></p>
<ol>
<li>Client调用DistributedFileSystem的create()函数创建新文件。</li>
<li>DistributedFileSystem使用RPC调用NameNode创建一个没有block关联的新文件,NameNode在创建之前将进行校验,如果校验通过,NameNode则创建一个新文件并记录一条记录,否则抛出IO异常。</li>
<li>前两步成功后,将会返回一个DFSOutputStream对象,DFSOutputStream可以协调NameNode与DataNode,当客户端写入数据到DFSOutputStream,DFSOutputStream会将数据分割为一个一个Packet(数据包),并写入数据队列。</li>
<li>DataStreamer处理数据队列,它会先去询问NameNode存储到哪几个DataNode,例如Replication为3,则会去找到3个最适合的DataNode。DataStreamer会将DataNode排成一个Pipeline,它会将Packet按队列输出到管道中的第一个DataNode,第一个DataNode又会把Packet输出到第二个DataNode,直到最后一个DataNode。</li>
<li>DataStreamer中还有一个Ack Queue,Ack Queue之中也含有Packet。Ack Queue负责接收DataNode的确认响应,当Pipeline中的所有DataNode都确认完毕后,Ack Queue将移除对应的Packet。</li>
<li>Client完成数据写入,关闭流。</li>
<li>DataStreamer等待Ack Queue信息,当收到最后一个信息时,通知NameNode把文件标记为完成。</li>
</ol>
<h3 id="HDFSReadOperation"><a href="#HDFSReadOperation" class="headerlink" title="HDFSReadOperation"></a>HDFSReadOperation</h3><p><img src="http://ww4.sinaimg.cn/mw690/63503acbjw1f5w48ovukqj20jm0ermy3.jpg" alt="HDFS 读取操作示意图"></p>
<ol>
<li>客户端请求NameNode,表示读取文件。</li>
<li>NameNode响应客户端,将block(数据块)的信息发送给客户端。</li>
<li>客户端检查数据块信息,连接相关的DataNode。</li>
<li>DataNodeA将block1发送给客户端。</li>
<li>DataNodeB将block2发送给客户端。</li>
<li>拼接数据,读取完成。</li>
</ol>
<p><img src="http://ww2.sinaimg.cn/mw690/63503acbjw1f5w4486azhj20g209adi1.jpg" alt="HDFS 读取流程图"></p>
<ol>
<li>Client调用FileSystem的open()函数打开希望读取的文件。</li>
<li>DistributedFileSystem使用RPC调用NameNode确定文件起始块的位置，同一Block按照重复数会返回多个位置，这些位置按照Hadoop集群拓扑结构排序，距离客户端近的排在前面。</li>
<li>前两步成功后,将会返回一个DFSInputStream对象,DFSInputStream可以协调NameNode与DataNode。客户端对DFSInputStream输入流调用read()函数。</li>
<li>DFSInputStream连接距离最近的DataNode，通过对数据流反复调用read()函数，可以将数据从DataNode传输到客户端。</li>
<li>当到达Block的末端时，DFSInputStream会关闭与该DataNode的连接，然后寻找下一个Block的最佳DataNode，这些操作对客户端来说是透明的。</li>
<li>客户端完成读取，对FSDataInputStream调用close()关闭文件读取。</li>
</ol>
<h3 id="HDFSShell命令"><a href="#HDFSShell命令" class="headerlink" title="HDFSShell命令"></a>HDFSShell命令</h3><p>&nbsp;&nbsp;既然 HDFS 是存取数据的分布式文件系统，那么对 HDFS 的操作，就是文件系统的基本 操作，比如文件的创建、修改、删除、修改权限等，文件夹的创建、删除、重命名等。对 HDFS 的操作命令类似于 Linux 的 shell 对文件的操作，如 ls、mkdir、rm 等。 我们执行以下操作的时候，一定要确定 hadoop 是正常运行的，使用 jps 命令确保看到 各个 hadoop 进程。 </p>
<table>
<thead>
<tr>
<th>命令名</th>
<th>格式</th>
<th>含义</th>
</tr>
</thead>
<tbody><tr>
<td>-ls</td>
<td>-ls&lt;路径&gt;</td>
<td>查看指定路径的当前目录结构</td>
</tr>
<tr>
<td>-lsr</td>
<td>-lsr&lt;路径&gt;</td>
<td>递归查看指定路径的目录结构</td>
</tr>
<tr>
<td>-du</td>
<td>-du&lt;路径&gt;</td>
<td>统计目录下个文件大小</td>
</tr>
<tr>
<td>-dus</td>
<td>-dus&lt;路径&gt;</td>
<td>汇总统计目录下文件(夹)大小</td>
</tr>
<tr>
<td>-count</td>
<td>-count[-q]&lt;路径&gt;</td>
<td>统计文件(夹)数量</td>
</tr>
<tr>
<td>-mv</td>
<td>-mv&lt;源路径&gt;&lt;目的路径&gt;</td>
<td>移动</td>
</tr>
<tr>
<td>-cp</td>
<td>-cp&lt;源路径&gt;&lt;目的路径&gt;</td>
<td>复制</td>
</tr>
<tr>
<td>-rm</td>
<td>-rm[-skipTrash]&lt;路径&gt;</td>
<td>删除文件/空白文件夹</td>
</tr>
<tr>
<td>-rmr</td>
<td>-rmr[-skipTrash]&lt;路径&gt;</td>
<td>递归删除</td>
</tr>
<tr>
<td>-put</td>
<td>-put&lt;多个 linux 上的文件&gt;&lt;hdfs 路径&gt;</td>
<td>上传文件</td>
</tr>
<tr>
<td>-copyFromLocal</td>
<td>-copyFromLocal&lt;多个 linux 上的文件&gt; &lt;hdfs 路径&gt;</td>
<td>从本地复制</td>
</tr>
<tr>
<td>-moveFromLocal</td>
<td>-moveFromLocal&lt;多个 linux 上的文件&gt; &lt;hdfs 路径&gt;</td>
<td>从本地移动</td>
</tr>
<tr>
<td>-getmerge</td>
<td>-getmerge&lt;源路径&gt;&lt;linux 路径&gt;</td>
<td>合并到本地</td>
</tr>
<tr>
<td>-cat</td>
<td>-cat&lt;hdfs 路径&gt;</td>
<td>查看文件内容</td>
</tr>
<tr>
<td>-text</td>
<td>-text&lt;hdfs 路径&gt;</td>
<td>查看文件内容</td>
</tr>
<tr>
<td>-copyToLocal</td>
<td>-copyToLocal[-ignoreCrc][-crc][hdfs 源路 径][linux 目的路径]</td>
<td>复制到本地</td>
</tr>
<tr>
<td>-moveToLocal</td>
<td>-moveToLocal[-crc]&lt;hdfs 源路径&gt;&lt;linux 目的路径&gt;</td>
<td>移动到本地</td>
</tr>
<tr>
<td>-mkdir</td>
<td>-mkdir&lt;hdfs 路径&gt;</td>
<td>创建空白文件夹</td>
</tr>
<tr>
<td>-setrep</td>
<td>-setrep[-R][-w]&lt;副本数&gt;&lt;路径&gt;</td>
<td>修改副本数量</td>
</tr>
<tr>
<td>-touchz</td>
<td>-touchz&lt;文件路径&gt;</td>
<td>创建空白文件</td>
</tr>
<tr>
<td>-stat</td>
<td>-stat[format]&lt;路径&gt;</td>
<td>显示文件统计信息</td>
</tr>
<tr>
<td>-tail</td>
<td>-tail[-f]&lt;文件&gt;</td>
<td>查看文件尾部信息</td>
</tr>
<tr>
<td>-chmod</td>
<td>-chmod[-R]&lt;权限模式&gt;[路径]</td>
<td>修改权限</td>
</tr>
<tr>
<td>-chown</td>
<td>-chown[-R][属主][:[属组]] 路径</td>
<td>修改属主</td>
</tr>
<tr>
<td>-chgrp</td>
<td>-chgrp[-R] 属组名称 路径</td>
<td>修改属组</td>
</tr>
<tr>
<td>-help</td>
<td>-help[命令选项]</td>
<td>帮助</td>
</tr>
</tbody></table>
<h3 id="使用JAVA操作HDFS"><a href="#使用JAVA操作HDFS" class="headerlink" title="使用JAVA操作HDFS"></a>使用JAVA操作HDFS</h3><figure class="highlight java"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br><span class="line">13</span><br><span class="line">14</span><br><span class="line">15</span><br><span class="line">16</span><br><span class="line">17</span><br><span class="line">18</span><br><span class="line">19</span><br><span class="line">20</span><br><span class="line">21</span><br><span class="line">22</span><br><span class="line">23</span><br><span class="line">24</span><br><span class="line">25</span><br><span class="line">26</span><br><span class="line">27</span><br><span class="line">28</span><br><span class="line">29</span><br><span class="line">30</span><br><span class="line">31</span><br><span class="line">32</span><br><span class="line">33</span><br><span class="line">34</span><br><span class="line">35</span><br><span class="line">36</span><br><span class="line">37</span><br><span class="line">38</span><br><span class="line">39</span><br><span class="line">40</span><br><span class="line">41</span><br><span class="line">42</span><br><span class="line">43</span><br><span class="line">44</span><br><span class="line">45</span><br><span class="line">46</span><br><span class="line">47</span><br><span class="line">48</span><br><span class="line">49</span><br><span class="line">50</span><br><span class="line">51</span><br><span class="line">52</span><br><span class="line">53</span><br><span class="line">54</span><br><span class="line">55</span><br><span class="line">56</span><br><span class="line">57</span><br><span 
class="line">58</span><br><span class="line">59</span><br><span class="line">60</span><br><span class="line">61</span><br><span class="line">62</span><br><span class="line">63</span><br><span class="line">64</span><br><span class="line">65</span><br><span class="line">66</span><br><span class="line">67</span><br><span class="line">68</span><br><span class="line">69</span><br><span class="line">70</span><br><span class="line">71</span><br><span class="line">72</span><br><span class="line">73</span><br><span class="line">74</span><br><span class="line">75</span><br><span class="line">76</span><br><span class="line">77</span><br><span class="line">78</span><br><span class="line">79</span><br><span class="line">80</span><br><span class="line">81</span><br><span class="line">82</span><br><span class="line">83</span><br><span class="line">84</span><br><span class="line">85</span><br><span class="line">86</span><br><span class="line">87</span><br><span class="line">88</span><br><span class="line">89</span><br><span class="line">90</span><br><span class="line">91</span><br><span class="line">92</span><br><span class="line">93</span><br><span class="line">94</span><br><span class="line">95</span><br><span class="line">96</span><br><span class="line">97</span><br><span class="line">98</span><br><span class="line">99</span><br><span class="line">100</span><br><span class="line">101</span><br><span class="line">102</span><br><span class="line">103</span><br><span class="line">104</span><br><span class="line">105</span><br><span class="line">106</span><br><span class="line">107</span><br><span class="line">108</span><br><span class="line">109</span><br><span class="line">110</span><br><span class="line">111</span><br><span class="line">112</span><br><span class="line">113</span><br><span class="line">114</span><br><span class="line">115</span><br><span class="line">116</span><br><span class="line">117</span><br><span class="line">118</span><br><span 
class="line">119</span><br><span class="line">120</span><br><span class="line">121</span><br><span class="line">122</span><br><span class="line">123</span><br><span class="line">124</span><br><span class="line">125</span><br><span class="line">126</span><br><span class="line">127</span><br><span class="line">128</span><br><span class="line">129</span><br><span class="line">130</span><br><span class="line">131</span><br><span class="line">132</span><br><span class="line">133</span><br><span class="line">134</span><br><span class="line">135</span><br><span class="line">136</span><br><span class="line">137</span><br><span class="line">138</span><br><span class="line">139</span><br><span class="line">140</span><br><span class="line">141</span><br><span class="line">142</span><br><span class="line">143</span><br><span class="line">144</span><br><span class="line">145</span><br><span class="line">146</span><br><span class="line">147</span><br><span class="line">148</span><br><span class="line">149</span><br><span class="line">150</span><br><span class="line">151</span><br><span class="line">152</span><br><span class="line">153</span><br><span class="line">154</span><br><span class="line">155</span><br><span class="line">156</span><br><span class="line">157</span><br><span class="line">158</span><br><span class="line">159</span><br><span class="line">160</span><br><span class="line">161</span><br><span class="line">162</span><br><span class="line">163</span><br><span class="line">164</span><br><span class="line">165</span><br><span class="line">166</span><br><span class="line">167</span><br><span class="line">168</span><br><span class="line">169</span><br></pre></td><td class="code"><pre><span class="line"><span class="keyword">public</span> <span class="class"><span class="keyword">class</span> <span class="title">HdfsTest</span> </span>&#123;</span><br><span class="line"></span><br><span class="line">	<span class="keyword">private</span> Configuration conf = <span 
class="keyword">null</span>;</span><br><span class="line">	<span class="keyword">private</span> FileSystem fs = <span class="keyword">null</span>;</span><br><span class="line">	<span class="keyword">private</span> FSDataInputStream DFSInputStream = <span class="keyword">null</span>;</span><br><span class="line"></span><br><span class="line">	<span class="comment">/**</span></span><br><span class="line"><span class="comment">	 * 初始化FlieSystem</span></span><br><span class="line"><span class="comment">	 * </span></span><br><span class="line"><span class="comment">	 * <span class="doctag">@throws</span> IOException</span></span><br><span class="line"><span class="comment">	 * <span class="doctag">@throws</span> InterruptedException</span></span><br><span class="line"><span class="comment">	 */</span></span><br><span class="line">	<span class="meta">@Before</span></span><br><span class="line">	<span class="function"><span class="keyword">public</span> <span class="keyword">void</span> <span class="title">init</span><span class="params">()</span> <span class="keyword">throws</span> IOException, InterruptedException </span>&#123;</span><br><span class="line">		conf = <span class="keyword">new</span> Configuration();</span><br><span class="line">		conf.set(<span class="string">&quot;fs.defaultFS&quot;</span>, <span class="string">&quot;hdfs://192.168.145.145:9000&quot;</span>);</span><br><span class="line">		fs = fs.get(URI.create(<span class="string">&quot;hdfs://192.168.145.145:9000&quot;</span>), conf, <span class="string">&quot;root&quot;</span>);</span><br><span class="line">	&#125;</span><br><span class="line"></span><br><span class="line">	<span class="comment">/**</span></span><br><span class="line"><span class="comment">	 * 读取文件</span></span><br><span class="line"><span class="comment">	 * </span></span><br><span class="line"><span class="comment">	 * <span class="doctag">@throws</span> IOException</span></span><br><span class="line"><span class="comment">	 * 
<span class="doctag">@throws</span> IllegalArgumentException</span></span><br><span class="line"><span class="comment">	 */</span></span><br><span class="line">	<span class="meta">@Test</span></span><br><span class="line">	<span class="function"><span class="keyword">public</span> <span class="keyword">void</span> <span class="title">testReadAsOpen</span><span class="params">()</span> <span class="keyword">throws</span> IllegalArgumentException, IOException </span>&#123;</span><br><span class="line">		Path path = <span class="keyword">null</span>;</span><br><span class="line">		<span class="keyword">try</span> &#123;</span><br><span class="line">			path = <span class="keyword">new</span> Path(<span class="string">&quot;/test&quot;</span>);</span><br><span class="line">			<span class="keyword">if</span> (fs.exists(path)) &#123;</span><br><span class="line">				DFSInputStream = fs.open(path);</span><br><span class="line">				IOUtils.copyBytes(DFSInputStream, System.out, conf);</span><br><span class="line">			&#125;</span><br><span class="line">		&#125; <span class="keyword">finally</span> &#123;</span><br><span class="line">			IOUtils.closeStream(DFSInputStream);</span><br><span class="line">			fs.close();</span><br><span class="line">		&#125;</span><br><span class="line">	&#125;</span><br><span class="line"></span><br><span class="line">	<span class="comment">/**</span></span><br><span class="line"><span class="comment">	 * 上传本地文件</span></span><br><span class="line"><span class="comment">	 * </span></span><br><span class="line"><span class="comment">	 * <span class="doctag">@throws</span> IOException</span></span><br><span class="line"><span class="comment">	 */</span></span><br><span class="line">	<span class="meta">@Test</span></span><br><span class="line">	<span class="function"><span class="keyword">public</span> <span class="keyword">void</span> <span class="title">testUpload</span><span class="params">()</span> <span class="keyword">throws</span> IOException 
</span>&#123;</span><br><span class="line">		Path src = <span class="keyword">null</span>;</span><br><span class="line">		Path dst = <span class="keyword">null</span>;</span><br><span class="line">		<span class="keyword">try</span> &#123;</span><br><span class="line">			src = <span class="keyword">new</span> Path(<span class="string">&quot;f:/saber_by_wlop-d8tjwa5.jpg&quot;</span>);<span class="comment">// 原路径</span></span><br><span class="line">			dst = <span class="keyword">new</span> Path(<span class="string">&quot;/saber.jpg&quot;</span>);<span class="comment">// 目标路径</span></span><br><span class="line">			<span class="comment">// 参数1为是否删除原文件,true为删除,默认为false</span></span><br><span class="line">			fs.copyFromLocalFile(<span class="keyword">false</span>, src, dst);</span><br><span class="line">			<span class="comment">// 打印文件路径</span></span><br><span class="line">			System.out.println(<span class="string">&quot;Upload to &quot;</span> + conf.get(<span class="string">&quot;fs.default.name&quot;</span>));</span><br><span class="line">			System.out.println(<span class="string">&quot;----------------------------------------&quot;</span>);</span><br><span class="line">			FileStatus[] fileStatus = fs.listStatus(dst);</span><br><span class="line">			<span class="keyword">for</span> (FileStatus file : fileStatus) &#123;</span><br><span class="line">				System.out.println(file.getPath());</span><br><span class="line">			&#125;</span><br><span class="line">		&#125; <span class="keyword">finally</span> &#123;</span><br><span class="line">			fs.close();</span><br><span class="line">		&#125;</span><br><span class="line">	&#125;</span><br><span class="line"></span><br><span class="line">	<span class="comment">/**</span></span><br><span class="line"><span class="comment">	 * 下载文件</span></span><br><span class="line"><span class="comment">	 * </span></span><br><span class="line"><span class="comment">	 * <span class="doctag">@throws</span> IOException</span></span><br><span 
class="line"><span class="comment">	 */</span></span><br><span class="line">	<span class="meta">@Test</span></span><br><span class="line">	<span class="function"><span class="keyword">public</span> <span class="keyword">void</span> <span class="title">testDownload</span><span class="params">()</span> <span class="keyword">throws</span> IOException </span>&#123;</span><br><span class="line">		Path src = <span class="keyword">null</span>;</span><br><span class="line">		Path dst = <span class="keyword">null</span>;</span><br><span class="line">		<span class="keyword">try</span> &#123;</span><br><span class="line">			<span class="keyword">if</span> (fs.exists(src)) &#123;</span><br><span class="line">				src = <span class="keyword">new</span> Path(<span class="string">&quot;/saber.jpg&quot;</span>);</span><br><span class="line">				dst = <span class="keyword">new</span> Path(<span class="string">&quot;D:/temp/&quot;</span>);</span><br><span class="line">				fs.copyToLocalFile(<span class="keyword">false</span>, src, dst);</span><br><span class="line">				<span class="comment">// 打印文件路径</span></span><br><span class="line">				System.out.println(<span class="string">&quot;Download from &quot;</span> + conf.get(<span class="string">&quot;fs.default.name&quot;</span>));</span><br><span class="line">				System.out.println(<span class="string">&quot;--------------------------------------------&quot;</span>);</span><br><span class="line">				<span class="comment">// 迭代路径,参数2为是否递归迭代</span></span><br><span class="line">				RemoteIterator&lt;LocatedFileStatus&gt; iterator = fs.listFiles(src, <span class="keyword">true</span>);</span><br><span class="line">				<span class="keyword">while</span> (iterator.hasNext()) &#123;</span><br><span class="line">					LocatedFileStatus fileStatus = iterator.next();</span><br><span class="line">					System.out.println(fileStatus.getPath());</span><br><span class="line">				&#125;</span><br><span class="line">			&#125;</span><br><span 
class="line">		&#125; <span class="keyword">finally</span> &#123;</span><br><span class="line">			fs.close();</span><br><span class="line">		&#125;</span><br><span class="line">	&#125;</span><br><span class="line"></span><br><span class="line">	<span class="comment">/**</span></span><br><span class="line"><span class="comment">	 * 创建目录</span></span><br><span class="line"><span class="comment">	 * </span></span><br><span class="line"><span class="comment">	 * <span class="doctag">@throws</span> IOException</span></span><br><span class="line"><span class="comment">	 */</span></span><br><span class="line">	<span class="meta">@Test</span></span><br><span class="line">	<span class="function"><span class="keyword">public</span> <span class="keyword">void</span> <span class="title">testMkdir</span><span class="params">()</span> <span class="keyword">throws</span> IOException </span>&#123;</span><br><span class="line">		Path path = <span class="keyword">null</span>;</span><br><span class="line">		<span class="keyword">try</span> &#123;</span><br><span class="line">			path = <span class="keyword">new</span> Path(<span class="string">&quot;/create01&quot;</span>);</span><br><span class="line">			<span class="comment">// 判断目录是否已存在</span></span><br><span class="line">			<span class="keyword">boolean</span> exists = fs.exists(path);</span><br><span class="line">			<span class="keyword">if</span> (!exists) &#123;</span><br><span class="line">				<span class="comment">// 创建目录</span></span><br><span class="line">				<span class="keyword">boolean</span> mkdirs = fs.mkdirs(path);</span><br><span class="line">				<span class="keyword">if</span> (mkdirs) &#123;</span><br><span class="line">					System.out.println(<span class="string">&quot;create dir success!&quot;</span>);</span><br><span class="line">				&#125; <span class="keyword">else</span> &#123;</span><br><span class="line">					System.out.println(<span class="string">&quot;create dir failure!&quot;</span>);</span><br><span 
class="line">				&#125;</span><br><span class="line">			&#125;</span><br><span class="line">		&#125; <span class="keyword">finally</span> &#123;</span><br><span class="line">			fs.close();</span><br><span class="line">		&#125;</span><br><span class="line">	&#125;</span><br><span class="line"></span><br><span class="line">	<span class="comment">/**</span></span><br><span class="line"><span class="comment">	 * 重命名文件</span></span><br><span class="line"><span class="comment">	 * </span></span><br><span class="line"><span class="comment">	 * <span class="doctag">@throws</span> IOException</span></span><br><span class="line"><span class="comment">	 */</span></span><br><span class="line">	<span class="meta">@Test</span></span><br><span class="line">	<span class="function"><span class="keyword">public</span> <span class="keyword">void</span> <span class="title">testRename</span><span class="params">()</span> <span class="keyword">throws</span> IOException </span>&#123;</span><br><span class="line">		Path oldPath = <span class="keyword">null</span>;</span><br><span class="line">		Path newPath = <span class="keyword">null</span>;</span><br><span class="line">		<span class="keyword">try</span> &#123;</span><br><span class="line">			oldPath = <span class="keyword">new</span> Path(<span class="string">&quot;/saber.jpg&quot;</span>);</span><br><span class="line">			newPath = <span class="keyword">new</span> Path(<span class="string">&quot;/saber01.jpg&quot;</span>);</span><br><span class="line">			<span class="keyword">if</span> (fs.exists(oldPath)) &#123;</span><br><span class="line">				<span class="keyword">boolean</span> rename = fs.rename(oldPath, newPath);</span><br><span class="line">				<span class="keyword">if</span> (rename) &#123;</span><br><span class="line">					System.out.println(<span class="string">&quot;rename success!&quot;</span>);</span><br><span class="line">				&#125; <span class="keyword">else</span> &#123;</span><br><span class="line">					
System.out.println(<span class="string">&quot;rename failure!&quot;</span>);</span><br><span class="line">				&#125;</span><br><span class="line">			&#125;</span><br><span class="line">		&#125; <span class="keyword">finally</span> &#123;</span><br><span class="line">			fs.close();</span><br><span class="line">		&#125;</span><br><span class="line">	&#125;</span><br><span class="line"></span><br><span class="line">	<span class="comment">/**</span></span><br><span class="line"><span class="comment">	 * 删除文件</span></span><br><span class="line"><span class="comment">	 * </span></span><br><span class="line"><span class="comment">	 * <span class="doctag">@throws</span> IOException</span></span><br><span class="line"><span class="comment">	 */</span></span><br><span class="line">	<span class="meta">@Test</span></span><br><span class="line">	<span class="function"><span class="keyword">public</span> <span class="keyword">void</span> <span class="title">testDelete</span><span class="params">()</span> <span class="keyword">throws</span> IOException </span>&#123;</span><br><span class="line">		Path path = <span class="keyword">null</span>;</span><br><span class="line">		<span class="keyword">try</span> &#123;</span><br><span class="line">			path = <span class="keyword">new</span> Path(<span class="string">&quot;/saber01.jpg&quot;</span>);</span><br><span class="line">			<span class="keyword">if</span> (fs.exists(path)) &#123;</span><br><span class="line">				<span class="keyword">boolean</span> delete = fs.deleteOnExit(path);</span><br><span class="line">				<span class="keyword">if</span> (delete) &#123;</span><br><span class="line">					System.out.println(<span class="string">&quot;delete success!&quot;</span>);</span><br><span class="line">				&#125; <span class="keyword">else</span> &#123;</span><br><span class="line">					System.out.println(<span class="string">&quot;delete failure!&quot;</span>);</span><br><span class="line">				&#125;</span><br><span class="line">			
&#125;</span><br><span class="line">		&#125; <span class="keyword">finally</span> &#123;</span><br><span class="line">			fs.close();</span><br><span class="line">		&#125;</span><br><span class="line">	&#125;</span><br><span class="line">&#125;</span><br></pre></td></tr></table></figure>
      
    </div>

    
    
    

    <footer class="post-footer">
        <div class="post-eof"></div>
      
    </footer>
  </article>
</div>




    


<div class="post-block">
  
  

  <article itemscope itemtype="http://schema.org/Article" class="post-content" lang="zh">
    <link itemprop="mainEntityOfPage" href="https://suyuhuan.gitee.io/yuwanzi.io/2016/07/09/2016-07-09-MyCat/">

    <span hidden itemprop="author" itemscope itemtype="http://schema.org/Person">
      <meta itemprop="image" content="/yuwanzi.io/images/avatar.gif">
      <meta itemprop="name" content="玉丸子">
      <meta itemprop="description" content="这里是玉丸子的个人博客,与你一起发现更大的世界。">
    </span>

    <span hidden itemprop="publisher" itemscope itemtype="http://schema.org/Organization">
      <meta itemprop="name" content="玉丸子 | Blog">
    </span>
      <header class="post-header">
        <h2 class="post-title" itemprop="name headline">
          <a href="/yuwanzi.io/2016/07/09/2016-07-09-MyCat/" class="post-title-link" itemprop="url">MyCat快速入门</a>
        </h2>

        <div class="post-meta-container">
          <div class="post-meta">
    <span class="post-meta-item">
      <span class="post-meta-item-icon">
        <i class="far fa-calendar"></i>
      </span>
      <span class="post-meta-item-text">Veröffentlicht am</span>

      <time title="Erstellt: 2016-07-09 18:00:00" itemprop="dateCreated datePublished" datetime="2016-07-09T18:00:00+08:00">2016-07-09</time>
    </span>
      <span class="post-meta-item">
        <span class="post-meta-item-icon">
          <i class="far fa-calendar-check"></i>
        </span>
        <span class="post-meta-item-text">Bearbeitet am</span>
        <time title="Geändert am: 2020-11-07 08:58:17" itemprop="dateModified" datetime="2020-11-07T08:58:17+08:00">2020-11-07</time>
      </span>
    <span class="post-meta-item">
      <span class="post-meta-item-icon">
        <i class="far fa-folder"></i>
      </span>
      <span class="post-meta-item-text">in</span>
        <span itemprop="about" itemscope itemtype="http://schema.org/Thing">
          <a href="/yuwanzi.io/categories/%E5%90%8E%E7%AB%AF/" itemprop="url" rel="index"><span itemprop="name">后端</span></a>
        </span>
          . 
        <span itemprop="about" itemscope itemtype="http://schema.org/Thing">
          <a href="/yuwanzi.io/categories/%E5%90%8E%E7%AB%AF/Database/" itemprop="url" rel="index"><span itemprop="name">Database</span></a>
        </span>
          . 
        <span itemprop="about" itemscope itemtype="http://schema.org/Thing">
          <a href="/yuwanzi.io/categories/%E5%90%8E%E7%AB%AF/Database/MySql/" itemprop="url" rel="index"><span itemprop="name">MySql</span></a>
        </span>
    </span>

  
</div>

        </div>
      </header>

    
    
    
    <div class="post-body" itemprop="articleBody">
          <p><img src="http://ww4.sinaimg.cn/mw690/63503acbjw1f67ok1ytybj207s05674d.jpg"></p>
<h3 id="MyCat概述"><a href="#MyCat概述" class="headerlink" title="MyCat概述"></a>MyCat概述</h3><p> &nbsp;&nbsp;MyCat是基于Cobar二次开发的数据库中间件。它可以低成本的将现有的单机数据库和应用平滑迁移到“云”端，解决数据存储和业务规模迅速增长情况下的数据瓶颈问题。</p>
<p>&nbsp;&nbsp;从定义和分类来看，它是一个开源的分布式数据库系统，是一个实现了MySQL协议的Server，前端用户可以把 它看作是一个数据库代理，用MySQL客户端工具和命令行访问，而其后端可以用MySQL原生（Native）协议与多个MySQL服务 器通信，也可以用JDBC协议与大多数主流数据库服务器通信，其核心功能是分表分库，即将一个大表水平分割为N个小表，存储 在后端MySQL服务器里或者其他数据库里。</p>
<p>&nbsp;&nbsp;Mycat发展到目前的版本，已经不是一个单纯的MySQL代理了，它的后端可以支持MySQL、SQL Server、Oracle、DB2、 PostgreSQL等主流数据库，也支持MongoDB这种新型NoSQL方式的存储，未来还会支持更多类型的存储。而在最终用户看 来，无论是那种存储方式，在Mycat里，都是一个传统的数据库表，支持标准的SQL语句进行数据的操作，这样一来，对前端业 务系统来说，可以大幅降低开发难度，提升开发速度，在测试阶段，可以将一个表定义为任何一种Mycat支持的存储方式，比如 MySQL的MyISAM表、内存表、或者MongoDB、LevelDB以及号称是世界上最快的内存数据库MemSQL上。试想一下，用户表 存放在MemSQL上，大量读频率远超过写频率的数据如订单的快照数据存放于InnoDB中，一些日志数据存放于MongoDB中， 而且还能把Oracle的表跟MySQL的表做关联查询，你是否有一种不能呼吸的感觉？而未来，还能通过Mycat自动将一些计算分析 后的数据灌入到Hadoop中，并能用Mycat+Storm/Spark Stream引擎做大规模数据分析，看到这里，你大概明白了，Mycat是 什么？Mycat就是BigSQL，Big Data On SQL Database。</p>
<h3 id="MyCat特点"><a href="#MyCat特点" class="headerlink" title="MyCat特点"></a>MyCat特点</h3><ul>
<li>支持SQL92标准。</li>
<li>支持Mysql集群,可以作为Proxy使用。</li>
<li>支持JDBC连接ORACLE、DB2、SQL Server。</li>
<li>支持galera for mysql集群，percona-cluster或者mariadb cluster，提供高可用性数据分片集群。</li>
<li>支持自动故障切换,实现高可用。</li>
<li>支持读写分离,Mysql双主多从,以及一主多从模式。</li>
<li>支持全局表。</li>
<li>支持独有的基于E-R关系分片策略,实现了高效的表关联查询。</li>
<li>支持多平台,部署简单。</li>
</ul>
<h3 id="MyCat原理"><a href="#MyCat原理" class="headerlink" title="MyCat原理"></a>MyCat原理</h3><p>&nbsp;&nbsp;Mycat的原理中最重要的一个动词是“拦截”，它拦截了用户发送过来的SQL语句，首先对SQL语句做了一些特定的分析：如分片分析、路由分析、读写分离分析、缓存分析等，然后将此SQL发往后端的真实数据库，并将返回的结果做适当的处理，最终再返回给用户。</p>
<p><img src="http://ww4.sinaimg.cn/mw690/63503acbjw1f67ok1abxhj20td0g0jtk.jpg"></p>
<h3 id="分片策略"><a href="#分片策略" class="headerlink" title="分片策略"></a>分片策略</h3><p><img src="http://ww2.sinaimg.cn/mw690/63503acbjw1f67ok1sk3wj20fv0c40ui.jpg"></p>
<p>MyCat支持横向分片与纵向分片。</p>
<ul>
<li><strong>横向分片</strong>:一个表的数据分割到多个节点上,按照行分隔。</li>
<li><strong>纵向分片</strong>:一个数据库中有多个表A、B、C,A存储到节点1,B存储到节点2,C存储到节点3。</li>
</ul>
<p>MyCat通过定义表的分片规则来实现分片,每个表可以捆绑一个分片规则,每个分片规则指定一个分片字段并绑定一个函数,实现动态分片算法。</p>
<ol>
<li><p><strong>schema</strong>:逻辑库,一个逻辑库中定义了所包含的Table。</p>
</li>
<li><p><strong>table</strong></p>
<p><strong>逻辑表</strong>:既然有逻辑库，那么就会有逻辑表，分布式数据库中，对应用来说，读写数据的表就是逻辑表。逻辑表，可以是数据切分后，分布在一个或多个分片库中，也可以不做数据切分，不分片，只有一个表构成。</p>
<p><strong>分片表</strong>:指那些原有的很大数据的表，需要切分到多个数据库的表，这样，每个分片都有一部分数据，所有分片构成了完整的 数据。</p>
<p>例如在mycat配置中的t_node就属于分片表，数据按照规则被分到dn1,dn2两个分片节点(dataNode)上。</p>
<figure class="highlight plain"><table><tr><td class="gutter"><pre><span class="line">1</span><br></pre></td><td class="code"><pre><span class="line">&lt;table name&#x3D;&quot;t_node&quot; primaryKey&#x3D;&quot;vid&quot; autoIncrement&#x3D;&quot;true&quot; dataNode&#x3D;&quot;dn1,dn2&quot; rule&#x3D;&quot;rule1&quot; &#x2F;&gt;</span><br></pre></td></tr></table></figure>

<p><strong>非分片表</strong>:如果一个数据库中并不是所有的表都有很大的数据,某些表是可以不用进行切分的,<br>非分片表是相对于分片表来说的,就是不需要进行数据切分的表。</p>
<p>例如下面配置的t_node,只存在于一个分片节点dn1上。</p>
<figure class="highlight plain"><table><tr><td class="gutter"><pre><span class="line">1</span><br></pre></td><td class="code"><pre><span class="line">&lt;table name&#x3D;&quot;t_node&quot; primaryKey&#x3D;&quot;vid&quot; autoIncrement&#x3D;&quot;true&quot; dataNode&#x3D;&quot;dn1&quot; &#x2F;&gt;</span><br></pre></td></tr></table></figure>

<p><strong>ER表</strong>:关系型数据库是基于实体关系模型(Entity-RelationshipModel)之上的,通过其描述了真实世界中的事物与关系。MyCat提出了基于E-R关系的数据分片策略,子表的记录与所关联的父表记录存放在同一个数据分片中,即子表依赖于父表,通过表分组(TableGroup)保证数据Join不会跨库操作。</p>
<p><strong>全局表</strong>:一个真实的业务系统中，往往存在大量的类似字典表的表，这些表基本上很少变动，字典表具有以下几个特性： </p>
<ul>
<li>变动不频繁。</li>
<li>数据量总体变化不大。</li>
<li>数据规模不大。</li>
</ul>
<p>对于这类的表，在分片的情况下，当业务表因为规模而进行分片以后，业务表与这些附属的字典表之间的关联，就成了比较棘手的问题，所以Mycat中通过数据冗余来解决这类表的join，即所有的分片都有一份数据的拷贝，所有将字典表或者符合字典表特 性的一些表定义为全局表。</p>
</li>
<li><p><strong>dataNode</strong>:分片节点。数据切分后,一个大表被切分到不同的分片数据库上面,每个表分片所在的数据库就是分片节点。</p>
</li>
<li><p><strong>dataHost</strong>:节点主机。数据切分后，每个分片节点（dataNode）不一定都会独占一台机器，同一机器上面可以有多个分片数据库，这样一个或多个分片节点（dataNode）所在的机器就是节点主机（dataHost）,为了规避单节点主机并发数限制，尽量将读写压力高的分片节点 （dataNode）均衡的放在不同的节点主机（dataHost）.</p>
</li>
<li><p><strong>rule</strong>:分片规则。按照某种业务规则把数据分到某个分片的规则就是分片规则,数据切分选择合适的分片规则非常重要,将极大的避免后续数据处理的难度。</p>
</li>
<li><p><strong>sequence</strong>:全局序列号。数据切分后，原有的关系数据库中的主键约束在分布式条件下将无法使用，因此需要引入外部机制保证数据唯一性标识，这种保证全局性的数据唯一标识的机制就是全局序列号（sequence）。</p>
</li>
</ol>
<h3 id="快速入门"><a href="#快速入门" class="headerlink" title="快速入门"></a>快速入门</h3><p>&nbsp;&nbsp;MyCat是使用JAVA语言开发的,所以需要先安装JAVA运行环境,并且要求JDK版本在1.7以上。</p>
<h4 id="1-环境准备"><a href="#1-环境准备" class="headerlink" title="1.环境准备"></a>1.环境准备</h4><p>JDK下载地址:<a target="_blank" rel="noopener" href="http://www.oracle.com/technetwork/java/javase/downloads/jdk7-downloads-1880260.html">http://www.oracle.com/technetwork/java/javase/downloads/jdk7-downloads-1880260.html</a></p>
<p>MySQL下载地址:<a target="_blank" rel="noopener" href="http://dev.mysql.com/downloads/mysql/5.5.html#downloads">http://dev.mysql.com/downloads/mysql/5.5.html#downloads</a></p>
<p>MyCat下载地址:<a target="_blank" rel="noopener" href="https://github.com/MyCATApache/Mycat-download">https://github.com/MyCATApache/Mycat-download</a></p>
<h4 id="2-MyCat的安装"><a href="#2-MyCat的安装" class="headerlink" title="2.MyCat的安装"></a>2.MyCat的安装</h4><ol>
<li>将下载的MyCat压缩包上传到linux服务器。</li>
<li>解压缩MyCat压缩包。</li>
</ol>
<h4 id="3-MyCat目录结构"><a href="#3-MyCat目录结构" class="headerlink" title="3.MyCat目录结构"></a>3.MyCat目录结构</h4><p><img src="http://ww1.sinaimg.cn/mw690/63503acbjw1f67ok0u4qqj20j002wmyh.jpg"></p>
<p><strong>bin程序目录</strong> 存放了window版本和linux版本，除了提供封装成服务的版本之外，也提供了nowrap的shell脚本命令，方便大 家选择和修改，进入到bin目录： </p>
<ul>
<li>Windows下运行：运行: mycat.bat console 在控制台启动程序，也可以装载成服务，若此程序运行有问题，也可以运行 startup_nowrap.bat，确保java命令可以在命令执行.</li>
<li>Windows下将MyCAT做成系统服务：MyCAT提供warp方式的命令，可以将MyCAT安装成系统服务并可启动和停止。 <ul>
<li>进入bin目录下执行命令 mycat install 执行安装mycat服务. </li>
<li>输入 mycat start 启动mycat服务.</li>
</ul>
</li>
</ul>
<p><strong>conf目录</strong>存放配置文件，server.xml是Mycat服务器参数调整和用户授权的配置文件，schema.xml是逻辑库定义和表以及分片定义的配置文件，rule.xml是分片规则的配置文件，分片规则的具体一些参数信息单独存放为文件，也在这个目录下，配置文件修改，需要重启Mycat或者通过9066端口reload.</p>
<p><strong>lib目录</strong>主要存放mycat依赖的一些jar文件.</p>
<p>日志存放在logs/mycat.log中，每天一个文件，日志的配置是在conf/log4j.xml中，根据自己的需要，可以调整输出级别为 debug，debug级别下，会输出更多的信息，方便排查问题.</p>
<h4 id="4-服务启动"><a href="#4-服务启动" class="headerlink" title="4.服务启动"></a>4.服务启动</h4><p> MyCAT在Linux中部署启动时，首先需要在Linux系统的环境变量中配置MYCAT_HOME,操作方式如下： </p>
<ol>
<li>vi /etc/profile,在系统环境变量文件中增加 MYCAT_HOME=/usr/local/Mycat </li>
<li>执行 source /etc/profile 命令，使环境变量生效。</li>
</ol>
<p>如果是在多台linux系统中组建的MyCat集群,则需要在MyCat Server所在的服务器上配置对其他Ip和主机名的映射。</p>
<ol>
<li>vi /etc/hosts</li>
<li>例如</li>
<li>192.168.145.1 test_1</li>
<li>192.168.145.2 test_2</li>
</ol>
<p>配置完毕后,可以cd到/usr/local/mycat/bin目录下执行 ./mycat start 启动服务。<br><strong>注:MyCat的默认服务端口为8066.</strong></p>
<h3 id="MyCat切分数据"><a href="#MyCat切分数据" class="headerlink" title="MyCat切分数据"></a>MyCat切分数据</h3><h4 id="1-配置schema-xml"><a href="#1-配置schema-xml" class="headerlink" title="1.配置schema.xml"></a>1.配置schema.xml</h4><p>Schema.xml作为MyCat中重要的配置文件之一，管理着MyCat的逻辑库、表、分片规则、DataNode以及DataSource。弄懂这些配置，是正确使用MyCat的前提。这里就一层层对该文件进行解析。</p>
<p><strong>schema标签</strong></p>
<table>
<thead>
<tr>
<th>属性名</th>
<th>值</th>
<th>数量限制</th>
</tr>
</thead>
<tbody><tr>
<td>dataNode</td>
<td>String</td>
<td>0..1</td>
</tr>
<tr>
<td>checkSQLschema</td>
<td>Boolean</td>
<td>1</td>
</tr>
<tr>
<td>sqlMaxLimit</td>
<td>Integer</td>
<td>1</td>
</tr>
</tbody></table>
<ul>
<li>dataNode<br>该属性用于绑定逻辑库到某个具体的database上，如果定义了这个属性，那么这个逻辑库就不能工作在分库分表模式下了。也就是说对这个逻辑库的所有操作会直接作用到绑定的dataNode上，这个schema就可以用作读写分离和主从切换，具体如下配置:</li>
</ul>
<figure class="highlight plain"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br></pre></td><td class="code"><pre><span class="line">&lt;schema name&#x3D;&quot;USERDB&quot; checkSQLschema&#x3D;&quot;false&quot; sqlMaxLimit&#x3D;&quot;100&quot; dataNode&#x3D;&quot;dn1&quot;&gt;    </span><br><span class="line">&lt;!—这里不能配置任何逻辑表信息--&gt; </span><br><span class="line">&lt;&#x2F;schema&gt;</span><br></pre></td></tr></table></figure>

<p> 那么现在USERDB就绑定到dn1所配置的具体database上，可以直接访问这个database。当然该属性只能配置绑定到一个 database上，不能绑定多个dn。</p>
<ul>
<li><p>checkSQLschema<br>当该值设置为 true 时，如果我们执行语句<strong>select * from TESTDB.travelrecord;</strong>则MyCat会把语句修改为<strong>select * from travelrecord;</strong>。即把表示schema的字符去掉，避免发送到后端数据库执行时报（ERROR 1146 (42S02): Table ‘testdb.travelrecord’ doesn’t exist）。 不过，即使设置该值为 true ，如果语句所带的并非是schema指定的名字，例如：<strong>select * from db1.travelrecord;</strong> 那么 MyCat并不会删除db1这个字段，如果没有定义该库的话则会报错，所以在提供SQL语句的最好是不带这个字段。</p>
</li>
<li><p>sqlMaxLimit<br>当该值设置为某个数值时。每条执行的SQL语句，如果没有加上limit语句，MyCat也会自动的加上所对应的值。例如设置值为 100，执行select * fromTESTDB.travelrecord;的效果为和执行select * from TESTDB.travelrecord limit 100;相同。 不设置该值的话，MyCat默认会把查询到的信息全部都展示出来，造成过多的输出。所以，在正常使用中，还是建议加上一个 值，用于减少过多的数据返回。<br>当然SQL语句中也显式的指定limit的大小，不受该属性的约束。<br>需要注意的是，如果运行的schema为非拆分库的，那么该属性不会生效。需要手动添加limit语句。</p>
</li>
</ul>
<p>&nbsp;&nbsp;schema 标签用于定义MyCat实例中的逻辑库，MyCat可以有多个逻辑库，每个逻辑库都有自己的相关配置。可以使用 schema 标 签来划分这些不同的逻辑库。</p>
<p>如果不配置 schema 标签，所有的表配置，会属于同一个默认的逻辑库。</p>
<p>注意：若是LINUX版本的MYSQL，则需要设置为Mysql大小写不敏感，否则可能会发生表找不到的问题。<br>在MySQL的配置文件中my.ini [mysqld] 中增加一行<br>　　lower_case_table_names = 1</p>
<p><strong>table标签</strong></p>
<figure class="highlight plain"><table><tr><td class="gutter"><pre><span class="line">1</span><br></pre></td><td class="code"><pre><span class="line">&lt;table name&#x3D;&quot;travelrecord&quot; dataNode&#x3D;&quot;dn1,dn2,dn3&quot; rule&#x3D;&quot;auto-sharding-long&quot; &gt;&lt;&#x2F;table&gt;</span><br></pre></td></tr></table></figure>
<p>Table 标签定义了MyCat中的逻辑表，所有需要拆分的表都需要在这个标签中定义。</p>
<p><strong>name属性</strong></p>
<p>逻辑表的表名,同个schema标签中定义的名字必须唯一。</p>
<p><strong>dataNode属性</strong></p>
<p>定义这个逻辑表所属的dataNode,该属性的值需要和dataNode标签中name属性的值相互对应。如果需要定义的dn过多可以使 用如下的方法减少配置：</p>
<figure class="highlight plain"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br></pre></td><td class="code"><pre><span class="line">&lt;table name&#x3D;&quot;travelrecord&quot; dataNode&#x3D;&quot;multipleDn$0-99,multipleDn2$100-199&quot; rule&#x3D;&quot;auto-sharding-long&quot; &gt;</span><br><span class="line">&lt;&#x2F;table&gt;</span><br><span class="line"></span><br><span class="line">&lt;dataNode name&#x3D;&quot;multipleDn&quot; dataHost&#x3D;&quot;localhost1&quot; database&#x3D;&quot;db$0-99&quot; &gt;&lt;&#x2F;dataNode&gt;</span><br><span class="line">&lt;dataNode name&#x3D;&quot;multipleDn2&quot; dataHost&#x3D;&quot;localhost1&quot; database&#x3D;&quot; db$0-99&quot; &gt;&lt;&#x2F;dataNode&gt;</span><br><span class="line"></span><br><span class="line">这里需要注意的是database属性所指定的真实database name需要在后面添加一个，例如上面的例子中，我需要在真实的mysql 上建立名称为dbs0到dbs99的database。</span><br></pre></td></tr></table></figure>
<p><strong>rule属性</strong></p>
<p>该属性用于指定逻辑表要使用的规则名字，规则名字在rule.xml中定义，必须与tableRule标签中name属性属性值一一对应。</p>
<p><strong>ruleRequired属性</strong></p>
<p>该属性用于指定表是否绑定分片规则，如果配置为true，但没有配置具体rule的话 ，程序会报错。</p>
<p><strong>primaryKey属性</strong></p>
<p>该逻辑表对应真实表的主键，例如：分片的规则是使用非主键进行分片的，那么在使用主键查询的时候，就会发送查询语句到所有配置的DN上，如果使用该属性配置真实表的主键。<br>那么MyCat会缓存主键与具体DN的信息，那么再次使用非主键进行查询的 时候就不会进行广播式的查询，就会直接发送语句给具体的DN，但是尽管配置该属性，如果缓存并没有命中的话，还是会发送语 句给具体的DN，来获得数据。</p>
<p><strong>type属性</strong></p>
<p>该属性定义了逻辑表的类型，目前逻辑表只有“全局表”和”普通表”两种类型。对应的配置：</p>
<ul>
<li>全局表 global</li>
<li>普通表 不指定该值为global的所有表。</li>
</ul>
<p><strong>autoIncrement属性</strong></p>
<p>mysql对非自增长主键，使用last_insert_id()是不会返回结果的，只会返回0。所以，只有定义了自增长主键的表才可以用 last_insert_id()返回主键值。</p>
<p>mycat目前提供了自增长主键功能，但是如果对应的mysql节点上数据表，没有定义auto_increment，那么在mycat层调用 last_insert_id()也是不会返回结果的。</p>
<p>由于insert操作的时候没有带入分片键，mycat会先取下这个表对应的全局序列，然后赋值给分片键。这样才能正常的插入到数据 库中，最后使用last_insert_id()才会返回插入的分片键值。</p>
<p>如果要使用这个功能最好配合使用数据库模式的全局序列。</p>
<p>使用autoIncrement=“true”指定这个表有使用自增长主键，这样mycat才会不抛出分片键找不到的异常。<br>使用autoIncrement=“false”来禁用这个功能，当然你也可以直接删除掉这个属性。默认就是禁用的。</p>
<p><strong>needAddLimit属性</strong></p>
<p>指定表是否需要自动的在每个语句后面加上limit限制。由于使用了分库分表，数据量有时会特别巨大。这时候执行查询语句，如果恰巧又忘记了加上数量限制的话。那么查询所有的数据出来，也够等上一小会儿的。 所以，mycat就自动的为我们加上LIMIT 100。当然，如果语句中有limit，就不会再次添加了。</p>
<p>这个属性默认为true,你也可以设置成false禁用掉默认行为。</p>
<p><strong>childTable标签</strong></p>
<p>childTable标签用于定义E-R分片的子表。通过标签上的属性与父表进行关联。</p>
<table>
<thead>
<tr>
<th>属性名</th>
<th>值</th>
<th>数量限制</th>
<th>描述</th>
</tr>
</thead>
<tbody><tr>
<td>name</td>
<td>String</td>
<td>1</td>
<td>定义子表的表名。</td>
</tr>
<tr>
<td>joinKey</td>
<td>String</td>
<td>1</td>
<td>插入子表的时候会使用这个列的值查找父表存储的数据节点。</td>
</tr>
<tr>
<td>parentKey</td>
<td>String</td>
<td>1</td>
<td>属性指定的值一般为与父表建立关联关系的列名。程序首先获取joinkey的值，再通过<strong>parentKey</strong>属性指定的列名产生查询语 句，通过执行该语句得到父表存储在哪个分片上。从而确定子表存储的位置。</td>
</tr>
<tr>
<td>primaryKey</td>
<td>String</td>
<td>0..1</td>
<td>同table标签所描述的。</td>
</tr>
<tr>
<td>needAddLimit</td>
<td>Boolean</td>
<td>0..1</td>
<td>同table标签所描述的。</td>
</tr>
</tbody></table>
<p><strong>dataNode标签</strong></p>
<p>dataNode 标签定义了MyCat中的数据节点，也就是我们通常说所的数据分片。一个<strong>dataNode</strong> 标签就是一个独立的数据分 片。 </p>
<table>
<thead>
<tr>
<th>属性名</th>
<th>值</th>
<th>数量限制</th>
<th>描述</th>
</tr>
</thead>
<tbody><tr>
<td>name</td>
<td>String</td>
<td>1</td>
<td>定义数据节点的名字，这个名字需要是唯一的，我们需要在table标签上应用这个名字，来建立表与分片对应的关系。</td>
</tr>
<tr>
<td>dataHost</td>
<td>String</td>
<td>1</td>
<td>该属性用于定义该分片属于哪个数据库实例的，属性值是引用dataHost标签上定义的name属性。</td>
</tr>
<tr>
<td>database</td>
<td>String</td>
<td>1</td>
<td>该属性用于定义该分片属于哪个具体数据库实例上的具体库，因为这里使用两个维度来定义分片，就是：实例+具体的库。因为 每个库上建立的表和表结构是一样的。所以这样做就可以轻松的对表进行水平拆分。</td>
</tr>
</tbody></table>
<p><strong>dataHost标签</strong></p>
<p>作为Schema.xml中最后的一个标签，该标签在mycat逻辑库中也是作为最底层的标签存在，直接定义了具体的数据库实例、读 写分离配置和心跳语句。</p>
<p><strong>name属性</strong></p>
<p>唯一标识dataHost标签，供上层的标签使用。</p>
<p><strong>maxCon属性</strong></p>
<p>指定每个读写实例连接池的最大连接。也就是说，标签内嵌套的writeHost、readHost标签都会使用这个属性的值来实例化出连接池的最大连接数。</p>
<p><strong>minCon属性</strong></p>
<p>指定每个读写实例连接池的最小连接，初始化连接池的大小。</p>
<p><strong>balance属性</strong></p>
<p>负载均衡类型，目前的取值有3种：</p>
<ol>
<li>balance=“0”, 所有读操作都发送到当前可用的writeHost上。 </li>
<li>balance=“1”，所有读操作都随机的发送到readHost。 </li>
<li>balance=“2”，所有读操作都随机的在writeHost、readhost上分发。</li>
</ol>
<p><strong>writeType属性</strong></p>
<p>负载均衡类型，目前的取值有3种： </p>
<ol>
<li>writeType=“0”, 所有写操作都发送到可用的writeHost上。 </li>
<li>writeType=“1”，所有写操作都随机的发送到readHost。 </li>
<li>writeType=“2”，所有写操作都随机的在writeHost、readhost分上发。</li>
</ol>
<p><strong>dbType属性</strong></p>
<p>指定后端连接的数据库类型，目前支持二进制的mysql协议，还有其他使用JDBC连接的数据库。例如：mongodb、oracle、 spark等。</p>
<p><strong>dbDriver属性</strong></p>
<p>指定连接后端数据库使用的Driver，目前可选的值有native和JDBC。使用native的话，因为这个值执行的是二进制的mysql协 议，所以可以使用mysql和maridb。其他类型的数据库则需要使用JDBC驱动来支持。<br>如果使用JDBC的话需要将符合JDBC 4标准的驱动JAR包放到MYCAT\lib目录下，并检查驱动JAR包中包括如下目录结构的文 件：META-INF\services\java.sql.Driver。在这个文件内写上具体的Driver类名，例如：com.mysql.jdbc.Driver。</p>
<p><strong>heartbeat标签</strong></p>
<p>这个标签内指明用于和后端数据库进行心跳检查的语句。例如,MYSQL可以使用select user()，Oracle可以使用select 1 from dual等。<br>这个标签还有一个connectionInitSql属性，主要是当使用Oracle数据库时，需要执行的初始化SQL语句就放到这里面来。例 如：alter session set nls_date_format=’yyyy-mm-dd hh24:mi:ss’</p>
<p><strong>writeHost标签、readHost标签</strong></p>
<p>这两个标签都指定后端数据库的相关配置给mycat，用于实例化后端连接池。唯一不同的是，writeHost指定写实例、readHost 指定读实例，组着这些读写实例来满足系统的要求。</p>
<p>在一个dataHost内可以定义多个writeHost和readHost。但是，如果writeHost指定的后端数据库宕机，那么这个writeHost绑 定的所有readHost都将不可用。另一方面，由于这个writeHost宕机系统会自动的检测到，并切换到备用的writeHost上去。</p>
<p>这两个标签的属性相同，这里就一起介绍。</p>
<table>
<thead>
<tr>
<th>属性名</th>
<th>值</th>
<th>数量限制</th>
<th>描述</th>
</tr>
</thead>
<tbody><tr>
<td>host</td>
<td>String</td>
<td>1</td>
<td>用于标识不同实例，一般writeHost我们使用_M1，readHost我们用_S1。</td>
</tr>
<tr>
<td>url</td>
<td>String</td>
<td>1</td>
<td>后端实例连接地址，如果是使用native的dbDriver，则一般为address:port这种形式。用JDBC或其他的dbDriver，则需要特殊 指定。当使用JDBC时则可以这么写：jdbc:mysql://localhost:3306/。</td>
</tr>
<tr>
<td>password</td>
<td>String</td>
<td>1</td>
<td>后端存储实例需要的用户名字</td>
</tr>
<tr>
<td>user</td>
<td>String</td>
<td>1</td>
<td>后端存储实例需要的密码</td>
</tr>
</tbody></table>
<h4 id="2-配置server-xml"><a href="#2-配置server-xml" class="headerlink" title="2.配置server.xml"></a>2.配置server.xml</h4><p>server.xml几乎保存了所有mycat需要的系统配置信息。最常用的是在此配置用户名、密码及权限。</p>
<p>例如:给TESTDB配置一个test用户。</p>
<figure class="highlight plain"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br></pre></td><td class="code"><pre><span class="line">&lt;user name&#x3D;&quot;test&quot;&gt;</span><br><span class="line">    &lt;property name&#x3D;&quot;password&quot;&gt;test&lt;&#x2F;property&gt;</span><br><span class="line">    &lt;property name&#x3D;&quot;schemas&quot;&gt;TESTDB&lt;&#x2F;property&gt;</span><br><span class="line">    &lt;property name&#x3D;&quot;readOnly&quot;&gt;true&lt;&#x2F;property&gt;</span><br><span class="line">&lt;&#x2F;user&gt;</span><br></pre></td></tr></table></figure>
<h4 id="3-配置rule-xml"><a href="#3-配置rule-xml" class="headerlink" title="3.配置rule.xml"></a>3.配置rule.xml</h4><p>rule.xml里面就定义了我们对表进行拆分所涉及到的规则定义。我们可以灵活的对表使用不同的分片算法，或者对表使用相同的算法但具体的参数不同。这个文件里面主要有tableRule和function这两个标签。在具体使用过程中可以按照需求添加tableRule和function。</p>
<h3 id="MyCat读写分离"><a href="#MyCat读写分离" class="headerlink" title="MyCat读写分离"></a>MyCat读写分离</h3><p>数据库读写分离对于大型系统或者访问量很高的互联网应用来说，是必不可少的一个重要功能。对于MySQL来说，标准的读写分离是主从模式，一个写节点Master后面跟着多个读节点，读节点的数量取决于系统的压力，通常是1-3个读节点的配置。</p>
<p><img src="http://ww1.sinaimg.cn/mw690/63503acbjw1f67ok0c0gsj20gx09ogmo.jpg" alt="Mycat读写分离和自动切换机制，需要mysql的主从复制机制配合。"></p>
<h4 id="1-Mysql主从复制"><a href="#1-Mysql主从复制" class="headerlink" title="1.Mysql主从复制"></a>1.Mysql主从复制</h4><p><img src="http://ww1.sinaimg.cn/mw690/63503acbjw1f67ok01dxoj20j70dota9.jpg"></p>
<p>Mysql主从配置需要注意的地方:</p>
<ol>
<li>主DBServer和从DBServer需要版本一致。</li>
<li>主DBServer和从DBServer数据一致。</li>
<li>主DB server开启二进制日志,主DB server和从DB server的server_id都必须唯一。</li>
</ol>
<h4 id="2-Mysql主服务器配置"><a href="#2-Mysql主服务器配置" class="headerlink" title="2.Mysql主服务器配置"></a>2.Mysql主服务器配置</h4><p>修改/etc路径下的my.cnf文件,在[mysqld]段中添加:</p>
<figure class="highlight plain"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br></pre></td><td class="code"><pre><span class="line">binlog-do-db&#x3D;db1</span><br><span class="line">binlog-ignore-db&#x3D;mysql</span><br><span class="line">#启用二进制日志</span><br><span class="line">log-bin&#x3D;mysql-bin</span><br><span class="line">#服务器唯一ID，一般取IP最后一段</span><br><span class="line">server-id&#x3D;138</span><br></pre></td></tr></table></figure>
<p>修改后,重启mysql服务service mysqld restart</p>
<p>创建一个账户并授权slave。</p>
<figure class="highlight plain"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br></pre></td><td class="code"><pre><span class="line">mysql&gt;GRANT FILE ON *.* TO &#39;backup&#39;@&#39;%&#39; IDENTIFIED BY &#39;123456&#39;;</span><br><span class="line">mysql&gt;GRANT REPLICATION SLAVE, REPLICATION CLIENT ON *.* to &#39;backup&#39;@&#39;%&#39; identified by &#39;123456&#39;; </span><br><span class="line">#一般不用root帐号，“%”表示所有客户端都可能连，只要帐号，密码正确，此处可用具体客户端IP代替，如192.168.145.226，加强安全。</span><br></pre></td></tr></table></figure>
<p>之后刷新权限:FLUSH PRIVILEGES;</p>
<p>可以使用show master status;命令 查询主服务器状态。</p>
<h4 id="3-Mysql从服务器配置"><a href="#3-Mysql从服务器配置" class="headerlink" title="3.Mysql从服务器配置"></a>3.Mysql从服务器配置</h4><p>修改/etc路径下的my.cnf文件,在[mysqld]段中添加一个serverid。</p>
<p>配置从服务器</p>
<figure class="highlight plain"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br></pre></td><td class="code"><pre><span class="line">mysql&gt;change master to master_host&#x3D;&#39;192.168.145.138&#39;,master_user&#x3D;&#39;backup&#39;,master_password&#x3D;&#39;123456&#39;,</span><br><span class="line"> master_log_file&#x3D;&#39;mysql-bin.000002&#39;,master_log_pos&#x3D;679;</span><br></pre></td></tr></table></figure>
<p>注意语句中间不要断开，master_port为mysql服务器端口号(无引号)，master_user为执行同步操作的数据库账户，“679”无单引号(此处的679就是show master status 中看到的position的值，这里的mysql-bin.000002就是file对应的值)。</p>
<p>之后启动从服务器复制功能 mysql&gt;start slave;</p>
<p>检查从服务器状态 show slave status;</p>
<p>注：Slave_IO及Slave_SQL进程必须正常运行，即YES状态，否则都是错误的状态(如：其中一个NO均属错误)。</p>
<p>如果出现此错误：<br>Fatal error: The slave I/O thread stops because master and slave have equal MySQL server UUIDs; these UUIDs must be different for replication to work.<br>因为是mysql是克隆的系统所以mysql的uuid是一样的，所以需要修改。</p>
<p>解决方法:<br>删除/var/lib/mysql/auto.cnf文件，重新启动服务。</p>
<h4 id="4-MyCat配置"><a href="#4-MyCat配置" class="headerlink" title="4.MyCat配置"></a>4.MyCat配置</h4><p>Mycat 1.4 支持MySQL主从复制状态绑定的读写分离机制，让读更加安全可靠，配置如下：</p>
<figure class="highlight plain"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br></pre></td><td class="code"><pre><span class="line">&lt;dataNode name&#x3D;&quot;dn1&quot; dataHost&#x3D;&quot;localhost1&quot; database&#x3D;&quot;db1&quot; &#x2F;&gt;</span><br><span class="line">	&lt;dataNode name&#x3D;&quot;dn2&quot; dataHost&#x3D;&quot;localhost1&quot; database&#x3D;&quot;db2&quot; &#x2F;&gt;</span><br><span class="line">	&lt;dataNode name&#x3D;&quot;dn3&quot; dataHost&#x3D;&quot;localhost1&quot; database&#x3D;&quot;db3&quot; &#x2F;&gt;</span><br><span class="line">	&lt;dataHost name&#x3D;&quot;localhost1&quot; maxCon&#x3D;&quot;1000&quot; minCon&#x3D;&quot;10&quot; balance&#x3D;&quot;1&quot;</span><br><span class="line">		writeType&#x3D;&quot;0&quot; dbType&#x3D;&quot;mysql&quot; dbDriver&#x3D;&quot;native&quot; switchType&#x3D;&quot;2&quot;  slaveThreshold&#x3D;&quot;100&quot;&gt;</span><br><span class="line">		&lt;heartbeat&gt;show slave status&lt;&#x2F;heartbeat&gt;</span><br><span class="line">		&lt;writeHost host&#x3D;&quot;hostM&quot; url&#x3D;&quot;192.168.25.138:3306&quot; user&#x3D;&quot;root&quot;</span><br><span class="line">			password&#x3D;&quot;root&quot;&gt;</span><br><span class="line">			&lt;readHost host&#x3D;&quot;hostS&quot; url&#x3D;&quot;192.168.25.166:3306&quot; user&#x3D;&quot;root&quot;</span><br><span class="line">			password&#x3D;&quot;root&quot; &#x2F;&gt;</span><br><span class="line">		&lt;&#x2F;writeHost&gt;</span><br><span class="line">	&lt;&#x2F;dataHost&gt;</span><br></pre></td></tr></table></figure>
<p>readHost是从属于writeHost的，即意味着它从那个writeHost获取同步数据，因此，当它所属的writeHost宕机了，则它也不会再参与到读写分离中来，即“不工作了”，这是因为此时，它的数据已经“不可靠”了。基于这个考虑，目前mycat 1.3和1.4版本中，若想支持MySQL一主一从的标准配置，并且在主节点宕机的情况下，从节点还能读取数据，则需要在Mycat里配置为两个writeHost并设置balance=1。</p>
<p>设置 switchType=”2” 与slaveThreshold=”100”</p>
<p>switchType 目前有三种选择：</p>
<p>-1：表示不自动切换</p>
<p>1 ：默认值，自动切换</p>
<p>2 ：基于MySQL主从同步的状态决定是否切换</p>
<p>Mycat心跳检查语句配置为 show slave status ，dataHost 上定义两个新属性: switchType=”2” 与slaveThreshold=”100”，此时意味着开启MySQL主从复制状态绑定的读写分离与切换机制。</p>
<p>Mycat心跳机制通过检测 show slave status 中的 “Seconds_Behind_Master”, “Slave_IO_Running”, “Slave_SQL_Running” 三个字段来确定当前主从同步的状态以及Seconds_Behind_Master主从复制时延。</p>

      
    </div>

    
    
    

    <footer class="post-footer">
        <div class="post-eof"></div>
      
    </footer>
  </article>
</div>




    


<div class="post-block">
  
  

  <article itemscope itemtype="http://schema.org/Article" class="post-content" lang="zh">
    <link itemprop="mainEntityOfPage" href="https://suyuhuan.gitee.io/yuwanzi.io/2016/07/03/2016-07-03-activemq/">

    <span hidden itemprop="author" itemscope itemtype="http://schema.org/Person">
      <meta itemprop="image" content="/yuwanzi.io/images/avatar.gif">
      <meta itemprop="name" content="玉丸子">
      <meta itemprop="description" content="这里是玉丸子的个人博客,与你一起发现更大的世界。">
    </span>

    <span hidden itemprop="publisher" itemscope itemtype="http://schema.org/Organization">
      <meta itemprop="name" content="玉丸子 | Blog">
    </span>
      <header class="post-header">
        <h2 class="post-title" itemprop="name headline">
          <a href="/yuwanzi.io/2016/07/03/2016-07-03-activemq/" class="post-title-link" itemprop="url">ActiveMQ消息队列</a>
        </h2>

        <div class="post-meta-container">
          <div class="post-meta">
    <span class="post-meta-item">
      <span class="post-meta-item-icon">
        <i class="far fa-calendar"></i>
      </span>
      <span class="post-meta-item-text">Veröffentlicht am</span>

      <time title="Erstellt: 2016-07-03 18:00:00" itemprop="dateCreated datePublished" datetime="2016-07-03T18:00:00+08:00">2016-07-03</time>
    </span>
      <span class="post-meta-item">
        <span class="post-meta-item-icon">
          <i class="far fa-calendar-check"></i>
        </span>
        <span class="post-meta-item-text">Bearbeitet am</span>
        <time title="Geändert am: 2020-11-07 08:58:17" itemprop="dateModified" datetime="2020-11-07T08:58:17+08:00">2020-11-07</time>
      </span>
    <span class="post-meta-item">
      <span class="post-meta-item-icon">
        <i class="far fa-folder"></i>
      </span>
      <span class="post-meta-item-text">in</span>
        <span itemprop="about" itemscope itemtype="http://schema.org/Thing">
          <a href="/yuwanzi.io/categories/%E5%90%8E%E7%AB%AF/" itemprop="url" rel="index"><span itemprop="name">后端</span></a>
        </span>
          . 
        <span itemprop="about" itemscope itemtype="http://schema.org/Thing">
          <a href="/yuwanzi.io/categories/%E5%90%8E%E7%AB%AF/%E6%B6%88%E6%81%AF%E9%98%9F%E5%88%97/" itemprop="url" rel="index"><span itemprop="name">消息队列</span></a>
        </span>
    </span>

  
</div>

        </div>
      </header>

    
    
    
    <div class="post-body" itemprop="articleBody">
          <p><img src="http://ww1.sinaimg.cn/mw690/63503acbjw1f67ojz4j8kj20bk03djrj.jpg"></p>
<h3 id="介绍"><a href="#介绍" class="headerlink" title="介绍"></a>介绍</h3><p>&nbsp;&nbsp;ActiveMQ 是Apache出品，最流行的，能力强劲的开源消息总线。ActiveMQ 是一个完全支持JMS1.1和J2EE 1.4规范的 JMS Provider实现,尽管JMS规范出台已经是很久的事情了,但是JMS在当今的J2EE应用中间仍然扮演着特殊的地位。</p>
<p>主要特点：</p>
<ol>
<li>多种语言和协议编写客户端。语言: Java, C, C++, C#, Ruby, Perl, Python, PHP。应用协议: OpenWire,Stomp REST,WS Notification,XMPP,AMQP</li>
<li>完全支持JMS1.1和J2EE 1.4规范 (持久化,XA消息,事务)</li>
<li>对Spring的支持,ActiveMQ可以很容易内嵌到使用Spring的系统里面去,而且也支持Spring2.0的特性</li>
<li>通过了常见J2EE服务器(如 Geronimo,JBoss 4, GlassFish,WebLogic)的测试,其中通过JCA 1.5 resource adaptors的配置,可以让ActiveMQ可以自动的部署到任何兼容J2EE 1.4 商业服务器上</li>
<li>支持多种传送协议:in-VM,TCP,SSL,NIO,UDP,JGroups,JXTA</li>
<li>支持通过JDBC和journal提供高速的消息持久化</li>
<li>从设计上保证了高性能的集群,客户端-服务器,点对点</li>
<li>支持Ajax</li>
<li>支持与Axis的整合</li>
<li>可以很容易得调用内嵌JMS provider,进行测试</li>
</ol>
<h3 id="什么是JMS规范"><a href="#什么是JMS规范" class="headerlink" title="什么是JMS规范"></a>什么是JMS规范</h3><p>&nbsp;&nbsp;JMS的全称是Java Message Service，即Java消息服务。用于在两个应用程序之间，或分布式系统中发送消息，进行异步通信。<br>&nbsp;&nbsp;它主要用于在生产者和消费者之间进行消息传递，生产者负责产生消息，而消费者负责接收消息。把它应用到实际的业务需求中的话我们可以在特定的时候利用生产者生成一条消息，并进行发送，对应的消费者在接收到对应的消息后去完成对应的业务逻辑。<br>&nbsp;&nbsp;对于消息的传递有两种类型：<br>一种是点对点的，即一个生产者和一个消费者一一对应；<br>另一种是发布/订阅模式，即一个生产者产生消息并进行发送后，可以由多个消费者进行接收。<br>JMS定义了五种不同的消息正文格式，以及调用的消息类型，允许你发送并接收一些不同形式的数据，提供现有消息格式的一些级别的兼容性。</p>
<ul>
<li>StreamMessage – Java原始值的数据流</li>
<li>MapMessage–一套名称-值对</li>
<li>TextMessage–一个字符串对象</li>
<li>ObjectMessage–一个序列化的 Java对象</li>
<li>BytesMessage–一个字节的数据流</li>
</ul>
<h3 id="JMS应用程序接口"><a href="#JMS应用程序接口" class="headerlink" title="JMS应用程序接口"></a>JMS应用程序接口</h3><p>&nbsp;&nbsp;<strong>ConnectionFactory</strong></p>
<p>&nbsp;&nbsp;&nbsp;&nbsp;用户用来创建到JMS提供者的连接的被管对象。JMS客户通过可移植的接口访问连接，这样当下层的实现改变时，代码不需要进行修改。 管理员在JNDI名字空间中配置连接工厂，这样，JMS客户才能够查找到它们。根据消息类型的不同，用户将使用队列连接工厂，或者主题连接工厂。</p>
<p>&nbsp;&nbsp;<strong>Connection</strong></p>
<p>&nbsp;&nbsp;&nbsp;&nbsp;连接代表了应用程序和消息服务器之间的通信链路。在获得了连接工厂后，就可以创建一个与JMS提供者的连接。根据不同的连接类型，连接允许用户创建会话，以发送和接收队列和主题到目标。</p>
<p>&nbsp;&nbsp;<strong>Destination</strong></p>
<p>&nbsp;&nbsp;&nbsp;&nbsp;目标是一个包装了消息目标标识符的被管对象，消息目标是指消息发布和接收的地点，或者是队列，或者是主题。JMS管理员创建这些对象，然后用户通过JNDI发现它们。和连接工厂一样，管理员可以创建两种类型的目标，点对点模型的队列，以及发布者／订阅者模型的主题。</p>
<p>&nbsp;&nbsp;<strong>MessageProducer</strong></p>
<p>&nbsp;&nbsp;&nbsp;&nbsp;由会话创建的对象，用于发送消息到目标。用户可以创建某个目标的发送者，也可以创建一个通用的发送者，在发送消息时指定目标。</p>
<p>&nbsp;&nbsp;<strong>MessageConsumer</strong></p>
<p>&nbsp;&nbsp;&nbsp;&nbsp;由会话创建的对象，用于接收发送到目标的消息。消费者可以同步地（阻塞模式），或异步（非阻塞）接收队列和主题类型的消息。</p>
<p>&nbsp;&nbsp;<strong>Message</strong></p>
<p>&nbsp;&nbsp;&nbsp;&nbsp;是在消费者和生产者之间传送的对象，也就是说从一个应用程序传送到另一个应用程序。一个消息有三个主要部分：</p>
<ul>
<li>消息头（必须）：包含用于识别和为消息寻找路由的操作设置。</li>
<li>一组消息属性（可选）：包含额外的属性，支持其他提供者和用户的兼容。可以创建定制的字段和过滤器（消息选择器）。</li>
<li>一个消息体（可选）：允许用户创建五种类型的消息（文本消息，映射消息，字节消息，流消息和对象消息）。</li>
</ul>
<p>消息接口非常灵活，并提供了许多方式来定制消息的内容。</p>
<p>&nbsp;&nbsp;<strong>Session</strong></p>
<p>&nbsp;&nbsp;&nbsp;&nbsp;表示一个单线程的上下文，用于发送和接收消息。由于会话是单线程的，所以消息是连续的，就是说消息是按照发送的顺序一个一个接收的。会话的好处是它支持事务。如果用户选择了事务支持，会话上下文将保存一组消息，直到事务被提交才发送这些消息。在提交事务之前，用户可以使用回滚操作取消这些消息。一个会话允许用户创建消息生产者来发送消息，创建消息消费者来接收消息。</p>
<h3 id="JMS消息发送模式"><a href="#JMS消息发送模式" class="headerlink" title="JMS消息发送模式"></a>JMS消息发送模式</h3><p><img src="http://ww2.sinaimg.cn/mw690/63503acbjw1f67ojyj0g8j20mf0fdwft.jpg"></p>
<p>&nbsp;&nbsp;在P2P模型下，一个生产者向一个特定的队列发布消息，一个消费者从该队列中读取消息。这里，生产者知道消费者的队列，并直接将消息发送到消费者的队列。这种模式被概括为：只有一个消费者将获得消息。生产者不需要在接收者消费该消息期间处于运行状态，接收者也同样不需要在消息发送时处于运行状态。每一个成功处理的消息都由接收者签收。</p>
<p>&nbsp;&nbsp;publish/subscribe模型支持向一个特定的消息主题发布消息。0或多个订阅者可能对接收来自特定消息主题的消息感兴趣。在这种模型下，发布者和订阅者彼此不知道对方。这种模式好比是匿名公告板。这种模式被概括为：多个消费者可以获得消息。在发布者和订阅者之间存在时间依赖性。发布者需要建立一个订阅（subscription），以便客户能够订阅。订阅者必须保持持续的活动状态以接收消息，除非订阅者建立了持久的订阅。在那种情况下，在订阅者未连接时发布的消息将在订阅者重新连接时重新发布。</p>
<h3 id="安装ActiveMQ"><a href="#安装ActiveMQ" class="headerlink" title="安装ActiveMQ"></a>安装ActiveMQ</h3><ol>
<li><p>首先到官网 <a target="_blank" rel="noopener" href="http://activemq.apache.org/">http://activemq.apache.org/</a> 下载ActiveMQ.</p>
</li>
<li><p>因为ActiveMQ是JAVA开发的,所以依赖jdk环境。</p>
</li>
<li><p>解压ActiveMQ。</p>
</li>
<li><p>在ActiveMQ/bin目录中,./activemq start 开启ActiveMQ</p>
</li>
<li><p>在ActiveMQ/bin目录中,./activemq stop 关闭ActiveMQ</p>
</li>
<li><p>访问后台 <a target="_blank" rel="noopener" href="http://ip:8161/admin">http://ip:8161/admin</a>  ActiveMQ的默认后台端口为8161,Message端口为61616</p>
</li>
</ol>
<h3 id="使用ActiveMQ"><a href="#使用ActiveMQ" class="headerlink" title="使用ActiveMQ"></a>使用ActiveMQ</h3><p>&nbsp;&nbsp;使用ActiveMQ需要先引入ActiveMQ的jar包。</p>
<figure class="highlight plain"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br></pre></td><td class="code"><pre><span class="line">       &lt;dependency&gt;</span><br><span class="line">	&lt;groupId&gt;org.apache.activemq&lt;&#x2F;groupId&gt;</span><br><span class="line">	&lt;artifactId&gt;activemq-all&lt;&#x2F;artifactId&gt;</span><br><span class="line">	&lt;version&gt;5.11.2&lt;&#x2F;version&gt;</span><br><span class="line">&lt;&#x2F;dependency&gt;</span><br></pre></td></tr></table></figure>
<p>以下示例使用Queue模式,如要使用Topic模式只需要将Destination改成Topic即可。</p>
<h4 id="1-Producer"><a href="#1-Producer" class="headerlink" title="1.Producer"></a>1.Producer</h4><figure class="highlight java"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br><span class="line">13</span><br><span class="line">14</span><br><span class="line">15</span><br><span class="line">16</span><br><span class="line">17</span><br><span class="line">18</span><br><span class="line">19</span><br><span class="line">20</span><br><span class="line">21</span><br><span class="line">22</span><br><span class="line">23</span><br><span class="line">24</span><br><span class="line">25</span><br><span class="line">26</span><br><span class="line">27</span><br><span class="line">28</span><br><span class="line">29</span><br><span class="line">30</span><br><span class="line">31</span><br><span class="line">32</span><br><span class="line">33</span><br><span class="line">34</span><br><span class="line">35</span><br><span class="line">36</span><br><span class="line">37</span><br><span class="line">38</span><br><span class="line">39</span><br><span class="line">40</span><br><span class="line">41</span><br><span class="line">42</span><br><span class="line">43</span><br><span class="line">44</span><br><span class="line">45</span><br><span class="line">46</span><br></pre></td><td class="code"><pre><span class="line">   <span class="meta">@Test</span></span><br><span class="line"><span class="function"><span class="keyword">public</span> <span class="keyword">void</span> <span class="title">testProducer</span><span class="params">()</span> <span class="keyword">throws</span> JMSException</span>&#123;</span><br><span class="line">	
<span class="comment">// 创建连接工厂</span></span><br><span class="line">	ConnectionFactory connectionFactory = </span><br><span class="line">				<span class="keyword">new</span> ActiveMQConnectionFactory(<span class="string">&quot;tcp://192.168.145.137:61616&quot;</span>);</span><br><span class="line">	<span class="comment">// 声明Connection</span></span><br><span class="line">	Connection connection = <span class="keyword">null</span>;</span><br><span class="line">	<span class="comment">// 声明Session</span></span><br><span class="line">	Session session = <span class="keyword">null</span>;</span><br><span class="line">	<span class="comment">// 声明Producer</span></span><br><span class="line">	MessageProducer producer = <span class="keyword">null</span>;</span><br><span class="line">	<span class="keyword">try</span>&#123;</span><br><span class="line">		<span class="comment">// 从连接工厂中获得连接</span></span><br><span class="line">		connection = connectionFactory.createConnection();</span><br><span class="line">		<span class="comment">// 开启连接</span></span><br><span class="line">		connection.start();</span><br><span class="line">		<span class="comment">/*</span></span><br><span class="line"><span class="comment">		 * 从连接中获得会话</span></span><br><span class="line"><span class="comment">		 * 参数1:transacted  boolean型</span></span><br><span class="line"><span class="comment">		 * 当设置为true时,将忽略参数2,acknowledgment mode被jms服务器设置 SESSION_TRANSACTED。</span></span><br><span class="line"><span class="comment">		 * 当一个事务被提交时,消息确认就会自动发生。</span></span><br><span class="line"><span class="comment">		 * 当设置为false时,需要设置参数2</span></span><br><span class="line"><span class="comment">		 * Session.AUTO_ACKNOWLEDGE为自动确认，当客户成功的从receive方法返回的时候，或者从</span></span><br><span class="line"><span class="comment">		 * MessageListener.onMessage方法成功返回的时候，会话自动确认客户收到的消息。</span></span><br><span class="line"><span class="comment">		 * Session.CLIENT_ACKNOWLEDGE 为客户端确认。客户端接收到消息后，必须调用javax.jms.Message的</span></span><br><span 
class="line"><span class="comment">		 * acknowledge方法。jms服务器才会删除消息。（默认是批量确认）</span></span><br><span class="line"><span class="comment">		 */</span></span><br><span class="line">		session = connection.createSession(<span class="keyword">false</span>,Session.AUTO_ACKNOWLEDGE);</span><br><span class="line">		<span class="comment">// 创建一个Destination目的地 Queue或者Topic</span></span><br><span class="line">		Queue queue = session.createQueue(<span class="string">&quot;testMessage&quot;</span>);</span><br><span class="line">		<span class="comment">// 创建一个Producer生产者</span></span><br><span class="line">		producer = session.createProducer(queue);</span><br><span class="line">		<span class="comment">// 创建message</span></span><br><span class="line">		ActiveMQTextMessage textMessage = <span class="keyword">new</span> ActiveMQTextMessage();</span><br><span class="line">		textMessage.setText(<span class="string">&quot;test&quot;</span>);</span><br><span class="line">		<span class="comment">// 发送message</span></span><br><span class="line">		producer.send(textMessage);</span><br><span class="line">	&#125;<span class="keyword">catch</span>(Exception e)&#123;</span><br><span class="line">		e.printStackTrace();</span><br><span class="line">	&#125;<span class="keyword">finally</span>&#123;</span><br><span class="line">		<span class="comment">// 回收资源</span></span><br><span class="line">		producer.close();</span><br><span class="line">		session.close();</span><br><span class="line">		connection.close();</span><br><span class="line">	&#125;</span><br><span class="line">&#125;</span><br></pre></td></tr></table></figure>
<h4 id="2-Consumer"><a href="#2-Consumer" class="headerlink" title="2.Consumer"></a>2.Consumer</h4><p>&nbsp;&nbsp;消费者有两种消费方式:</p>
<ol>
<li><p>同步消费。通过调用消费者的receive方法从目的地中显式提取消息。receive方法可以一直阻塞到消息到达。</p>
</li>
<li><p>异步消费。客户可以为消费者注册一个消息监听器，以定义在消息到达时所采取的动作。<br>  实现MessageListener接口，在onMessage()方法中实现消息的处理逻辑。</p>
</li>
</ol>
<p>同步消费</p>
<figure class="highlight java"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br><span class="line">13</span><br><span class="line">14</span><br><span class="line">15</span><br><span class="line">16</span><br><span class="line">17</span><br><span class="line">18</span><br><span class="line">19</span><br><span class="line">20</span><br><span class="line">21</span><br><span class="line">22</span><br><span class="line">23</span><br><span class="line">24</span><br><span class="line">25</span><br><span class="line">26</span><br><span class="line">27</span><br><span class="line">28</span><br><span class="line">29</span><br><span class="line">30</span><br><span class="line">31</span><br><span class="line">32</span><br><span class="line">33</span><br><span class="line">34</span><br><span class="line">35</span><br><span class="line">36</span><br><span class="line">37</span><br><span class="line">38</span><br><span class="line">39</span><br></pre></td><td class="code"><pre><span class="line"><span class="meta">@Test</span></span><br><span class="line"><span class="function"><span class="keyword">public</span> <span class="keyword">void</span> <span class="title">testSyncConsumer</span><span class="params">()</span> <span class="keyword">throws</span> JMSException</span>&#123;</span><br><span class="line">	<span class="comment">// 创建连接工厂</span></span><br><span class="line">	ConnectionFactory connectionFactory = </span><br><span class="line">					<span class="keyword">new</span> ActiveMQConnectionFactory(<span class="string">&quot;tcp://192.168.145.137:61616&quot;</span>);</span><br><span class="line">	Connection connection 
= <span class="keyword">null</span>;</span><br><span class="line">	Session session = <span class="keyword">null</span>;</span><br><span class="line">	MessageConsumer consumer = <span class="keyword">null</span>;</span><br><span class="line">	<span class="keyword">try</span>&#123;</span><br><span class="line">		<span class="comment">// 获得连接</span></span><br><span class="line">		connection = connectionFactory.createConnection();</span><br><span class="line">		<span class="comment">// 开启连接</span></span><br><span class="line">		connection.start();</span><br><span class="line">		<span class="comment">// 获得Session</span></span><br><span class="line">		session = connection.createSession(<span class="keyword">false</span>,Session.AUTO_ACKNOWLEDGE);</span><br><span class="line">		<span class="comment">// 创建一个目的地</span></span><br><span class="line">		Queue queue = session.createQueue(<span class="string">&quot;testSynchronization&quot;</span>);</span><br><span class="line">		<span class="comment">// 创建消费者</span></span><br><span class="line">		consumer = session.createConsumer(queue);</span><br><span class="line">		<span class="comment">// 使用receive同步消费</span></span><br><span class="line">		<span class="keyword">while</span>(<span class="keyword">true</span>)&#123;</span><br><span class="line">			<span class="comment">// 设置接收信息的时间,单位为毫秒</span></span><br><span class="line">			Message message = consumer.receive(<span class="number">10000</span>);</span><br><span class="line">			<span class="keyword">if</span>(message != <span class="keyword">null</span>)&#123;</span><br><span class="line">				System.out.println(message);</span><br><span class="line">			&#125;<span class="keyword">else</span>&#123;</span><br><span class="line">				<span class="comment">// 超时,结束循环</span></span><br><span class="line">				<span class="keyword">break</span>;</span><br><span class="line">			&#125;</span><br><span class="line">		&#125;</span><br><span class="line">	&#125;<span 
class="keyword">catch</span>(Exception e)&#123;</span><br><span class="line">		e.printStackTrace();</span><br><span class="line">	&#125;<span class="keyword">finally</span>&#123;</span><br><span class="line">		<span class="comment">// 回收资源</span></span><br><span class="line">		consumer.close();</span><br><span class="line">		session.close();</span><br><span class="line">		connection.close();</span><br><span class="line">	&#125;</span><br><span class="line">&#125;</span><br></pre></td></tr></table></figure>
<p>异步消费</p>
<figure class="highlight java"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br><span class="line">13</span><br><span class="line">14</span><br><span class="line">15</span><br><span class="line">16</span><br><span class="line">17</span><br><span class="line">18</span><br><span class="line">19</span><br><span class="line">20</span><br><span class="line">21</span><br><span class="line">22</span><br><span class="line">23</span><br><span class="line">24</span><br><span class="line">25</span><br><span class="line">26</span><br><span class="line">27</span><br><span class="line">28</span><br><span class="line">29</span><br><span class="line">30</span><br><span class="line">31</span><br><span class="line">32</span><br><span class="line">33</span><br><span class="line">34</span><br><span class="line">35</span><br><span class="line">36</span><br><span class="line">37</span><br><span class="line">38</span><br><span class="line">39</span><br><span class="line">40</span><br><span class="line">41</span><br><span class="line">42</span><br><span class="line">43</span><br><span class="line">44</span><br><span class="line">45</span><br></pre></td><td class="code"><pre><span class="line">   <span class="meta">@Test</span></span><br><span class="line"><span class="function"><span class="keyword">public</span> <span class="keyword">void</span> <span class="title">testAsyncConsumer</span><span class="params">()</span> <span class="keyword">throws</span> JMSException</span>&#123;</span><br><span class="line">	<span class="comment">// 创建连接工厂</span></span><br><span class="line">	ConnectionFactory connectionFactory = </span><br><span 
class="line">				<span class="keyword">new</span> ActiveMQConnectionFactory(<span class="string">&quot;tcp://192.168.145.137:61616&quot;</span>);</span><br><span class="line">	Connection connection = <span class="keyword">null</span>;</span><br><span class="line">	Session session = <span class="keyword">null</span>;</span><br><span class="line">	MessageConsumer consumer = <span class="keyword">null</span>;</span><br><span class="line">	<span class="keyword">try</span>&#123;</span><br><span class="line">		<span class="comment">// 获得连接</span></span><br><span class="line">		connection = connectionFactory.createConnection();</span><br><span class="line">		<span class="comment">// 开启连接</span></span><br><span class="line">		connection.start();</span><br><span class="line">		<span class="comment">// 获得Session</span></span><br><span class="line">		session = connection.createSession(<span class="keyword">false</span>,Session.AUTO_ACKNOWLEDGE);</span><br><span class="line">		<span class="comment">// 设置目的地</span></span><br><span class="line">		Queue queue = session.createQueue(<span class="string">&quot;testAsynchronization&quot;</span>);</span><br><span class="line">		<span class="comment">// 创建Consumer</span></span><br><span class="line">		consumer = session.createConsumer(queue);</span><br><span class="line">		<span class="comment">// 异步消费</span></span><br><span class="line">		session.setMessageListener(<span class="keyword">new</span> MessageListener() &#123;</span><br><span class="line">			</span><br><span class="line">			<span class="meta">@Override</span></span><br><span class="line">			<span class="function"><span class="keyword">public</span> <span class="keyword">void</span> <span class="title">onMessage</span><span class="params">(Message message)</span> </span>&#123;</span><br><span class="line">				<span class="keyword">if</span>(message <span class="keyword">instanceof</span> TextMessage)&#123;</span><br><span class="line">					<span class="keyword">try</span> 
&#123;</span><br><span class="line">						String text = ((TextMessage) message).getText();</span><br><span class="line">						System.out.println(text);</span><br><span class="line">					&#125; <span class="keyword">catch</span> (JMSException e) &#123;</span><br><span class="line">						e.printStackTrace();</span><br><span class="line">					&#125;</span><br><span class="line">				&#125;</span><br><span class="line">			&#125;</span><br><span class="line">		&#125;);</span><br><span class="line">		System.in.read();</span><br><span class="line">	&#125;<span class="keyword">catch</span>(Exception e)&#123;</span><br><span class="line">		e.printStackTrace();</span><br><span class="line">	&#125;<span class="keyword">finally</span>&#123;</span><br><span class="line">		<span class="comment">// 回收资源</span></span><br><span class="line">		consumer.close();</span><br><span class="line">		session.close();</span><br><span class="line">		connection.close();</span><br><span class="line">	&#125;</span><br><span class="line">&#125;</span><br><span class="line"></span><br></pre></td></tr></table></figure>
<h3 id="整合Spring"><a href="#整合Spring" class="headerlink" title="整合Spring"></a>整合Spring</h3><h4 id="1-配置ConnectionFactory"><a href="#1-配置ConnectionFactory" class="headerlink" title="1.配置ConnectionFactory"></a>1.配置ConnectionFactory</h4><figure class="highlight"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br><span class="line">13</span><br><span class="line">14</span><br><span class="line">15</span><br><span class="line">16</span><br><span class="line">17</span><br><span class="line">18</span><br><span class="line">19</span><br><span class="line">20</span><br><span class="line">21</span><br><span class="line">22</span><br><span class="line">23</span><br><span class="line">24</span><br></pre></td><td class="code"><pre><span class="line">&lt;?xml version=<span class="string">&quot;1.0&quot;</span> encoding=<span class="string">&quot;UTF-8&quot;</span>?&gt;</span><br><span class="line">&lt;beans xmlns=<span class="string">&quot;http://www.springframework.org/schema/beans&quot;</span></span><br><span class="line">	xmlns:context=<span class="string">&quot;http://www.springframework.org/schema/context&quot;</span> xmlns:p=<span class="string">&quot;http://www.springframework.org/schema/p&quot;</span></span><br><span class="line">	xmlns:aop=<span class="string">&quot;http://www.springframework.org/schema/aop&quot;</span> xmlns:tx=<span class="string">&quot;http://www.springframework.org/schema/tx&quot;</span></span><br><span class="line">	xmlns:jms=<span class="string">&quot;http://www.springframework.org/schema/jms&quot;</span> xmlns:xsi=<span 
class="string">&quot;http://www.w3.org/2001/XMLSchema-instance&quot;</span></span><br><span class="line">	xsi:schemaLocation=<span class="string">&quot;http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-4.0.xsd</span></span><br><span class="line"><span class="string">	http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context-4.0.xsd</span></span><br><span class="line"><span class="string">	http://www.springframework.org/schema/aop http://www.springframework.org/schema/aop/spring-aop-4.0.xsd </span></span><br><span class="line"><span class="string">	http://www.springframework.org/schema/tx http://www.springframework.org/schema/tx/spring-tx-4.0.xsd</span></span><br><span class="line"><span class="string">	http://www.springframework.org/schema/jms http://www.springframework.org/schema/jms/spring-jms-4.0.xsd</span></span><br><span class="line"><span class="string">	http://www.springframework.org/schema/util http://www.springframework.org/schema/util/spring-util-4.0.xsd&quot;</span>&gt;</span><br><span class="line"></span><br><span class="line"></span><br><span class="line">	&lt;!-- ActiveMQ提供的ConnectionFactory --&gt;</span><br><span class="line">	&lt;bean id=<span class="string">&quot;targetConnectionFactory&quot;</span> <span class="class"><span class="keyword">class</span></span>=<span class="string">&quot;org.apache.activemq.ActiveMQConnectionFactory&quot;</span>&gt;</span><br><span class="line">		&lt;property name=<span class="string">&quot;brokerURL&quot;</span> value=<span class="string">&quot;tcp://192.168.145.137:61616&quot;</span> /&gt;</span><br><span class="line">	&lt;/bean&gt;</span><br><span class="line">	&lt;!-- Spring的ConnectionFactory需要注入ActiveMQ的ConnectionFactory --&gt;</span><br><span class="line">	&lt;bean id=<span class="string">&quot;connectionFactory&quot;</span></span><br><span class="line">		<span class="class"><span 
class="keyword">class</span></span>=<span class="string">&quot;org.springframework.jms.connection.SingleConnectionFactory&quot;</span>&gt;</span><br><span class="line">		&lt;!-- 目标ConnectionFactory对应真实的可以产生JMS Connection的ConnectionFactory --&gt;</span><br><span class="line">		&lt;property name=<span class="string">&quot;targetConnectionFactory&quot;</span> ref=<span class="string">&quot;targetConnectionFactory&quot;</span> /&gt;</span><br><span class="line">	&lt;/bean&gt;</span><br><span class="line">&lt;/beans&gt;</span><br></pre></td></tr></table></figure>
<h4 id="2-配置生产者"><a href="#2-配置生产者" class="headerlink" title="2.配置生产者"></a>2.配置生产者</h4><figure class="highlight"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br><span class="line">13</span><br><span class="line">14</span><br><span class="line">15</span><br><span class="line">16</span><br></pre></td><td class="code"><pre><span class="line">   &lt;!-- 配置生产者 --&gt;</span><br><span class="line">&lt;!-- Spring提供的JMS工具类，它可以进行消息发送、接收等 --&gt;</span><br><span class="line">&lt;bean id=<span class="string">&quot;jmsTemplate&quot;</span> <span class="class"><span class="keyword">class</span></span>=<span class="string">&quot;org.springframework.jms.core.JmsTemplate&quot;</span>&gt;</span><br><span class="line">	&lt;!-- 注入Spring的连接工厂 --&gt;</span><br><span class="line">	&lt;property name=<span class="string">&quot;connectionFactory&quot;</span> ref=<span class="string">&quot;connectionFactory&quot;</span> /&gt;</span><br><span class="line">&lt;/bean&gt;</span><br><span class="line">&lt;!--P2P模式的Destination --&gt;</span><br><span class="line">&lt;bean id=<span class="string">&quot;queueDestination&quot;</span> <span class="class"><span class="keyword">class</span></span>=<span class="string">&quot;org.apache.activemq.command.ActiveMQQueue&quot;</span>&gt;</span><br><span class="line">	&lt;constructor-arg&gt;</span><br><span class="line">		&lt;value&gt;queue&lt;/value&gt;</span><br><span class="line">	&lt;/constructor-arg&gt;</span><br><span class="line">&lt;/bean&gt;</span><br><span class="line">&lt;!-- publish/subscribe模式的Destination --&gt;</span><br><span class="line">&lt;bean id=<span 
class="string">&quot;topicDestination&quot;</span> <span class="class"><span class="keyword">class</span></span>=<span class="string">&quot;org.apache.activemq.command.ActiveMQTopic&quot;</span>&gt;</span><br><span class="line">	&lt;constructor-arg value=<span class="string">&quot;topic&quot;</span> /&gt;</span><br><span class="line">&lt;/bean&gt;</span><br></pre></td></tr></table></figure>
<h4 id="3-发送消息"><a href="#3-发送消息" class="headerlink" title="3.发送消息"></a>3.发送消息</h4><figure class="highlight java"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br><span class="line">13</span><br><span class="line">14</span><br><span class="line">15</span><br><span class="line">16</span><br><span class="line">17</span><br></pre></td><td class="code"><pre><span class="line"><span class="function"><span class="keyword">public</span> <span class="keyword">void</span> <span class="title">testSend</span><span class="params">()</span></span>&#123;</span><br><span class="line">		<span class="comment">// 读取Spring配置文件</span></span><br><span class="line">		ApplicationContext applicationContext = </span><br><span class="line">					<span class="keyword">new</span> ClassPathXmlApplicationContext(<span class="string">&quot;applicationContext.xml&quot;</span>);</span><br><span class="line">		<span class="comment">// 获得JmsTemplate</span></span><br><span class="line">		JmsTemplate jmsTemplate = applicationContext.getBean(JmsTemplate.class);</span><br><span class="line">		<span class="comment">// 获得Destination</span></span><br><span class="line">		ActiveMQQueue queue = applicationContext.getBean(ActiveMQQueue.class);</span><br><span class="line">		<span class="comment">// 发送消息</span></span><br><span class="line">		jmsTemplate.send(queue, <span class="keyword">new</span> MessageCreator() &#123;</span><br><span class="line">			</span><br><span class="line">			<span class="meta">@Override</span></span><br><span class="line">			<span class="function"><span class="keyword">public</span> Message <span 
class="title">createMessage</span><span class="params">(Session session)</span> <span class="keyword">throws</span> JMSException </span>&#123;</span><br><span class="line">				<span class="keyword">return</span> session.createTextMessage(<span class="string">&quot;send-spring&quot;</span>);</span><br><span class="line">			&#125;</span><br><span class="line">		&#125;);</span><br><span class="line">	&#125;</span><br></pre></td></tr></table></figure>
<h4 id="4-配置消费者"><a href="#4-配置消费者" class="headerlink" title="4.配置消费者"></a>4.配置消费者</h4><p>&nbsp;&nbsp;Spring通过MessageListenerContainer接收信息,并把接收到的信息分发给MessageListener进行处理。每个消费者对应每个目的地都需要有对应的MessageListenerContainer。</p>
<figure class="highlight"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br><span class="line">13</span><br><span class="line">14</span><br><span class="line">15</span><br><span class="line">16</span><br><span class="line">17</span><br><span class="line">18</span><br><span class="line">19</span><br><span class="line">20</span><br><span class="line">21</span><br><span class="line">22</span><br><span class="line">23</span><br><span class="line">24</span><br><span class="line">25</span><br><span class="line">26</span><br><span class="line">27</span><br><span class="line">28</span><br><span class="line">29</span><br><span class="line">30</span><br><span class="line">31</span><br><span class="line">32</span><br><span class="line">33</span><br><span class="line">34</span><br><span class="line">35</span><br></pre></td><td class="code"><pre><span class="line">&lt;!-- ActiveMQ提供的ConnectionFactory --&gt;</span><br><span class="line">&lt;bean id=<span class="string">&quot;targetConnectionFactory&quot;</span> <span class="class"><span class="keyword">class</span></span>=<span class="string">&quot;org.apache.activemq.ActiveMQConnectionFactory&quot;</span>&gt;</span><br><span class="line">	&lt;property name=<span class="string">&quot;brokerURL&quot;</span> value=<span class="string">&quot;tcp://192.168.145.137:61616&quot;</span> /&gt;</span><br><span class="line">&lt;/bean&gt;</span><br><span class="line">&lt;!-- Spring的ConnectionFactory需要注入ActiveMQ的ConnectionFactory --&gt;</span><br><span class="line">&lt;bean id=<span class="string">&quot;connectionFactory&quot;</span></span><br><span class="line">	<span class="class"><span 
class="keyword">class</span></span>=<span class="string">&quot;org.springframework.jms.connection.SingleConnectionFactory&quot;</span>&gt;</span><br><span class="line">	&lt;!-- 目标ConnectionFactory对应真实的可以产生JMS Connection的ConnectionFactory --&gt;</span><br><span class="line">	&lt;property name=<span class="string">&quot;targetConnectionFactory&quot;</span> ref=<span class="string">&quot;targetConnectionFactory&quot;</span> /&gt;</span><br><span class="line">&lt;/bean&gt;</span><br><span class="line">   &lt;!-- 配置生产者 --&gt;</span><br><span class="line">&lt;!-- Spring提供的JMS工具类，它可以进行消息发送、接收等 --&gt;</span><br><span class="line">&lt;bean id=<span class="string">&quot;jmsTemplate&quot;</span> <span class="class"><span class="keyword">class</span></span>=<span class="string">&quot;org.springframework.jms.core.JmsTemplate&quot;</span>&gt;</span><br><span class="line">	&lt;!-- 注入Spring的连接工厂 --&gt;</span><br><span class="line">	&lt;property name=<span class="string">&quot;connectionFactory&quot;</span> ref=<span class="string">&quot;connectionFactory&quot;</span> /&gt;</span><br><span class="line">&lt;/bean&gt;</span><br><span class="line">&lt;!--P2P模式的Destination --&gt;</span><br><span class="line">&lt;bean id=<span class="string">&quot;queueDestination&quot;</span> <span class="class"><span class="keyword">class</span></span>=<span class="string">&quot;org.apache.activemq.command.ActiveMQQueue&quot;</span>&gt;</span><br><span class="line">	&lt;constructor-arg&gt;</span><br><span class="line">		&lt;value&gt;queue&lt;/value&gt;</span><br><span class="line">	&lt;/constructor-arg&gt;</span><br><span class="line">&lt;/bean&gt;</span><br><span class="line">&lt;!-- publish/subscribe模式的Destination --&gt;</span><br><span class="line">&lt;bean id=<span class="string">&quot;topicDestination&quot;</span> <span class="class"><span class="keyword">class</span></span>=<span class="string">&quot;org.apache.activemq.command.ActiveMQTopic&quot;</span>&gt;</span><br><span class="line">	
&lt;constructor-arg value=<span class="string">&quot;topic&quot;</span> /&gt;</span><br><span class="line">&lt;/bean&gt;</span><br><span class="line">&lt;!-- 配置监听器 --&gt;</span><br><span class="line">&lt;bean id=<span class="string">&quot;myMessageListener&quot;</span> <span class="class"><span class="keyword">class</span></span>=<span class="string">&quot;com.activemq.MyMessageListener&quot;</span> /&gt;</span><br><span class="line">&lt;!-- 消息监听容器 --&gt;</span><br><span class="line">&lt;bean id=<span class="string">&quot;jmsContainer&quot;</span></span><br><span class="line">	<span class="class"><span class="keyword">class</span></span>=<span class="string">&quot;org.springframework.jms.listener.DefaultMessageListenerContainer&quot;</span>&gt;</span><br><span class="line">	&lt;property name=<span class="string">&quot;connectionFactory&quot;</span> ref=<span class="string">&quot;connectionFactory&quot;</span> /&gt;</span><br><span class="line">	&lt;property name=<span class="string">&quot;destination&quot;</span> ref=<span class="string">&quot;queueDestination&quot;</span> /&gt;</span><br><span class="line">	&lt;property name=<span class="string">&quot;messageListener&quot;</span> ref=<span class="string">&quot;myMessageListener&quot;</span> /&gt;</span><br><span class="line">&lt;/bean&gt;</span><br></pre></td></tr></table></figure>
<p>监听器需要实现MessageListener接口。</p>
<figure class="highlight java"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br></pre></td><td class="code"><pre><span class="line"><span class="keyword">public</span> <span class="class"><span class="keyword">class</span> <span class="title">MyMessageListener</span> <span class="keyword">implements</span> <span class="title">MessageListener</span> </span>&#123;</span><br><span class="line">	<span class="meta">@Override</span></span><br><span class="line">	<span class="function"><span class="keyword">public</span> <span class="keyword">void</span> <span class="title">onMessage</span><span class="params">(Message message)</span> </span>&#123;</span><br><span class="line">		System.out.println(message);</span><br><span class="line">	&#125;</span><br><span class="line">&#125;</span><br></pre></td></tr></table></figure>
<h3 id="Exception"><a href="#Exception" class="headerlink" title="Exception"></a>Exception</h3><p>&nbsp;&nbsp;启动ActiveMQ时,如果发生java.net.UnknownHostException异常。<br>解决方法:<br>修改 /etc/hosts 文件 添加一行 192.168.1.1(主机IP) 主机名.localdomain 主机名<br>例: 192.168.145.137 ActiveMQ.localdomain ActiveMQ</p>

      
    </div>

    
    
    

    <footer class="post-footer">
        <div class="post-eof"></div>
      
    </footer>
  </article>
</div>




    


<div class="post-block">
  
  

  <article itemscope itemtype="http://schema.org/Article" class="post-content" lang="zh">
    <link itemprop="mainEntityOfPage" href="https://suyuhuan.gitee.io/yuwanzi.io/2016/06/30/2016-06-30-solrcloud/">

    <span hidden itemprop="author" itemscope itemtype="http://schema.org/Person">
      <meta itemprop="image" content="/yuwanzi.io/images/avatar.gif">
      <meta itemprop="name" content="玉丸子">
      <meta itemprop="description" content="这里是玉丸子的个人博客,与你一起发现更大的世界。">
    </span>

    <span hidden itemprop="publisher" itemscope itemtype="http://schema.org/Organization">
      <meta itemprop="name" content="玉丸子 | Blog">
    </span>
      <header class="post-header">
        <h2 class="post-title" itemprop="name headline">
          <a href="/yuwanzi.io/2016/06/30/2016-06-30-solrcloud/" class="post-title-link" itemprop="url">SolrCloud初体验</a>
        </h2>

        <div class="post-meta-container">
          <div class="post-meta">
    <span class="post-meta-item">
      <span class="post-meta-item-icon">
        <i class="far fa-calendar"></i>
      </span>
      <span class="post-meta-item-text">Veröffentlicht am</span>

      <time title="Erstellt: 2016-06-30 18:00:00" itemprop="dateCreated datePublished" datetime="2016-06-30T18:00:00+08:00">2016-06-30</time>
    </span>
      <span class="post-meta-item">
        <span class="post-meta-item-icon">
          <i class="far fa-calendar-check"></i>
        </span>
        <span class="post-meta-item-text">Bearbeitet am</span>
        <time title="Geändert am: 2020-11-07 08:58:17" itemprop="dateModified" datetime="2020-11-07T08:58:17+08:00">2020-11-07</time>
      </span>
    <span class="post-meta-item">
      <span class="post-meta-item-icon">
        <i class="far fa-folder"></i>
      </span>
      <span class="post-meta-item-text">in</span>
        <span itemprop="about" itemscope itemtype="http://schema.org/Thing">
          <a href="/yuwanzi.io/categories/%E5%90%8E%E7%AB%AF/" itemprop="url" rel="index"><span itemprop="name">后端</span></a>
        </span>
          . 
        <span itemprop="about" itemscope itemtype="http://schema.org/Thing">
          <a href="/yuwanzi.io/categories/%E5%90%8E%E7%AB%AF/%E5%85%A8%E6%96%87%E6%A3%80%E7%B4%A2/" itemprop="url" rel="index"><span itemprop="name">全文检索</span></a>
        </span>
          . 
        <span itemprop="about" itemscope itemtype="http://schema.org/Thing">
          <a href="/yuwanzi.io/categories/%E5%90%8E%E7%AB%AF/%E5%85%A8%E6%96%87%E6%A3%80%E7%B4%A2/Solr/" itemprop="url" rel="index"><span itemprop="name">Solr</span></a>
        </span>
    </span>

  
</div>

        </div>
      </header>

    
    
    
    <div class="post-body" itemprop="articleBody">
          <p><img src="http://ww4.sinaimg.cn/mw690/63503acbjw1f67ocs5srhj207g05m3yl.jpg"></p>
<h3 id="什么是SolrCloud"><a href="#什么是SolrCloud" class="headerlink" title="什么是SolrCloud"></a>什么是SolrCloud</h3><p>SolrCloud是基于Solr和Zookeeper的分布式搜索方案,它的主要思想是使用Zookeeper作为集群的配置信息中心。</p>
<h3 id="SolrCloud的特点"><a href="#SolrCloud的特点" class="headerlink" title="SolrCloud的特点"></a>SolrCloud的特点</h3><h4 id="1-近实时搜索"><a href="#1-近实时搜索" class="headerlink" title="1.近实时搜索"></a>1.近实时搜索</h4><p>立即推送式的replication（也支持慢推送）。可以在秒内检索到新加入索引。</p>
<h4 id="2-自动容错"><a href="#2-自动容错" class="headerlink" title="2.自动容错"></a>2.自动容错</h4><p>SolrCloud对索引分片,并对每个分片创建多个Replication。每个Replication都可以对外提供服务。一个Replication挂掉不会影响索引服务。更强大的是，它还能自动的在其它机器上帮你把失败机器上的索引Replication重建并投入使用。</p>
<h4 id="3-查询时自动负载均衡"><a href="#3-查询时自动负载均衡" class="headerlink" title="3.查询时自动负载均衡"></a>3.查询时自动负载均衡</h4><p>SolrCloud索引的多个Replication可以分布在多台机器上,均衡查询压力。如果查询压力大，可以通过扩展机器，增加Replication来减缓。</p>
<h4 id="4-集中式的配置信息"><a href="#4-集中式的配置信息" class="headerlink" title="4.集中式的配置信息"></a>4.集中式的配置信息</h4><p>SolrCloud可以将配置文件上传到Zookeeper,由Zookeeper对配置文件进行管理。</p>
<h3 id="结构分析"><a href="#结构分析" class="headerlink" title="结构分析"></a>结构分析</h3><p>为了减少处理压力,SolrCloud需要由多台服务器共同完成索引和搜索。</p>
<h4 id="1-实现思路"><a href="#1-实现思路" class="headerlink" title="1.实现思路"></a>1.实现思路</h4><p>SolrCloud将索引数据进行分片(Shard),每个分片由多台服务器共同完成。</p>
<h4 id="2-结构"><a href="#2-结构" class="headerlink" title="2.结构"></a>2.结构</h4><p><img src="http://ww4.sinaimg.cn/mw690/63503acbjw1f67oct3xegj20rx0qfdht.jpg"></p>
<p><strong>物理结构</strong><br>SolrCloud由三个Solr服务器组成,每个Solr服务器包含2个Core。</p>
<p><strong>逻辑结构</strong><br>一个Collection包含2个Shard,每个Shard由3个core组成(一个Leader,两个Replication)。</p>
<p><strong>Collection</strong><br>Collection是一个在逻辑意义上完整的索引结构,它常常被划分为一个或多个Shard分片,它们使用相同的配置信息。如果Shard数超过一个，它就是分布式索引，SolrCloud让你通过Collection名称引用它，而不需要关心分布式检索时需要使用的和Shard相关参数。</p>
<p><strong>Core</strong><br>一个Solr中包含一个或者多个Solr Core，每个Solr Core可以独立提供索引和查询功能，每个Solr Core对应一个索引或者Collection的Shard，Solr Core的提出是为了增加管理灵活性和共用资源。在SolrCloud中有个不同点是它使用的配置是在Zookeeper中的，传统的Solr core的配置文件是在磁盘上的配置目录中。</p>
<p><strong>Shard</strong><br>Collection的逻辑分片。每个Shard被划分成一个或者多个replicas，通过选举确定哪个是Leader。</p>
<p><strong>Replication</strong><br>在master-slave结构中,Replication是一个从节点,同一个Shard下主从节点存储的数据是一致的。</p>
<p><strong>Leader</strong><br>在master-slave结构中,Leader是一个主节点,Leader是赢得选举的Replication。选举可以发生在任何时间，但是通常它们仅在某个Solr实例发生故障时才会触发。当索引documents时，SolrCloud会传递它们到此Shard对应的Leader，Leader再分发它们到全部Shard的Replication。</p>
<h3 id="SolrCloud的搭建"><a href="#SolrCloud的搭建" class="headerlink" title="SolrCloud的搭建"></a>SolrCloud的搭建</h3><h4 id="1-Zookeeper"><a href="#1-Zookeeper" class="headerlink" title="1.Zookeeper"></a>1.Zookeeper</h4><p>SolrCloud需要Zookeeper进行管理,所以需要先安装Zookeeper。</p>
<ul>
<li><p>解压缩zookeeper.tar.gz,并复制出3个Zookeeper实例。</p>
</li>
<li><p>进入zookeeper01目录,创建一个data文件夹,并在data中创建一个myid文件,内容为1(其他Zookeeper实例为2和3)。</p>
</li>
<li><p>进入conf文件夹,将zoo_sample.cfg改名为zoo.cfg</p>
</li>
<li><p>vim zoo.cfg 修改dataDir=data文件夹所在的目录,<br>添加:<br>server.myid的值=ip:每个Zookeeper服务器之间的通讯端口:Zookeeper与其他应用的通讯端口。(每个Zookeeper实例都需要添加这行内容)<br>例如:</p>
<p><img src="http://ww2.sinaimg.cn/mw690/63503acbjw1f67ocpgc1zj20d80cwdjc.jpg"></p>
</li>
</ul>
<h4 id="2-Solr实例"><a href="#2-Solr实例" class="headerlink" title="2.Solr实例"></a>2.Solr实例</h4><ul>
<li><p>安装一个单机的Solr实例,并复制成4份,分别对应4个SolrHome。</p>
</li>
<li><p>修改SolrHome的solr.xml文件</p>
<p><img src="http://ww3.sinaimg.cn/mw690/63503acbjw1f67ocqpcfqj20he097acw.jpg"></p>
</li>
<li><p>将配置文件上传到Zookeeper,当配置文件发生改变时,需要重新上传。</p>
</li>
</ul>
<pre><code> java -classpath .:/usr/local/solr-cloud/solr-lib/* org.apache.solr.cloud.ZkCLI -cmd upconfig -zkhost 127.0.0.1:2181,127.0.0.1:2182,127.0.0.1:2183 -confdir /usr/local/solr-cloud/solrhome01/collection1/conf  -confname solr-conf

-cmd upconfig 上传配置文件命令
-zkhost Zookeeper集群的ip与端口
-confdir 配置文件的目录
-confname 上传到Zookeeper后的文件夹名称

其中参数/usr/local/solr-cloud/solr-lib/可以自己创建，内容如下：
        复制tomcat/webapps/solr/WEB-INF/lib下所有jar包
        复制example/lib/ext下所有jar包
        复制example/resources/log4j.properties</code></pre>
<ul>
<li><p>通知Solr实例Zookeeper的地址,需要修改tomcat/bin/catalina.sh<br>添加一行:JAVA_OPTS=”-DzkHost=Zookeeper集群的地址列表”</p>
<p><img src="http://ww3.sinaimg.cn/mw690/63503acbjw1f67ocr82b3j20ig04ptas.jpg"></p>
</li>
</ul>
<h4 id="3-设置Shard"><a href="#3-设置Shard" class="headerlink" title="3.设置Shard"></a>3.设置Shard</h4><figure class="highlight plain"><table><tr><td class="gutter"><pre><span class="line">1</span><br></pre></td><td class="code"><pre><span class="line">http:&#x2F;&#x2F;web容器&#x2F;solr&#x2F;admin&#x2F;collections?action&#x3D;CREATE&amp;name&#x3D;Collection名称&amp;numShards&#x3D;Shard个数&amp;replicationFactor&#x3D;Replication个数</span><br></pre></td></tr></table></figure>
<p>例:</p>
<figure class="highlight plain"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br></pre></td><td class="code"><pre><span class="line">http:&#x2F;&#x2F;192.168.145.150:8080&#x2F;solr&#x2F;admin&#x2F;collections?action&#x3D;CREATE&amp;name&#x3D;collection2&amp;numShards&#x3D;2&amp;replicationFactor&#x3D;2</span><br><span class="line">上面的命令为 创建一个name为collection2的Collection,并分成了2个Shard,每个Shard有2个Replication</span><br></pre></td></tr></table></figure>
<p>删除一个Collection</p>
<figure class="highlight plain"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br></pre></td><td class="code"><pre><span class="line">http:&#x2F;&#x2F;192.168.145.150:8080&#x2F;solr&#x2F;admin&#x2F;collections?action&#x3D;DELETE&amp;name&#x3D;collection1</span><br><span class="line">上面的命令为 删除一个name为collection1的Collection</span><br></pre></td></tr></table></figure>
<h3 id="Spring整合SolrCloud"><a href="#Spring整合SolrCloud" class="headerlink" title="Spring整合SolrCloud"></a>Spring整合SolrCloud</h3><figure class="highlight plain"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br></pre></td><td class="code"><pre><span class="line">&lt;!-- SolrCloud --&gt;</span><br><span class="line">	&lt;bean id&#x3D;&quot;cloudSolrServer&quot; class&#x3D;&quot;org.apache.solr.client.solrj.impl.CloudSolrServer&quot;&gt;</span><br><span class="line">		&lt;constructor-arg name&#x3D;&quot;zkHost&quot;</span><br><span class="line">			value&#x3D;&quot;192.168.145.136:2181,192.168.145.136:2182,192.168.145.136:2183&quot; &#x2F;&gt;</span><br><span class="line">		&lt;!-- 设置默认搜索的Collection --&gt;</span><br><span class="line">		&lt;property name&#x3D;&quot;defaultCollection&quot; value&#x3D;&quot;collection2&quot; &#x2F;&gt;</span><br><span class="line">	&lt;&#x2F;bean&gt;</span><br></pre></td></tr></table></figure>
      
    </div>

    
    
    

    <footer class="post-footer">
        <div class="post-eof"></div>
      
    </footer>
  </article>
</div>




  <nav class="pagination">
    <a class="extend prev" rel="prev" href="/yuwanzi.io/page/5/"><i class="fa fa-angle-left" aria-label="Vorherige Seite"></i></a><a class="page-number" href="/yuwanzi.io/">1</a><span class="space">&hellip;</span><a class="page-number" href="/yuwanzi.io/page/5/">5</a><span class="page-number current">6</span><a class="page-number" href="/yuwanzi.io/page/7/">7</a><a class="extend next" rel="next" href="/yuwanzi.io/page/7/"><i class="fa fa-angle-right" aria-label="Nächste Seite"></i></a>
  </nav>


<script>
  window.addEventListener('tabs:register', () => {
    let { activeClass } = CONFIG.comments;
    if (CONFIG.comments.storage) {
      activeClass = localStorage.getItem('comments_active') || activeClass;
    }
    if (activeClass) {
      const activeTab = document.querySelector(`a[href="#comment-${activeClass}"]`);
      if (activeTab) {
        activeTab.click();
      }
    }
  });
  if (CONFIG.comments.storage) {
    window.addEventListener('tabs:click', event => {
      if (!event.target.matches('.tabs-comment .tab-content .tab-pane')) return;
      const commentClass = event.target.classList[1];
      localStorage.setItem('comments_active', commentClass);
    });
  }
</script>
</div>
  </main>

  <footer class="footer">
    <div class="footer-inner">


<div class="copyright">
  &copy; 
  <span itemprop="copyrightYear">2021</span>
  <span class="with-love">
    <i class="fa fa-heart"></i>
  </span>
  <span class="author" itemprop="copyrightHolder">玉丸子</span>
</div>
  <div class="powered-by">Erstellt mit  <a href="https://hexo.io/" class="theme-link" rel="noopener" target="_blank">Hexo</a> & <a href="https://theme-next.js.org/muse/" class="theme-link" rel="noopener" target="_blank">NexT.Muse</a>
  </div>

    </div>
  </footer>

  
  <script src="//cdn.jsdelivr.net/npm/animejs@3.2.1/lib/anime.min.js"></script>
<script src="/yuwanzi.io/js/utils.js"></script><script src="/yuwanzi.io/js/motion.js"></script><script src="/yuwanzi.io/js/schemes/muse.js"></script><script src="/yuwanzi.io/js/next-boot.js"></script>

  






  





</body>
</html>
