<!DOCTYPE html>



  


<html class="theme-next gemini use-motion" lang="zh-Hans">
<head><meta name="generator" content="Hexo 3.9.0">
  <meta charset="UTF-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1">
<meta name="theme-color" content="#222">









<meta http-equiv="Cache-Control" content="no-transform">
<meta http-equiv="Cache-Control" content="no-siteapp">
















  
  
  <link href="/blog/lib/fancybox/source/jquery.fancybox.css?v=2.1.5" rel="stylesheet" type="text/css">







<link href="/blog/lib/font-awesome/css/font-awesome.min.css?v=4.6.2" rel="stylesheet" type="text/css">

<link href="/blog/css/main.css?v=5.1.4" rel="stylesheet" type="text/css">


  <link rel="apple-touch-icon" sizes="180x180" href="/blog/images/apple-touch-icon-next.png?v=5.1.4">


  <link rel="icon" type="image/png" sizes="32x32" href="/blog/images/favicon-32x32-next.png?v=5.1.4">


  <link rel="icon" type="image/png" sizes="16x16" href="/blog/images/favicon-16x16-next.png?v=5.1.4">


  <link rel="mask-icon" href="/blog/images/logo.svg?v=5.1.4" color="#222">





  <meta name="keywords" content="Hadoop3.0入门">










<meta name="description" content="本文为慕课网《快速入门Hadoop3.0大数据处理》的第三章，主要讲解：Hadoop3.0伪分布式集群安装部署、Hadoop分布式集群安装部署 该课程地址：https://www.imooc.com/learn/1159">
<meta name="keywords" content="Hadoop3.0入门">
<meta property="og:type" content="article">
<meta property="og:title" content="【二】快速入门Hadoop3.0大数据处理——Hadoop3.0安装部署">
<meta property="og:url" content="https://aiolos123.gitee.io/blog/2019/12/23/hadoop3.0-step-by-step-2/">
<meta property="og:site_name" content="Aiolos">
<meta property="og:description" content="本文为慕课网《快速入门Hadoop3.0大数据处理》的第三章，主要讲解：Hadoop3.0伪分布式集群安装部署、Hadoop分布式集群安装部署 该课程地址：https://www.imooc.com/learn/1159">
<meta property="og:locale" content="zh-Hans">
<meta property="og:image" content="https://aiolos123.gitee.io/blog/images/20191224093408767.jpg">
<meta property="og:image" content="https://aiolos123.gitee.io/blog/images/20191224091923066.jpg">
<meta property="og:image" content="https://aiolos123.gitee.io/blog/images/20191224092436742.jpg">
<meta property="og:image" content="https://aiolos123.gitee.io/blog/images/20191224092625064.jpg">
<meta property="og:image" content="https://aiolos123.gitee.io/blog/images/20191224092802837.jpg">
<meta property="og:image" content="https://aiolos123.gitee.io/blog/images/20191224102125077.jpg">
<meta property="og:image" content="https://aiolos123.gitee.io/blog/images/20191224102952900.jpg">
<meta property="og:image" content="https://aiolos123.gitee.io/blog/images/20191224110923147.jpg">
<meta property="og:image" content="https://aiolos123.gitee.io/blog/images/20191224111750106.jpg">
<meta property="og:image" content="https://aiolos123.gitee.io/blog/images/20191224114807639.jpg">
<meta property="og:image" content="https://aiolos123.gitee.io/blog/images/20191224115218443.jpg">
<meta property="og:image" content="https://aiolos123.gitee.io/blog/images/20191224120713620.jpg">
<meta property="og:image" content="https://aiolos123.gitee.io/blog/images/20191224124757219.jpg">
<meta property="og:image" content="https://aiolos123.gitee.io/blog/images/20191224140337869.jpg">
<meta property="og:image" content="https://aiolos123.gitee.io/blog/images/20191224141211000.jpg">
<meta property="og:image" content="https://aiolos123.gitee.io/blog/images/20191224141637967.jpg">
<meta property="og:image" content="https://aiolos123.gitee.io/blog/images/20191224141957066.jpg">
<meta property="og:image" content="https://aiolos123.gitee.io/blog/images/20191224142437218.jpg">
<meta property="og:image" content="https://aiolos123.gitee.io/blog/images/20191224143045442.jpg">
<meta property="og:image" content="https://aiolos123.gitee.io/blog/images/20191224143518868.jpg">
<meta property="og:image" content="https://aiolos123.gitee.io/blog/images/20191224144048170.jpg">
<meta property="og:image" content="https://aiolos123.gitee.io/blog/images/20191224144352817.jpg">
<meta property="og:image" content="https://aiolos123.gitee.io/blog/images/20191224144812484.jpg">
<meta property="og:image" content="https://aiolos123.gitee.io/blog/images/20191224144938423.jpg">
<meta property="og:image" content="https://aiolos123.gitee.io/blog/images/20191224145316317.jpg">
<meta property="og:image" content="https://aiolos123.gitee.io/blog/images/20191224145553681.jpg">
<meta property="og:image" content="https://aiolos123.gitee.io/blog/images/20191224151407003.jpg">
<meta property="og:image" content="https://aiolos123.gitee.io/blog/images/20191224152233716.jpg">
<meta property="og:image" content="https://aiolos123.gitee.io/blog/images/20191224154425792.jpg">
<meta property="og:image" content="https://aiolos123.gitee.io/blog/images/20191224154848167.jpg">
<meta property="og:updated_time" content="2019-12-24T08:05:41.317Z">
<meta name="twitter:card" content="summary">
<meta name="twitter:title" content="【二】快速入门Hadoop3.0大数据处理——Hadoop3.0安装部署">
<meta name="twitter:description" content="本文为慕课网《快速入门Hadoop3.0大数据处理》的第三章，主要讲解：Hadoop3.0伪分布式集群安装部署、Hadoop分布式集群安装部署 该课程地址：https://www.imooc.com/learn/1159">
<meta name="twitter:image" content="https://aiolos123.gitee.io/blog/images/20191224093408767.jpg">



<script type="text/javascript" id="hexo.configurations">
  // NexT theme runtime configuration. This object is generated by Hexo at
  // build time and consumed by the theme's client-side scripts (motion
  // animations, sidebar behavior, fancybox, search, etc.).
  var NexT = window.NexT || {};
  var CONFIG = {
    // Site root path — all asset/page URLs are resolved relative to this.
    root: '/blog/',
    // Active NexT scheme (matches the "gemini" class on <html>).
    scheme: 'Gemini',
    version: '5.1.4',
    // Sidebar placement and toggle behavior; "display: post" shows it on post pages.
    sidebar: {"position":"left","display":"post","offset":12,"b2t":false,"scrollpercent":false,"onmobile":false},
    // Enable fancybox image lightbox (stylesheet loaded in <head>).
    fancybox: true,
    tabs: true,
    // Page-load transition animations; "async: false" runs them sequentially.
    motion: {"enable":true,"async":false,"transition":{"post_block":"fadeIn","post_header":"slideDownIn","post_body":"slideDownIn","coll_header":"slideLeftIn","sidebar":"slideUpIn"}},
    // Duoshuo comment-system identity (legacy; service discontinued — TODO confirm still needed).
    duoshuo: {
      userId: '0',
      author: '博主'
    },
    // Algolia search settings — empty credentials, so Algolia search is effectively disabled
    // (the page uses the local-search popup instead; see .local-search-popup in the nav).
    algolia: {
      applicationID: '',
      apiKey: '',
      indexName: '',
      hits: {"per_page":10},
      labels: {"input_placeholder":"Search for Posts","hits_empty":"We didn't find any results for the search: ${query}","hits_stats":"${hits} results found in ${time} ms"}
    }
  };
</script>



  <link rel="canonical" href="https://aiolos123.gitee.io/blog/2019/12/23/hadoop3.0-step-by-step-2/">





  <title>【二】快速入门Hadoop3.0大数据处理——Hadoop3.0安装部署 | Aiolos</title>
  








</head>

<body itemscope itemtype="http://schema.org/WebPage" lang="zh-Hans">

  
  
    
  

  <div class="container sidebar-position-left page-post-detail">
    <div class="headband"></div>

    <header id="header" class="header" itemscope itemtype="http://schema.org/WPHeader">
      <div class="header-inner"><div class="site-brand-wrapper">
  <div class="site-meta ">
    

    <div class="custom-logo-site-title">
      <a href="/blog/" class="brand" rel="start">
        <span class="logo-line-before"><i></i></span>
        <span class="site-title">Aiolos</span>
        <span class="logo-line-after"><i></i></span>
      </a>
    </div>
      
        <p class="site-subtitle">记录我的成长点滴</p>
      
  </div>

  <div class="site-nav-toggle">
    <button>
      <span class="btn-bar"></span>
      <span class="btn-bar"></span>
      <span class="btn-bar"></span>
    </button>
  </div>
</div>

<nav class="site-nav">
  

  
    <ul id="menu" class="menu">
      
        
        <li class="menu-item menu-item-home">
          <a href="/blog/" rel="section">
            
              <i class="menu-item-icon fa fa-fw fa-home"></i> <br>
            
            首页
          </a>
        </li>
      
        
        <li class="menu-item menu-item-tags">
          <a href="/blog/tags/" rel="section">
            
              <i class="menu-item-icon fa fa-fw fa-tags"></i> <br>
            
            标签
          </a>
        </li>
      
        
        <li class="menu-item menu-item-categories">
          <a href="/blog/categories/" rel="section">
            
              <i class="menu-item-icon fa fa-fw fa-th"></i> <br>
            
            分类
          </a>
        </li>
      
        
        <li class="menu-item menu-item-archives">
          <a href="/blog/archives/" rel="section">
            
              <i class="menu-item-icon fa fa-fw fa-archive"></i> <br>
            
            归档
          </a>
        </li>
      

      
        <li class="menu-item menu-item-search">
          
            <a href="javascript:;" class="popup-trigger">
          
            
              <i class="menu-item-icon fa fa-search fa-fw"></i> <br>
            
            搜索
          </a>
        </li>
      
    </ul>
  

  
    <div class="site-search">
      
  <div class="popup search-popup local-search-popup">
  <div class="local-search-header clearfix">
    <span class="search-icon">
      <i class="fa fa-search"></i>
    </span>
    <span class="popup-btn-close">
      <i class="fa fa-times-circle"></i>
    </span>
    <div class="local-search-input-wrapper">
      <input autocomplete="off" placeholder="搜索..." spellcheck="false" type="text" id="local-search-input">
    </div>
  </div>
  <div id="local-search-result"></div>
</div>



    </div>
  
</nav>



 </div>
    </header>

    <main id="main" class="main">
      <div class="main-inner">
        <div class="content-wrap">
          <div id="content" class="content">
            

  <div id="posts" class="posts-expand">
    

  

  
  
  

  <article class="post post-type-normal" itemscope itemtype="http://schema.org/Article">
  
  
  
  <div class="post-block">
    <link itemprop="mainEntityOfPage" href="https://aiolos123.gitee.io/blog/2019/12/23/hadoop3.0-step-by-step-2/">

    <span hidden itemprop="author" itemscope itemtype="http://schema.org/Person">
      <meta itemprop="name" content="aiolos">
      <meta itemprop="description" content="">
      <meta itemprop="image" content="/blog/images/avatar.gif">
    </span>

    <span hidden itemprop="publisher" itemscope itemtype="http://schema.org/Organization">
      <meta itemprop="name" content="Aiolos">
    </span>

    
      <header class="post-header">

        
        
          <h1 class="post-title" itemprop="name headline">【二】快速入门Hadoop3.0大数据处理——Hadoop3.0安装部署</h1>
        

        <div class="post-meta">
          <span class="post-time">
            
              <span class="post-meta-item-icon">
                <i class="fa fa-calendar-o"></i>
              </span>
              
                <span class="post-meta-item-text">发表于</span>
              
              <time title="创建于" itemprop="dateCreated datePublished" datetime="2019-12-23T05:53:30+08:00">
                2019-12-23
              </time>
            

            

            
          </span>

          
            <span class="post-category">
            
              <span class="post-meta-divider">|</span>
            
              <span class="post-meta-item-icon">
                <i class="fa fa-folder-o"></i>
              </span>
              
                <span class="post-meta-item-text">分类于</span>
              
              
                <span itemprop="about" itemscope itemtype="http://schema.org/Thing">
                  <a href="/blog/categories/Hadoop/" itemprop="url" rel="index">
                    <span itemprop="name">Hadoop</span>
                  </a>
                </span>

                
                
              
            </span>
          

          
            
          

          
          

          

          

          

        </div>
      </header>
    

    
    
    
    <div class="post-body" itemprop="articleBody">

      
      

      
        <p>本文为慕课网《快速入门Hadoop3.0大数据处理》的第三章，主要讲解：Hadoop3.0伪分布式集群安装部署、Hadoop分布式集群安装部署</p>
<p>该课程地址：<a href="https://www.imooc.com/learn/1159" target="_blank" rel="noopener">https://www.imooc.com/learn/1159</a></p>
<a id="more"></a>
<h2 id="Hadoop常见发行版介绍"><a href="#Hadoop常见发行版介绍" class="headerlink" title="Hadoop常见发行版介绍"></a>Hadoop常见发行版介绍</h2><ol>
<li>发行版：如安卓系统，华为、小米等都基于谷歌的原始安卓系统进行二次封装，它们就称为发行版</li>
</ol>
<blockquote>
<p>Hadoop的官方版本：Apache Hadoop，开源。 缺点是集群安装维护比较麻烦；<br>第三方发行版本： Cloudera Hadoop(CDH), 提供商业支持，收费，使用Cloudera Manager安装维护比较方便。<br>第三方发行版本： HortonWorks(HDP)，开源，使用Ambari安装维护比较方便，已被CDH收购</p>
</blockquote>
<ol start="2">
<li><p>本课程使用版本</p>
<blockquote>
<p>课程中使用的Hadoop版本： Apache Hadoop 3.2.0<br>使用的Linux版本：CentOS release 6.8X64(查看命令：cat /etc/redhat-release)</p>
</blockquote>
</li>
<li><p>关于CentOS6.8的安装请见：<a href="https://www.cnblogs.com/pigdata/p/10305530.html" target="_blank" rel="noopener">https://www.cnblogs.com/pigdata/p/10305530.html</a></p>
</li>
<li><p>修改CentOS系统为命令行模式启动<br><img src="/blog/images/20191224093408767.jpg" alt="修改CentOS系统为命令行模式启动"></p>
</li>
</ol>
<h3 id="CentOS6-8安装成功后的网络配置"><a href="#CentOS6-8安装成功后的网络配置" class="headerlink" title="CentOS6.8安装成功后的网络配置"></a>CentOS6.8安装成功后的网络配置</h3><p>安装CentOS6.8后，默认网络是未配置的(安装的虚拟机的网络模式为NAT)，按照上述博客进行配置，简要步骤如下：</p>
<ol>
<li>输入ifconfig命令，先查看ip地址<blockquote>
<p>发现除了回环地址以外，eth0中并没有IP地址，说明虚拟机并不能和外界通信，可以使用ping命令进行测试。 ping 百度,ping不通，说明虚拟机无法连接外网</p>
</blockquote>
</li>
</ol>
<p><img src="/blog/images/20191224091923066.jpg" alt="输入ifconfig命令"></p>
<ol start="2">
<li>修改网卡的配置文件<figure class="highlight awk"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br></pre></td><td class="code"><pre><span class="line">vi <span class="regexp">/etc/</span>sysconfig<span class="regexp">/network-scripts/i</span>fcfg-eth0</span><br><span class="line"><span class="comment">#将其中的ONBOOT设置为yes，表示开启虚拟机网卡(安装完成CentOS后默认为no，即不开启虚拟机网卡)</span></span><br></pre></td></tr></table></figure>

</li>
</ol>
<p><img src="/blog/images/20191224092436742.jpg" alt="修改网卡的配置文件"></p>
<ol start="3">
<li>重启网络服务, 再次输入ifconfig命令，查看ip地址，如果能看到IP信息，则说明网卡已经成功开启<figure class="highlight routeros"><table><tr><td class="gutter"><pre><span class="line">1</span><br></pre></td><td class="code"><pre><span class="line">service<span class="built_in"> network </span>restart</span><br></pre></td></tr></table></figure>

</li>
</ol>
<p><img src="/blog/images/20191224092625064.jpg" alt="重启网络服务"></p>
<ol start="4">
<li>ping外网，测试网卡正常。如果可以ping通外网，说明虚拟机的网络已经配置成功<br><img src="/blog/images/20191224092802837.jpg" alt="ping外网"></li>
</ol>
<h3 id="CentOS配置阿里云yum源"><a href="#CentOS配置阿里云yum源" class="headerlink" title="CentOS配置阿里云yum源"></a>CentOS配置阿里云yum源</h3><ol>
<li><p>配置过程中的命令如下：</p>
<figure class="highlight dts"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br></pre></td><td class="code"><pre><span class="line"><span class="meta"># 1. 备份</span></span><br><span class="line">mv <span class="meta-keyword">/etc/</span>yum.repos.d/CentOS-Base.repo <span class="meta-keyword">/etc/</span>yum.repos.d/CentOS-Base.repo.backup</span><br><span class="line"><span class="meta"># 2.下载CentOS 6版本新的CentOS-Base.repo 到/etc/yum.repos.d/</span></span><br><span class="line">wget -O <span class="meta-keyword">/etc/</span>yum.repos.d/CentOS-Base.repo http:<span class="comment">//mirrors.aliyun.com/repo/Centos-6.repo</span></span><br><span class="line">或者</span><br><span class="line">curl -o <span class="meta-keyword">/etc/</span>yum.repos.d/CentOS-Base.repo http:<span class="comment">//mirrors.aliyun.com/repo/Centos-6.repo</span></span><br><span class="line"><span class="meta"># 3. 生成新的缓存</span></span><br><span class="line">yum makecache</span><br></pre></td></tr></table></figure>
</li>
<li><p>操作过程如下图：<br><img src="/blog/images/20191224102125077.jpg" alt="CentOS配置阿里云yum源"></p>
</li>
</ol>
<h3 id="卸载系统自带OpenJDK，安装Oracle-JDK8"><a href="#卸载系统自带OpenJDK，安装Oracle-JDK8" class="headerlink" title="卸载系统自带OpenJDK，安装Oracle JDK8"></a>卸载系统自带OpenJDK，安装Oracle JDK8</h3><ol>
<li><p>卸载系统自带OpenJDK过程的命令如下：</p>
<figure class="highlight lsl"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br><span class="line">13</span><br><span class="line">14</span><br><span class="line">15</span><br><span class="line">16</span><br></pre></td><td class="code"><pre><span class="line"># <span class="number">1.</span> 检测系统自带jdk版本</span><br><span class="line">[root@hadoop000 ~]# java -version</span><br><span class="line">java version <span class="string">"1.7.0_99"</span></span><br><span class="line">OpenJDK Runtime Environment (rhel<span class="number">-2.6</span><span class="number">.5</span><span class="number">.1</span>.el6-x86_64 u99-b00)</span><br><span class="line">OpenJDK <span class="number">64</span>-Bit Server VM (build <span class="number">24.95</span>-b01, mixed mode)</span><br><span class="line"># <span class="number">2.</span> 查看jdk的安装信息，获取卸载信息</span><br><span class="line">[root@hadoop000 ~]# rpm -qa | grep java</span><br><span class="line">tzdata-java<span class="number">-2016</span>c<span class="number">-1.</span>el6.noarch</span><br><span class="line">java<span class="number">-1.7</span><span class="number">.0</span>-openjdk<span class="number">-1.7</span><span class="number">.0</span><span class="number">.99</span><span class="number">-2.6</span><span class="number">.5</span><span class="number">.1</span>.el6.x86_64</span><br><span class="line">java<span class="number">-1.6</span><span class="number">.0</span>-openjdk<span class="number">-1.6</span><span class="number">.0</span><span class="number">.38</span><span class="number">-1.13</span><span class="number">.10</span><span 
class="number">.4</span>.el6.x86_64</span><br><span class="line"># <span class="number">3.</span> 执行以下命令，通过rpm卸载OpenJDK</span><br><span class="line">[root@hadoop000 ~]# rpm -e --nodeps java<span class="number">-1.7</span><span class="number">.0</span>-openjdk<span class="number">-1.7</span><span class="number">.0</span><span class="number">.99</span><span class="number">-2.6</span><span class="number">.5</span><span class="number">.1</span>.el6.x86_64</span><br><span class="line">[root@hadoop000 ~]# rpm -e --nodeps tzdata-java<span class="number">-2016</span>c<span class="number">-1.</span>el6.noarch</span><br><span class="line">[root@hadoop000 ~]# rpm -e --nodeps java<span class="number">-1.6</span><span class="number">.0</span>-openjdk<span class="number">-1.6</span><span class="number">.0</span><span class="number">.38</span><span class="number">-1.13</span><span class="number">.10</span><span class="number">.4</span>.el6.x86_64</span><br><span class="line"># <span class="number">4.</span> 确认是否卸载成功。执行如下命令，如果没有信息显示，则表示卸载成功</span><br><span class="line">[root@hadoop000 ~]# rpm -qa | grep java</span><br></pre></td></tr></table></figure>
</li>
<li><p>卸载系统自带OpenJDK过程如下图：<br><img src="/blog/images/20191224102952900.jpg" alt="卸载系统自带OpenJDK过程"></p>
</li>
<li><p>安装OracleJDK8过程的命令如下：</p>
<figure class="highlight crystal"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br><span class="line">13</span><br><span class="line">14</span><br><span class="line">15</span><br><span class="line">16</span><br><span class="line">17</span><br><span class="line">18</span><br><span class="line">19</span><br><span class="line">20</span><br><span class="line">21</span><br><span class="line">22</span><br><span class="line">23</span><br><span class="line">24</span><br></pre></td><td class="code"><pre><span class="line"><span class="comment"># 1. 上传oracle jdk的安装文件到CentOS中：jdk-8u231-linux-x64.rpm</span></span><br><span class="line"><span class="comment"># 下载地址：https://www.oracle.com/technetwork/java/javase/downloads/jdk8-downloads-2133151.html</span></span><br><span class="line"></span><br><span class="line"><span class="comment"># 2. 到上传目录，执行如下安装命令</span></span><br><span class="line">[root@hadoop000 ~]<span class="comment"># rpm -ivh jdk-8u231-linux-x64.rpm</span></span><br><span class="line"></span><br><span class="line"><span class="comment"># 3. 
验证安装 JDK默认安装在/usr/java/jdk1.8.0_231-amd64中。执行以下操作，查看信息是否正常：</span></span><br><span class="line">[root@hadoop000 ~]<span class="comment"># java -version</span></span><br><span class="line">java version <span class="string">"1.8.0_231"</span></span><br><span class="line">Java(TM) SE Runtime Environment (build <span class="number">1.8</span>.<span class="number">0</span>_231-b11)</span><br><span class="line">Java HotSpot(TM) <span class="number">64</span>-Bit Server VM (build <span class="number">25.231</span>-b11, mixed mode)</span><br><span class="line"></span><br><span class="line"><span class="comment"># 4. 设置JAVA_HOME，修改系统环境变量文件</span></span><br><span class="line">vi /etc/profile</span><br><span class="line"><span class="comment"># 在文件最后追加以下内容</span></span><br><span class="line">JAVA_HOME=<span class="regexp">/usr/java</span><span class="regexp">/latest</span></span><br><span class="line"><span class="regexp">JRE_HOME=$JAVA_HOME/jre</span></span><br><span class="line">PATH=$<span class="symbol">PATH:</span>$JAVA_HOME/<span class="symbol">bin:</span>$JRE_HOME/bin</span><br><span class="line">CLASSPATH=.:$JAVA_HOME/<span class="class"><span class="keyword">lib</span>/<span class="title">dt</span>.<span class="title">jar</span>:$<span class="title">JAVA_HOME</span>/<span class="title">lib</span>/<span class="title">tools</span>.<span class="title">jar</span>:$<span class="title">JRE_HOME</span>/<span class="title">lib</span></span></span><br><span class="line">export JAVA_HOME JRE_HOME PATH CLASSPATH</span><br><span class="line"></span><br><span class="line"><span class="comment"># 5. 使修改生效，完成安装</span></span><br><span class="line">[root@hadoop-master ~]<span class="comment"># source /etc/profile   //使修改立即生效 </span></span><br><span class="line">[root@hadoop-master ~]<span class="comment"># echo $PATH   //查看PATH值</span></span><br></pre></td></tr></table></figure>
</li>
<li><p>安装OracleJDK8过程如下图：<br><img src="/blog/images/20191224110923147.jpg" alt="卸载系统自带OpenJDK过程"></p>
</li>
</ol>
<h2 id="Hadoop伪分布式集群安装部署"><a href="#Hadoop伪分布式集群安装部署" class="headerlink" title="Hadoop伪分布式集群安装部署"></a>Hadoop伪分布式集群安装部署</h2><blockquote>
<p>使用1台Linux虚拟机安装伪分布式</p>
</blockquote>
<p><strong>注意：Hadoop伪分布式集群和Hadoop分布式集群安装完成的Hadoop，在具体操作使用Hadoop上是完全一致的</strong></p>
<h3 id="Linux基本环境配置"><a href="#Linux基本环境配置" class="headerlink" title="Linux基本环境配置"></a>Linux基本环境配置</h3><ol>
<li>IP配置<blockquote>
<p>IP的配置见上一节《CentOS6.8安装成功后的网络配置》，可通过ifconfig命令查看IP(eth0中的ip即是)</p>
</blockquote>
</li>
</ol>
<p><img src="/blog/images/20191224111750106.jpg" alt="IP的配置"></p>
<ol start="2">
<li>主机名的永久设置<blockquote>
<p>通过hostname命令可以查看主机名<br>通过hostname xxx命令可以临时修改主机名，重启后失效<br>通过修改/etc/sysconfig/network文件的方式永久设置主机名</p>
</blockquote>
</li>
</ol>
<p><img src="/blog/images/20191224114807639.jpg" alt="永久设置主机名"></p>
<ol start="3">
<li>Hosts文件的修改(为了绑定IP和主机名的映射关系)<blockquote>
<p>在/etc/hosts文件的最后增加”ip 主机名”一行即可</p>
</blockquote>
</li>
</ol>
<p><img src="/blog/images/20191224115218443.jpg" alt="Hosts文件的修改"></p>
<ol start="4">
<li><p>永久关闭防火墙</p>
<blockquote>
<p>service iptables status # 查看防火墙的状态<br>service iptables stop # 临时关闭防火墙<br>chkconfig iptables off # 永久关闭防火墙(从开机启动项中移除)</p>
</blockquote>
</li>
<li><p>SSH免密码登录(因为Hadoop要通过SSH免密码连接各机器)</p>
<figure class="highlight ruby"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br></pre></td><td class="code"><pre><span class="line"><span class="comment"># 配置SSH免密码登录的命令如下</span></span><br><span class="line">ssh-keygen -t rsa <span class="comment">#一直按回车即可</span></span><br><span class="line"><span class="comment"># 追加id_rsa.pub文件到重定向文件authorized_keys</span></span><br><span class="line">cat ~<span class="regexp">/.ssh/id</span>_rsa.pub <span class="meta">&gt;&gt; </span>~<span class="regexp">/.ssh/authorized</span>_keys</span><br><span class="line"><span class="comment"># 配置SSH免密码登录，进行验证</span></span><br><span class="line">ssh hadoop00<span class="number">0</span></span><br></pre></td></tr></table></figure>

</li>
</ol>
<p>配置SSH免密码登录的过程如下：<br><img src="/blog/images/20191224120713620.jpg" alt="配置SSH免密码登录的过程"></p>
<ol start="6">
<li>安装JDK8<blockquote>
<p>见上一节《卸载系统自带OpenJDK，安装Oracle JDK8》</p>
</blockquote>
</li>
</ol>
<h3 id="Hadoop伪分布式集群安装"><a href="#Hadoop伪分布式集群安装" class="headerlink" title="Hadoop伪分布式集群安装"></a>Hadoop伪分布式集群安装</h3><p>Hadoop伪分布式集群安装过程的命令如下：</p>
<ol>
<li>下载Apache Hadoop 3.2.0 版本 : hadoop-3.2.0.tar.gz 并上传到服务器<blockquote>
<p>apache官网下载地址： <a href="https://archive.apache.org/dist/hadoop/common/hadoop-3.2.0/" target="_blank" rel="noopener">https://archive.apache.org/dist/hadoop/common/hadoop-3.2.0/</a></p>
</blockquote>
</li>
</ol>
<p><img src="/blog/images/20191224124757219.jpg" alt="下载Apache Hadoop"></p>
<ol start="2">
<li>解压 hadoop-3.2.0.tar.gz <figure class="highlight css"><table><tr><td class="gutter"><pre><span class="line">1</span><br></pre></td><td class="code"><pre><span class="line"><span class="selector-tag">tar</span> <span class="selector-tag">-zxvf</span> <span class="selector-tag">hadoop-3</span><span class="selector-class">.2</span><span class="selector-class">.0</span><span class="selector-class">.tar</span><span class="selector-class">.gz</span></span><br></pre></td></tr></table></figure>

</li>
</ol>
<blockquote>
<p>hadoop下的bin目录——启动Hadoop客户端的目录；sbin目录——启动Hadoop服务端的目录；etc目录——Hadoop配置文件目录</p>
</blockquote>
<ol start="3">
<li>修改etc/hadoop/core-site.xml配置文件, 将configuration节点修改为如下内容<figure class="highlight dts"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br></pre></td><td class="code"><pre><span class="line"><span class="meta"># 需要修改hdfs:<span class="comment">//后的主机名为hadoop000；并且需要手动在服务器上创建/data 目录</span></span></span><br><span class="line"><span class="meta"># 配置的是HDFS的主节点地址：hdfs:<span class="comment">//hadoop000:9000 及hadoop存储文件的目录</span></span></span><br><span class="line"><span class="params">&lt;configuration&gt;</span></span><br><span class="line">  <span class="params">&lt;property&gt;</span></span><br><span class="line">    <span class="params">&lt;name&gt;</span>fs.defaultFS<span class="params">&lt;/name&gt;</span></span><br><span class="line">    <span class="params">&lt;value&gt;</span>hdfs:<span class="comment">//hadoop000:9000&lt;/value&gt;</span></span><br><span class="line">  <span class="params">&lt;/property&gt;</span></span><br><span class="line">  <span class="params">&lt;property&gt;</span></span><br><span class="line">    <span class="params">&lt;name&gt;</span>hadoop.tmp.dir<span class="params">&lt;/name&gt;</span></span><br><span class="line">    <span class="params">&lt;value&gt;</span><span class="meta-keyword">/data/</span>hadoop_repo<span class="params">&lt;/value&gt;</span></span><br><span class="line">  <span class="params">&lt;/property&gt;</span></span><br><span class="line"><span class="params">&lt;/configuration&gt;</span></span><br></pre></td></tr></table></figure>

</li>
</ol>
<p>修改过程如下图：<br><img src="/blog/images/20191224140337869.jpg" alt="修改core-site.xml配置文件"></p>
<ol start="4">
<li>修改etc/hadoop/hdfs-site.xml 文件，增加如下内容<figure class="highlight dts"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br></pre></td><td class="code"><pre><span class="line"><span class="meta"># 把 hdfs 中文件副本的数量设置为 1，因为伪分布集群只有一个节点</span></span><br><span class="line"><span class="params">&lt;configuration&gt;</span></span><br><span class="line">  <span class="params">&lt;property&gt;</span></span><br><span class="line">    <span class="params">&lt;name&gt;</span>dfs.replication<span class="params">&lt;/name&gt;</span></span><br><span class="line">    <span class="params">&lt;value&gt;</span><span class="number">1</span><span class="params">&lt;/value&gt;</span></span><br><span class="line">  <span class="params">&lt;/property&gt;</span></span><br><span class="line"><span class="params">&lt;/configuration&gt;</span></span><br></pre></td></tr></table></figure>

</li>
</ol>
<p>修改过程如下图：<br><img src="/blog/images/20191224141211000.jpg" alt="修改hdfs-site.xml配置文件"></p>
<ol start="5">
<li>修改 mapred-site.xml，设置 mapreduce 使用的资源调度框架yarn<figure class="highlight xml"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br></pre></td><td class="code"><pre><span class="line"><span class="tag">&lt;<span class="name">configuration</span>&gt;</span></span><br><span class="line">  <span class="tag">&lt;<span class="name">property</span>&gt;</span></span><br><span class="line">    <span class="tag">&lt;<span class="name">name</span>&gt;</span>mapreduce.framework.name<span class="tag">&lt;/<span class="name">name</span>&gt;</span></span><br><span class="line">    <span class="tag">&lt;<span class="name">value</span>&gt;</span>yarn<span class="tag">&lt;/<span class="name">value</span>&gt;</span></span><br><span class="line">  <span class="tag">&lt;/<span class="name">property</span>&gt;</span></span><br><span class="line"><span class="tag">&lt;/<span class="name">configuration</span>&gt;</span></span><br></pre></td></tr></table></figure>

</li>
</ol>
<p>修改过程如下图：<br><img src="/blog/images/20191224141637967.jpg" alt="修改mapred-site.xml配置文件"></p>
<ol start="6">
<li>修改 yarn-site.xml，设置 yarn 上支持运行的服务和环境变量白名单<figure class="highlight xml"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br></pre></td><td class="code"><pre><span class="line"><span class="tag">&lt;<span class="name">configuration</span>&gt;</span></span><br><span class="line">  <span class="tag">&lt;<span class="name">property</span>&gt;</span></span><br><span class="line">    <span class="tag">&lt;<span class="name">name</span>&gt;</span>yarn.nodemanager.aux-services<span class="tag">&lt;/<span class="name">name</span>&gt;</span></span><br><span class="line">    <span class="tag">&lt;<span class="name">value</span>&gt;</span>mapreduce_shuffle<span class="tag">&lt;/<span class="name">value</span>&gt;</span></span><br><span class="line">  <span class="tag">&lt;/<span class="name">property</span>&gt;</span></span><br><span class="line">  <span class="tag">&lt;<span class="name">property</span>&gt;</span></span><br><span class="line">    <span class="tag">&lt;<span class="name">name</span>&gt;</span>yarn.nodemanager.env-whitelist<span class="tag">&lt;/<span class="name">name</span>&gt;</span></span><br><span class="line">    <span class="tag">&lt;<span class="name">value</span>&gt;</span>JAVA_HOME,HADOOP_COMMON_HOME,HADOOP_HDFS_HOME,HADOOP_CONF_DIR,CL</span><br><span class="line">ASSPATH_PREPEND_DISTCACHE,HADOOP_YARN_HOME,HADOOP_MAPRED_HOME<span class="tag">&lt;/<span class="name">value</span>&gt;</span></span><br><span class="line">  <span class="tag">&lt;/<span class="name">property</span>&gt;</span></span><br><span class="line"><span class="tag">&lt;/<span 
class="name">configuration</span>&gt;</span></span><br></pre></td></tr></table></figure>

</li>
</ol>
<p>修改过程如下图：<br><img src="/blog/images/20191224141957066.jpg" alt="修改yarn-site.xml配置文件"></p>
<ol start="7">
<li>修改 hadoop-env.sh 文件，增加如下两条Hadoop的环境变量信息和日志存放目录<figure class="highlight routeros"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br></pre></td><td class="code"><pre><span class="line"><span class="builtin-name">export</span> <span class="attribute">JAVA_HOME</span>=/usr/java/latest</span><br><span class="line"><span class="builtin-name">export</span> <span class="attribute">HADOOP_LOG_DIR</span>=/data/hadoop_repo/logs/hadoop</span><br></pre></td></tr></table></figure>

</li>
</ol>
<p>修改过程如下图：<br><img src="/blog/images/20191224142437218.jpg" alt="修改hadoop-env.sh文件"></p>
<ol start="8">
<li>格式化namenode(仅第一次使用Hadoop时需要执行)<figure class="highlight fortran"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br></pre></td><td class="code"><pre><span class="line">cd Hadoop根目录</span><br><span class="line">bin/hdfs namenode -<span class="keyword">format</span></span><br><span class="line"># 如果在后面的日志信息中能看到这一行，则说明 namenode 格式化成功。</span><br><span class="line"><span class="keyword">common</span>.Storage: Storage directory /<span class="keyword">data</span>/hadoop_repo/dfs/<span class="keyword">name</span> has been successfully</span><br><span class="line"><span class="keyword">formatted</span>.</span><br></pre></td></tr></table></figure>

</li>
</ol>
<p>格式化namenode过程如下图：<br><img src="/blog/images/20191224143045442.jpg" alt="格式化 namenode"></p>
<p>如果格式化失败，则删除/data/hadoop_repo目录，重新格式化即可</p>
<ol start="9">
<li>启动 hadoop 集群<figure class="highlight apache"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br></pre></td><td class="code"><pre><span class="line"><span class="comment"># 启动 hadoop 集群的命令</span></span><br><span class="line"><span class="attribute">sbin</span>/start-<span class="literal">all</span>.sh</span><br></pre></td></tr></table></figure>

</li>
</ol>
<p>执行上述命令启动 hadoop 集群时，发现在启动的时候报错，提示缺少 HDFS 和 YARN 的一些用户信息。如下图：<br><img src="/blog/images/20191224143518868.jpg" alt="启动 hadoop 集群报错"></p>
<p>解决方案如下：</p>
<figure class="highlight ini"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br></pre></td><td class="code"><pre><span class="line"><span class="comment"># 1. 修改 sbin/start-dfs.sh，sbin/stop-dfs.sh 这两个脚本文件，在文件前面分别增加如下内容</span></span><br><span class="line"><span class="attr">HDFS_DATANODE_USER</span>=root</span><br><span class="line"><span class="attr">HDFS_DATANODE_SECURE_USER</span>=hdfs</span><br><span class="line"><span class="attr">HDFS_NAMENODE_USER</span>=root</span><br><span class="line"><span class="attr">HDFS_SECONDARYNAMENODE_USER</span>=root</span><br><span class="line"></span><br><span class="line"><span class="comment"># 2. 修改 sbin/start-yarn.sh，sbin/stop-yarn.sh 这两个脚本文件，在文件前面增加如下内容</span></span><br><span class="line"><span class="attr">YARN_RESOURCEMANAGER_USER</span>=root</span><br><span class="line"><span class="attr">HADOOP_SECURE_DN_USER</span>=yarn</span><br><span class="line"><span class="attr">YARN_NODEMANAGER_USER</span>=root</span><br></pre></td></tr></table></figure>

<p>解决方案的图示如下：<br><img src="/blog/images/20191224144048170.jpg" alt="修改 sbin/start-dfs.sh，sbin/stop-dfs.sh "><br><img src="/blog/images/20191224144352817.jpg" alt="修改 sbin/start-yarn.sh，sbin/stop-yarn.sh "></p>
<p>再次重新启动Hadoop集群</p>
<figure class="highlight apache"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br></pre></td><td class="code"><pre><span class="line"><span class="comment"># 启动 hadoop 集群的命令</span></span><br><span class="line"><span class="attribute">sbin</span>/start-<span class="literal">all</span>.sh</span><br></pre></td></tr></table></figure>

<p>启动Hadoop集群过程如下图：<br><img src="/blog/images/20191224144812484.jpg" alt="启动 hadoop 集群"></p>
<ol start="10">
<li>验证Hadoop集群启动成功，输入jps命令，如果能看到如下5个进程，则说明Hadoop集群启动成功<br><img src="/blog/images/20191224144938423.jpg" alt="验证Hadoop集群启动成功"></li>
</ol>
<p>也可以通过浏览器访问 <a href="http://192.168.126.131:9870" target="_blank" rel="noopener">http://192.168.126.131:9870</a> 或 <a href="http://192.168.126.131:8088" target="_blank" rel="noopener">http://192.168.126.131:8088</a> ，如果能看到如下界面，则说明Hadoop集群启动成功<br><img src="/blog/images/20191224145316317.jpg" alt="验证Hadoop集群启动成功"></p>
<ol start="11">
<li>关闭Hadoop集群<figure class="highlight vim"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br></pre></td><td class="code"><pre><span class="line"># 关闭Hadoop集群的命令</span><br><span class="line">sbin/<span class="keyword">stop</span>-<span class="keyword">all</span>.<span class="keyword">sh</span></span><br></pre></td></tr></table></figure>

</li>
</ol>
<p>关闭Hadoop集群的操作过程如下图：<br><img src="/blog/images/20191224145553681.jpg" alt="关闭Hadoop集群"></p>
<h2 id="Hadoop分布式集群安装部署"><a href="#Hadoop分布式集群安装部署" class="headerlink" title="Hadoop分布式集群安装部署"></a>Hadoop分布式集群安装部署</h2><blockquote>
<p>使用3台Linux虚拟机安装分布式集群</p>
</blockquote>
<p>具体安装过程见：<a href="https://www.imooc.com/video/20729" target="_blank" rel="noopener">https://www.imooc.com/video/20729</a><br>本文中略写，仅列出关键点：</p>
<ol>
<li>每台机器都要执行一遍《Linux基本环境配置》一节中的配置，并且在每个hosts文件中将其他所有机器的主机名与IP的映射关系都要配置上</li>
<li>在Hadoop分布式集群中配置免密码登录时，至少需要实现主节点可以免密码登录到自己及其它所有节点即可(因为从节点上面<br>的进程是由主节点通过 ssh 远程启动的)<figure class="highlight autoit"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br></pre></td><td class="code"><pre><span class="line"><span class="meta"># 在 hadoop100 这台机器上执行下面命令，即可实现主节点hadoop100免密码登录到所有从节点hadoop101、hadoop102。</span></span><br><span class="line">[root<span class="symbol">@hadoop100</span> ~]<span class="meta"># ssh-copy-id -i hadoop101</span></span><br><span class="line">[root<span class="symbol">@hadoop100</span> ~]<span class="meta"># ssh-copy-id -i hadoop102</span></span><br></pre></td></tr></table></figure>

</li>
</ol>
<p>配置过程如下图：<br><img src="/blog/images/20191224151407003.jpg" alt="实现该机到其他节点的免密登录"></p>
<ol start="3">
<li>上述两步为Linux环境的配置，接下来就是Hadoop分布式集群安装部署<blockquote>
<p>如果机器上已经安装过Hadoop，一定要删除Hadoop安装目录和data下的hadoop_repo目录。相当于清除之前的Hadoop安装信息</p>
</blockquote>
</li>
</ol>
<p><img src="/blog/images/20191224152233716.jpg" alt="删除Hadoop安装目录和data下的hadoop_repo目录"></p>
<ol start="4">
<li>先在主节点上安装配置Hadoop<blockquote>
<p>步骤基本同《Hadoop伪分布式集群安装》一节，不同的地方只有如下3处：</p>
</blockquote>
</li>
</ol>
<figure class="highlight gherkin"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br><span class="line">13</span><br><span class="line">14</span><br><span class="line">15</span><br><span class="line">16</span><br><span class="line">17</span><br><span class="line">18</span><br><span class="line">19</span><br><span class="line">20</span><br><span class="line">21</span><br><span class="line">22</span><br><span class="line">23</span><br><span class="line">24</span><br><span class="line">25</span><br><span class="line">26</span><br><span class="line">27</span><br><span class="line">28</span><br><span class="line">29</span><br><span class="line">30</span><br><span class="line">31</span><br><span class="line">32</span><br><span class="line">33</span><br><span class="line">34</span><br><span class="line">35</span><br></pre></td><td class="code"><pre><span class="line"><span class="comment"># 1. 
修改hdfs-site.xml，因为是分布式集群(有2个从节点)，指定2个副本。同时指定secondaryNamenode在哪个机器上启动</span></span><br><span class="line">[root<span class="meta">@hadoop100</span> hadoop]<span class="comment"># vi hdfs-site.xml</span></span><br><span class="line"><span class="variable">&lt;configuration&gt;</span></span><br><span class="line">  <span class="variable">&lt;property&gt;</span></span><br><span class="line">    <span class="variable">&lt;name&gt;</span>dfs.replication<span class="variable">&lt;/name&gt;</span></span><br><span class="line">    <span class="variable">&lt;value&gt;</span>2<span class="variable">&lt;/value&gt;</span></span><br><span class="line">  <span class="variable">&lt;/property&gt;</span></span><br><span class="line">  <span class="variable">&lt;property&gt;</span></span><br><span class="line">    <span class="variable">&lt;name&gt;</span>dfs.namenode.secondary.http-address<span class="variable">&lt;/name&gt;</span></span><br><span class="line">    <span class="variable">&lt;value&gt;</span>hadoop100:50090<span class="variable">&lt;/value&gt;</span></span><br><span class="line">  <span class="variable">&lt;/property&gt;</span></span><br><span class="line"><span class="variable">&lt;/configuration&gt;</span></span><br><span class="line"></span><br><span class="line"><span class="comment"># 2. 
修改 yarn-site.xml，增加指定resourcemanager在哪个机器上启动</span></span><br><span class="line">[root<span class="meta">@hadoop100</span> hadoop]<span class="comment"># vi yarn-site.xml</span></span><br><span class="line"><span class="variable">&lt;configuration&gt;</span></span><br><span class="line">  <span class="variable">&lt;property&gt;</span></span><br><span class="line">    <span class="variable">&lt;name&gt;</span>yarn.nodemanager.aux-services<span class="variable">&lt;/name&gt;</span></span><br><span class="line">    <span class="variable">&lt;value&gt;</span>mapreduce_shuffle<span class="variable">&lt;/value&gt;</span></span><br><span class="line">  <span class="variable">&lt;/property&gt;</span></span><br><span class="line">  <span class="variable">&lt;property&gt;</span></span><br><span class="line">    <span class="variable">&lt;name&gt;</span>yarn.nodemanager.env-whitelist<span class="variable">&lt;/name&gt;</span></span><br><span class="line">    <span class="variable">&lt;value&gt;</span>JAVA_HOME,HADOOP_COMMON_HOME,HADOOP_HDFS_HOME,HADOOP_CONF_DIR,CL</span><br><span class="line">ASSPATH_PREPEND_DISTCACHE,HADOOP_YARN_HOME,HADOOP_MAPRED_HOME<span class="variable">&lt;/value&gt;</span></span><br><span class="line">  <span class="variable">&lt;/property&gt;</span></span><br><span class="line">  <span class="variable">&lt;property&gt;</span></span><br><span class="line">    <span class="variable">&lt;name&gt;</span>yarn.resourcemanager.hostname<span class="variable">&lt;/name&gt;</span></span><br><span class="line">    <span class="variable">&lt;value&gt;</span>hadoop100<span class="variable">&lt;/value&gt;</span></span><br><span class="line">  <span class="variable">&lt;/property&gt;</span></span><br><span class="line"><span class="variable">&lt;/configuration&gt;</span></span><br><span class="line"></span><br><span class="line"><span class="comment"># 3. 
修改 workers 文件，增加所有从节点的主机名，一个一行(告诉Hadoop集群，哪些是从节点)</span></span><br><span class="line">[root<span class="meta">@hadoop100</span> hadoop]<span class="comment"># vi workers</span></span><br><span class="line">hadoop101</span><br><span class="line">hadoop102</span><br></pre></td></tr></table></figure>

<ol start="5">
<li><p>把主节点上修改好配置的Hadoop安装包拷贝到其他两个从节点(因为集群中各机器上的配置文件是一样的)<br><img src="/blog/images/20191224154425792.jpg" alt="把主节点上修改好配置的Hadoop安装包拷贝到其他两个从节点"></p>
</li>
<li><p>在主节点上格式化namenode</p>
</li>
<li><p>在主节点上执行下面命令启动Hadoop集群</p>
<figure class="highlight apache"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br></pre></td><td class="code"><pre><span class="line"><span class="attribute">sbin</span>/start-<span class="literal">all</span>.sh</span><br><span class="line"><span class="comment"># 验证集群启动成功，主节点中有3个进程，从节点有2个进程</span></span><br></pre></td></tr></table></figure>

</li>
</ol>
<p><img src="/blog/images/20191224154848167.jpg" alt="启动Hadoop集群并验证"></p>
<p>至此，hadoop 分布式集群安装成功！</p>
<h2 id="Hadoop客户端节点安装"><a href="#Hadoop客户端节点安装" class="headerlink" title="Hadoop客户端节点安装"></a>Hadoop客户端节点安装</h2><ol>
<li>在实际工作中，不建议开发人员直接连接集群中的节点来操作集群，因为直接把集群中的节点暴露给普通开发人员是不安全的。</li>
<li>建议在业务机器上安装Hadoop客户端，这样就可以在业务机器上操作Hadoop集群了。</li>
</ol>
<blockquote>
<p>Hadoop客户端节点：其上安装的Hadoop只需配置文件与Hadoop集群保持一致，无需启动任何Hadoop进程，即可操作Hadoop集群</p>
</blockquote>

      
    </div>
    
    
    

    

    

    

    <footer class="post-footer">
      
        <div class="post-tags">
          
            <a href="/blog/tags/Hadoop3-0入门/" rel="tag"># Hadoop3.0入门</a>
          
        </div>
      

      
      
      

      
        <div class="post-nav">
          <div class="post-nav-next post-nav-item">
            
              <a href="/blog/2019/12/22/machine-learning-classical-algorithm-3/" rel="next" title="【三】Python3入门机器学习经典算法与应用——最基础的分类算法-kNN近邻算法">
                <i class="fa fa-chevron-left"></i> 【三】Python3入门机器学习经典算法与应用——最基础的分类算法-kNN近邻算法
              </a>
            
          </div>

          <span class="post-nav-divider"></span>

          <div class="post-nav-prev post-nav-item">
            
              <a href="/blog/2019/12/24/hadoop3.0-step-by-step-3/" rel="prev" title="【三】快速入门Hadoop3.0大数据处理——Hadoop三大组件详解">
                【三】快速入门Hadoop3.0大数据处理——Hadoop三大组件详解 <i class="fa fa-chevron-right"></i>
              </a>
            
          </div>
        </div>
      

      
      
    </footer>
  </div>
  
  
  
  </article>



    <div class="post-spread">
      
    </div>
  </div>


          </div>
          


          

  



        </div>
        
          
  
  <div class="sidebar-toggle">
    <div class="sidebar-toggle-line-wrap">
      <span class="sidebar-toggle-line sidebar-toggle-line-first"></span>
      <span class="sidebar-toggle-line sidebar-toggle-line-middle"></span>
      <span class="sidebar-toggle-line sidebar-toggle-line-last"></span>
    </div>
  </div>

  <aside id="sidebar" class="sidebar">
    
    <div class="sidebar-inner">

      

      
        <ul class="sidebar-nav motion-element">
          <li class="sidebar-nav-toc sidebar-nav-active" data-target="post-toc-wrap">
            文章目录
          </li>
          <li class="sidebar-nav-overview" data-target="site-overview-wrap">
            站点概览
          </li>
        </ul>
      

      <section class="site-overview-wrap sidebar-panel">
        <div class="site-overview">
          <div class="site-author motion-element" itemprop="author" itemscope itemtype="http://schema.org/Person">
            
              <p class="site-author-name" itemprop="name">aiolos</p>
              <p class="site-description motion-element" itemprop="description">Java Spring Hadoop 机器学习</p>
          </div>

          <nav class="site-state motion-element">

            
              <div class="site-state-item site-state-posts">
              
                <a href="/blog/archives/">
              
                  <span class="site-state-item-count">126</span>
                  <span class="site-state-item-name">日志</span>
                </a>
              </div>
            

            
              
              
              <div class="site-state-item site-state-categories">
                <a href="/blog/categories/index.html">
                  <span class="site-state-item-count">16</span>
                  <span class="site-state-item-name">分类</span>
                </a>
              </div>
            

            
              
              
              <div class="site-state-item site-state-tags">
                <a href="/blog/tags/index.html">
                  <span class="site-state-item-count">33</span>
                  <span class="site-state-item-name">标签</span>
                </a>
              </div>
            

          </nav>

          

          

          
          

          
          

          

        </div>
      </section>

      
      <!--noindex-->
        <section class="post-toc-wrap motion-element sidebar-panel sidebar-panel-active">
          <div class="post-toc">

            
              
            

            
              <div class="post-toc-content"><ol class="nav"><li class="nav-item nav-level-2"><a class="nav-link" href="#Hadoop常见发行版介绍"><span class="nav-number">1.</span> <span class="nav-text">Hadoop常见发行版介绍</span></a><ol class="nav-child"><li class="nav-item nav-level-3"><a class="nav-link" href="#CentOS6-8安装成功后的网络配置"><span class="nav-number">1.1.</span> <span class="nav-text">CentOS6.8安装成功后的网络配置</span></a></li><li class="nav-item nav-level-3"><a class="nav-link" href="#CentOS配置阿里云yum源"><span class="nav-number">1.2.</span> <span class="nav-text">CentOS配置阿里云yum源</span></a></li><li class="nav-item nav-level-3"><a class="nav-link" href="#卸载系统自带OpenJDK，安装Oracle-JDK8"><span class="nav-number">1.3.</span> <span class="nav-text">卸载系统自带OpenJDK，安装Oracle JDK8</span></a></li></ol></li><li class="nav-item nav-level-2"><a class="nav-link" href="#Hadoop伪分布式集群安装部署"><span class="nav-number">2.</span> <span class="nav-text">Hadoop伪分布式集群安装部署</span></a><ol class="nav-child"><li class="nav-item nav-level-3"><a class="nav-link" href="#Linux基本环境配置"><span class="nav-number">2.1.</span> <span class="nav-text">Linux基本环境配置</span></a></li><li class="nav-item nav-level-3"><a class="nav-link" href="#Hadoop伪分布式集群安装"><span class="nav-number">2.2.</span> <span class="nav-text">Hadoop伪分布式集群安装</span></a></li></ol></li><li class="nav-item nav-level-2"><a class="nav-link" href="#Hadoop分布式集群安装部署"><span class="nav-number">3.</span> <span class="nav-text">Hadoop分布式集群安装部署</span></a></li><li class="nav-item nav-level-2"><a class="nav-link" href="#Hadoop客户端节点安装"><span class="nav-number">4.</span> <span class="nav-text">Hadoop客户端节点安装</span></a></li></ol></div>
            

          </div>
        </section>
      <!--/noindex-->
      

      

    </div>
  </aside>


        
      </div>
    </main>

    <footer id="footer" class="footer">
      <div class="footer-inner">
        <div class="copyright">&copy; <span itemprop="copyrightYear">2020</span>
  <span class="with-love">
    <i class="fa fa-user"></i>
  </span>
  <span class="author" itemprop="copyrightHolder">aiolos</span>

  
</div>


  <div class="powered-by">由 <a class="theme-link" target="_blank" href="https://hexo.io">Hexo</a> 强力驱动</div>



  <span class="post-meta-divider">|</span>



  <div class="theme-info">主题 &mdash; <a class="theme-link" target="_blank" href="https://github.com/iissnan/hexo-theme-next">NexT.Gemini</a> v5.1.4</div>




        







        
      </div>
    </footer>

    
      <div class="back-to-top">
        <i class="fa fa-arrow-up"></i>
        
      </div>
    

    

  </div>

  

<script type="text/javascript">
  // Some browsers ship a partial/broken Promise; anything whose
  // [[Class]] is not a plain function is nulled out so later code can
  // detect the gap and install a polyfill.
  (function () {
    var tag = Object.prototype.toString.call(window.Promise);
    if (tag !== '[object Function]') {
      window.Promise = null;
    }
  })();
</script>









  












  
  
    <script type="text/javascript" src="/blog/lib/jquery/index.js?v=2.1.3"></script>
  

  
  
    <script type="text/javascript" src="/blog/lib/fastclick/lib/fastclick.min.js?v=1.0.6"></script>
  

  
  
    <script type="text/javascript" src="/blog/lib/jquery_lazyload/jquery.lazyload.js?v=1.9.7"></script>
  

  
  
    <script type="text/javascript" src="/blog/lib/velocity/velocity.min.js?v=1.2.1"></script>
  

  
  
    <script type="text/javascript" src="/blog/lib/velocity/velocity.ui.min.js?v=1.2.1"></script>
  

  
  
    <script type="text/javascript" src="/blog/lib/fancybox/source/jquery.fancybox.pack.js?v=2.1.5"></script>
  


  


  <script type="text/javascript" src="/blog/js/src/utils.js?v=5.1.4"></script>

  <script type="text/javascript" src="/blog/js/src/motion.js?v=5.1.4"></script>



  
  


  <script type="text/javascript" src="/blog/js/src/affix.js?v=5.1.4"></script>

  <script type="text/javascript" src="/blog/js/src/schemes/pisces.js?v=5.1.4"></script>



  
  <script type="text/javascript" src="/blog/js/src/scrollspy.js?v=5.1.4"></script>
<script type="text/javascript" src="/blog/js/src/post-details.js?v=5.1.4"></script>



  


  <script type="text/javascript" src="/blog/js/src/bootstrap.js?v=5.1.4"></script>



  


  




	





  





  












  

  <script type="text/javascript">
    // NexT local search: shared state for the handlers defined below.
    // Popup Window;
    var isfetched = false;  // true once the search index has been downloaded
    var isXml = true;       // index format flag; flipped to false for a JSON index
    // Search DB path;
    var search_path = "search.xml";  // literal injected by the theme at build time
    // NOTE(review): with the injected literal "search.xml" the length
    // check below can never be true and isXml stays true; both branches
    // only matter for other generated configurations of this template.
    if (search_path.length === 0) {
      search_path = "search.xml";
    } else if (/json$/i.test(search_path)) {
      isXml = false;
    }
    var path = "/blog/" + search_path;  // index URL, prefixed with the site root
    // monitor main search box;

    // Restore the page to its pre-search state: unlock scrolling, hide
    // the popup, clear the query box, and remove any rendered results
    // together with the dimming overlay.
    var onPopupClose = function (event) {
      $('body').css('overflow', '');
      $('.popup').hide();
      $('#local-search-input').val('');
      $('.search-result-list, #no-result, .local-search-pop-overlay').remove();
    }

    // Open the search popup: add a dimming overlay that closes on
    // click, lock page scrolling, toggle the popup visible, and focus
    // the query box with mobile auto-capitalize/auto-correct disabled.
    function proceedsearch() {
      $('body')
        .append('<div class="search-popup-overlay local-search-pop-overlay"></div>')
        .css('overflow', 'hidden');
      $('.search-popup-overlay').click(onPopupClose);
      $('.popup').toggle();
      $('#local-search-input')
        .attr({autocapitalize: 'none', autocorrect: 'off'})
        .focus();
    }

    // search function;
    // Downloads the search index from `path` (XML <entry> elements or a
    // JSON array), then binds the input box `search_id` so every edit
    // re-runs the matcher and renders results into `content_id`.
    var searchFunc = function(path, search_id, content_id) {
      'use strict';

      // start loading animation
      $("body")
        .append('<div class="search-popup-overlay local-search-pop-overlay">' +
          '<div id="search-loading-icon">' +
          '<i class="fa fa-spinner fa-pulse fa-5x fa-fw"></i>' +
          '</div>' +
          '</div>')
        .css('overflow', 'hidden');
      $("#search-loading-icon").css('margin', '20% auto 0 auto').css('text-align', 'center');

      $.ajax({
        url: path,
        dataType: isXml ? "xml" : "json",
        async: true,
        success: function(res) {
          // get the contents from search data
          isfetched = true;
          $('.popup').detach().appendTo('.header-inner');
          // Normalize both index formats to [{title, content, url}].
          var datas = isXml ? $("entry", res).map(function() {
            return {
              title: $("title", this).text(),
              content: $("content",this).text(),
              url: $("url" , this).text()
            };
          }).get() : res;
          var input = document.getElementById(search_id);
          var resultContent = document.getElementById(content_id);
          // Full match + render pipeline; re-run on every input event.
          var inputEventFunction = function() {
            var searchText = input.value.trim().toLowerCase();
            // Split on whitespace/hyphens; when the query has several
            // words, the whole phrase is kept as an extra keyword so
            // exact-phrase matches also count.
            var keywords = searchText.split(/[\s\-]+/);
            if (keywords.length > 1) {
              keywords.push(searchText);
            }
            var resultItems = [];
            if (searchText.length > 0) {
              // perform local searching
              datas.forEach(function(data) {
                var isMatch = false;
                var hitCount = 0;          // total keyword occurrences in this article
                var searchTextCount = 0;   // occurrences of the exact phrase (ranking key)
                var title = data.title.trim();
                var titleInLowerCase = title.toLowerCase();
                var content = data.content.trim().replace(/<[^>]+>/g,"");
                var contentInLowerCase = content.toLowerCase();
                var articleUrl = decodeURIComponent(data.url);
                var indexOfTitle = [];
                var indexOfContent = [];
                // only match articles with not empty titles
                if(title != '') {
                  keywords.forEach(function(keyword) {
                    // Collect every occurrence of `word` in `text` as
                    // {position, word}; lowercases both when
                    // caseSensitive is false.
                    function getIndexByWord(word, text, caseSensitive) {
                      var wordLen = word.length;
                      if (wordLen === 0) {
                        return [];
                      }
                      var startPosition = 0, position = [], index = [];
                      if (!caseSensitive) {
                        text = text.toLowerCase();
                        word = word.toLowerCase();
                      }
                      while ((position = text.indexOf(word, startPosition)) > -1) {
                        index.push({position: position, word: word});
                        startPosition = position + wordLen;
                      }
                      return index;
                    }

                    indexOfTitle = indexOfTitle.concat(getIndexByWord(keyword, titleInLowerCase, false));
                    indexOfContent = indexOfContent.concat(getIndexByWord(keyword, contentInLowerCase, false));
                  });
                  if (indexOfTitle.length > 0 || indexOfContent.length > 0) {
                    isMatch = true;
                    hitCount = indexOfTitle.length + indexOfContent.length;
                  }
                }

                // show search results

                if (isMatch) {
                  // sort index by position of keyword
                  // (descending position so hits can be consumed from
                  // the array tail; ties prefer the shorter word)

                  [indexOfTitle, indexOfContent].forEach(function (index) {
                    index.sort(function (itemLeft, itemRight) {
                      if (itemRight.position !== itemLeft.position) {
                        return itemRight.position - itemLeft.position;
                      } else {
                        return itemLeft.word.length - itemRight.word.length;
                      }
                    });
                  });

                  // merge hits into slices

                  // Pops hits off the tail of `index` that lie inside
                  // [start, end), skipping hits that overlap an earlier
                  // one, and returns one excerpt slice. Side effect:
                  // adds this slice's exact-phrase hits to the outer
                  // searchTextCount.
                  function mergeIntoSlice(text, start, end, index) {
                    var item = index[index.length - 1];
                    var position = item.position;
                    var word = item.word;
                    var hits = [];
                    var searchTextCountInSlice = 0;
                    while (position + word.length <= end && index.length != 0) {
                      if (word === searchText) {
                        searchTextCountInSlice++;
                      }
                      hits.push({position: position, length: word.length});
                      var wordEnd = position + word.length;

                      // move to next position of hit

                      index.pop();
                      while (index.length != 0) {
                        item = index[index.length - 1];
                        position = item.position;
                        word = item.word;
                        if (wordEnd > position) {
                          index.pop();
                        } else {
                          break;
                        }
                      }
                    }
                    searchTextCount += searchTextCountInSlice;
                    return {
                      hits: hits,
                      start: start,
                      end: end,
                      searchTextCount: searchTextCountInSlice
                    };
                  }

                  var slicesOfTitle = [];
                  if (indexOfTitle.length != 0) {
                    slicesOfTitle.push(mergeIntoSlice(title, 0, title.length, indexOfTitle));
                  }

                  var slicesOfContent = [];
                  while (indexOfContent.length != 0) {
                    var item = indexOfContent[indexOfContent.length - 1];
                    var position = item.position;
                    var word = item.word;
                    // cut out 100 characters
                    var start = position - 20;
                    var end = position + 80;
                    if(start < 0){
                      start = 0;
                    }
                    if (end < position + word.length) {
                      end = position + word.length;
                    }
                    if(end > content.length){
                      end = content.length;
                    }
                    slicesOfContent.push(mergeIntoSlice(content, start, end, indexOfContent));
                  }

                  // sort slices in content by search text's count and hits' count

                  slicesOfContent.sort(function (sliceLeft, sliceRight) {
                    if (sliceLeft.searchTextCount !== sliceRight.searchTextCount) {
                      return sliceRight.searchTextCount - sliceLeft.searchTextCount;
                    } else if (sliceLeft.hits.length !== sliceRight.hits.length) {
                      return sliceRight.hits.length - sliceLeft.hits.length;
                    } else {
                      return sliceLeft.start - sliceRight.start;
                    }
                  });

                  // select top N slices in content
                  // NOTE(review): the literal '1' looks template-injected
                  // (max excerpts per article) — confirm in theme config.

                  var upperBound = parseInt('1');
                  if (upperBound >= 0) {
                    slicesOfContent = slicesOfContent.slice(0, upperBound);
                  }

                  // highlight title and content

                  // Rebuild `text` over [slice.start, slice.end) with each
                  // hit wrapped in <b class="search-keyword">.
                  function highlightKeyword(text, slice) {
                    var result = '';
                    var prevEnd = slice.start;
                    slice.hits.forEach(function (hit) {
                      result += text.substring(prevEnd, hit.position);
                      var end = hit.position + hit.length;
                      result += '<b class="search-keyword">' + text.substring(hit.position, end) + '</b>';
                      prevEnd = end;
                    });
                    result += text.substring(prevEnd, slice.end);
                    return result;
                  }

                  var resultItem = '';

                  if (slicesOfTitle.length != 0) {
                    resultItem += "<li><a href='" + articleUrl + "' class='search-result-title'>" + highlightKeyword(title, slicesOfTitle[0]) + "</a>";
                  } else {
                    resultItem += "<li><a href='" + articleUrl + "' class='search-result-title'>" + title + "</a>";
                  }

                  slicesOfContent.forEach(function (slice) {
                    resultItem += "<a href='" + articleUrl + "'>" +
                      "<p class=\"search-result\">" + highlightKeyword(content, slice) +
                      "...</p>" + "</a>";
                  });

                  resultItem += "</li>";
                  resultItems.push({
                    item: resultItem,
                    searchTextCount: searchTextCount,
                    hitCount: hitCount,
                    id: resultItems.length
                  });
                }
              })
            };
            // Empty query -> search icon; no matches -> frown icon;
            // otherwise rank articles and render the list.
            if (keywords.length === 1 && keywords[0] === "") {
              resultContent.innerHTML = '<div id="no-result"><i class="fa fa-search fa-5x" /></div>'
            } else if (resultItems.length === 0) {
              resultContent.innerHTML = '<div id="no-result"><i class="fa fa-frown-o fa-5x" /></div>'
            } else {
              resultItems.sort(function (resultLeft, resultRight) {
                if (resultLeft.searchTextCount !== resultRight.searchTextCount) {
                  return resultRight.searchTextCount - resultLeft.searchTextCount;
                } else if (resultLeft.hitCount !== resultRight.hitCount) {
                  return resultRight.hitCount - resultLeft.hitCount;
                } else {
                  return resultRight.id - resultLeft.id;
                }
              });
              var searchResultList = '<ul class=\"search-result-list\">';
              resultItems.forEach(function (result) {
                searchResultList += result.item;
              })
              searchResultList += "</ul>";
              resultContent.innerHTML = searchResultList;
            }
          }

          // 'auto' is injected by the build; since 'auto' === 'auto' is
          // always true here, the manual-trigger branch below is dead
          // in this generated output.
          if ('auto' === 'auto') {
            input.addEventListener('input', inputEventFunction);
          } else {
            $('.search-icon').click(inputEventFunction);
            input.addEventListener('keypress', function (event) {
              if (event.keyCode === 13) {
                inputEventFunction();
              }
            });
          }

          // remove loading animation
          $(".local-search-pop-overlay").remove();
          $('body').css('overflow', '');

          proceedsearch();
        }
      });
    }

    // Popup wiring: open on trigger click (downloading the index the
    // first time), close via the close button, the overlay, or Escape.
    $('.popup-trigger').on('click', function (event) {
      event.stopPropagation();
      if (isfetched) {
        proceedsearch();
      } else {
        searchFunc(path, 'local-search-input', 'local-search-result');
      }
    });

    $('.popup-btn-close').on('click', onPopupClose);
    $('.popup').on('click', function (event) {
      event.stopPropagation();
    });
    $(document).on('keyup', function (event) {
      if (event.which === 27 && $('.search-popup').is(':visible')) {
        onPopupClose();
      }
    });
  </script>





  

  

  

  
  

  

  

  

</body>
</html>
