package com.laosg.spark.base

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SparkSession

/**
  * Created by kaimin on 14/5/2019.
  * time : 10:28
  */
object WordCount {

  /**
    * Classic word-count driver: reads a text file, splits each line on
    * single spaces, counts occurrences of each word with `reduceByKey`,
    * and prints `word appeared count` for every distinct word.
    *
    * @param args optional: args(0) overrides the input path; when absent,
    *             falls back to the original hard-coded HDFS location so
    *             existing invocations keep working.
    */
  def main(args: Array[String]): Unit = {

    // Backward-compatible generalization: allow the input path to be
    // supplied on the command line instead of being baked in.
    val inputPath =
      if (args.nonEmpty) args(0)
      else "hdfs://192.168.2.91:9000/Hadoop/Input/word.txt"

    val conf = new SparkConf().setAppName("WordCount")
    val sc = new SparkContext(conf)
    try {
      val counts = sc.textFile(inputPath)
        .flatMap(_.split(" "))
        .map((_, 1))
        .reduceByKey(_ + _)

      // collect() brings the (small) result to the driver before printing.
      // The original `res.foreach(println)` executes on the executors, so
      // in cluster mode the output would land in executor logs, not here.
      counts.collect().foreach { case (word, count) =>
        println(s"$word appeared $count")
      }
    } finally {
      // Always release cluster resources, even if the job fails.
      sc.stop()
    }
  }
}
