package com.demo.study

import org.apache.spark.{SparkConf, SparkContext}

object WordCount {

  /** Classic Spark word count over a small in-memory corpus.
    *
    * Tokenizes three hard-coded lines on single spaces, counts word
    * occurrences with `reduceByKey`, prints the counts on the driver,
    * and persists them to a timestamped HDFS directory.
    */
  def main(args: Array[String]): Unit = {

    // Windows-only workaround: point Hadoop at a local winutils install.
    System.setProperty("hadoop.home.dir", "E:\\hadoop-common-2.7.3-bin-master")
    // Act as `root` when writing to HDFS (demo cluster, no Kerberos).
    System.setProperty("HADOOP_USER_NAME", "root")

    val conf = new SparkConf().setMaster("local[2]").setAppName("wordcount")
    val sc = new SparkContext(conf)
    try {
      val data = sc.parallelize(Seq("abc def", "def ghi", "abc def"))
      val result = data
        .flatMap(_.split(" "))
        .map(word => (word, 1))
        .reduceByKey(_ + _)

      // Collect to the driver before printing: RDD.foreach(println) runs on
      // the executors, so its output is only visible by accident in local mode.
      result.collect().foreach(println)

      // Timestamp the output path so repeated runs don't collide —
      // saveAsTextFile fails if the target directory already exists.
      val time = System.currentTimeMillis()
      result.saveAsTextFile(s"hdfs://hadoop01:9000/data01/kafka2hdfs/wordcount_${time}")
    } finally {
      // Always release the SparkContext, even when an action above throws.
      sc.stop()
    }
  }
}
