package com.zzl.scala

import org.apache.spark.{SparkConf, SparkContext}

object WordCount {

  /** Spark driver: classic word count.
    *
    * Reads a text file from HDFS, splits each line into words, counts
    * occurrences per word, prints the counts and writes them back to HDFS.
    *
    * @param args command-line arguments (unused)
    */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      .setAppName("WordCount")
      .setMaster("local[4]") // local mode, 4 threads; override via spark-submit for cluster runs

    val sc = new SparkContext(conf)
    try {
      val lines = sc.textFile("hdfs://zhangzeli-node1:9000/input/LICENSE.txt")

      // Split on single spaces; note consecutive spaces produce empty tokens,
      // which are counted under the key "" (preserves original behavior).
      val words = lines.flatMap(_.split(" "))
      val pairs = words.map((_, 1))
      val counts = pairs.reduceByKey(_ + _)

      // NOTE(review): foreach runs on the executors — println output is only
      // visible in local mode; on a cluster use collect()/take() on the driver.
      counts.foreach(println)

      // saveAsTextFile throws if the output directory already exists;
      // delete hdfs://.../output beforehand when re-running.
      counts.saveAsTextFile("hdfs://zhangzeli-node1:9000/output")
    } finally {
      // Always release the SparkContext, even when an action above fails.
      sc.stop()
    }
  }

}
