package com.ww.spark.wc

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Classic Spark word-count example, shown twice: once with explicit
 * intermediate RDDs and once as a single fluent pipeline.
 *
 * Reads whitespace-separated words from `data/testdata.txt`, counts
 * occurrences per word with `reduceByKey`, and prints each (word, count)
 * pair to stdout.
 */
object WordCountScala {

  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
    conf.setAppName("wordCount")
    // Runs in local mode; in a real job the master would come from
    // spark-submit rather than being hard-coded here.
    conf.setMaster("local")

    val sc = new SparkContext(conf)
    try {
      // Verbose version: each transformation bound to a named RDD.
      val fileRDD: RDD[String] = sc.textFile("data/testdata.txt")
      val words = fileRDD.flatMap(_.split(" "))
      // Tuple literal `(x, 1)` instead of `new Tuple2(x, 1)`; `val` since
      // the binding is never reassigned.
      val pairWord = words.map(x => (x, 1))
      val res = pairWord.reduceByKey(_ + _)
      res.foreach(println)

      // Simplified version: same computation as a single pipeline.
      val fileRDD1: RDD[String] = sc.textFile("data/testdata.txt")
      fileRDD1.flatMap(_.split(" ")).map((_, 1)).reduceByKey(_ + _).foreach(println)
    } finally {
      // Always release the SparkContext, even if a job above fails.
      sc.stop()
    }
  }

}
