package com.xinqing.bigdata.rdd

import org.apache.spark.{SparkConf, SparkContext}

/**
  * Simple Spark RDD word-count example: reads a local text file,
  * counts word occurrences, and prints the results to stdout.
  *
  * @author CHQ
  * @since 2020-06-19
  */
object WordCount {

  /** Default input path, kept for backward compatibility when no CLI argument is given. */
  private val DefaultInput =
    "D:\\Code\\data-report-service\\spark-demo\\src\\main\\resources\\word"

  /**
    * Entry point. Runs a classic word count over a text file and prints
    * each (word, count) pair to stdout.
    *
    * @param args optional: args(0) overrides the input file path
    */
  def main(args: Array[String]): Unit = {
    // Generalize the hard-coded path: first CLI argument wins, original path is the fallback.
    val inputPath = args.headOption.getOrElse(DefaultInput)

    val conf = new SparkConf().setMaster("local[*]").setAppName("word count")
    val sc = new SparkContext(conf)
    try {
      val counts = sc.textFile(inputPath)
        .flatMap(_.split(" "))   // tokenize each line on single spaces
        .map((_, 1))             // pair each word with an initial count of 1
        .reduceByKey(_ + _)      // sum counts per word across partitions
        .collect()               // NOTE: materializes on the driver — fine for demo-sized data
      counts.foreach(println)
    } finally {
      // Fix: the original never stopped the SparkContext, leaking its resources.
      sc.stop()
    }
  }
}

