package cn.demo.test

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object demo {

  // Default input file, used when no path is given on the command line.
  private val DefaultInputPath =
    "D:\\IDE\\Workspace_IDE\\spark_study\\spark_core\\src\\main\\resources\\testdk.txt"

  /**
   * Word-count entry point: reads a text file, splits each line on single
   * spaces, counts occurrences of each word, and prints the counts.
   *
   * @param args optional; `args(0)` overrides the default input path.
   */
  def main(args: Array[String]): Unit = {
    // Backward compatible: with no arguments, the original hard-coded path is used.
    val inputPath = if (args.nonEmpty) args(0) else DefaultInputPath

    val sparkConf = new SparkConf().setAppName("WordCount").setMaster("local[2]")
    val context = new SparkContext(sparkConf)
    try {
      val data: RDD[String] = context.textFile(inputPath)
      // NOTE: splits on a single space only (original behavior); consecutive
      // spaces produce empty tokens. Switch to "\\s+" if that is undesired.
      val words: RDD[String] = data.flatMap(_.split(" "))
      val wordOne: RDD[(String, Int)] = words.map(word => (word, 1))
      val result: RDD[(String, Int)] = wordOne.reduceByKey(_ + _)
      // collect() pulls all results to the driver — acceptable for small test input.
      val finalResult: Array[(String, Int)] = result.collect()
      // Reset the terminal color after printing so later output is unaffected.
      println(Console.GREEN + finalResult.toBuffer + Console.RESET)
    } finally {
      // Always release the SparkContext, even if the job above throws.
      context.stop()
    }
  }

}
