package com.cobra.wc

import org.apache.spark.{SparkConf, SparkContext}

object Spark01_WordCount {
  /**
   * Classic Spark word count: reads text files, splits each line into words,
   * counts occurrences per word, and prints `(word, count)` tuples.
   *
   * @param args optional; `args(0)` overrides the input path (defaults to "datas",
   *             preserving the original behavior when no arguments are given)
   */
  def main(args: Array[String]): Unit = {
    // Establish the connection to the Spark framework.
    // "local" runs the application in-process with a single worker thread.
    val conf = new SparkConf().setMaster("local").setAppName("WordCount")
    val sc = new SparkContext(conf)
    try {
      // Allow the input path to be supplied on the command line; fall back to
      // the original hard-coded "datas" directory for backward compatibility.
      val inputPath = if (args.nonEmpty) args(0) else "datas"

      // 1. Read the data, obtaining it line by line.
      val lines = sc.textFile(inputPath)

      // 2. Flatten each line into individual words.
      //    "hello world" => hello, world
      val words = lines.flatMap(_.split(" "))

      // 3-4. Count occurrences per word.
      //      reduceByKey pre-aggregates counts on each partition before the
      //      shuffle, unlike groupBy, which ships every individual occurrence
      //      across the network and materializes full per-key lists.
      //      (hello, 3) (world, 2)
      val wordToCount = words.map(word => (word, 1)).reduceByKey(_ + _)

      // 5. Collect the results to the driver and print them.
      wordToCount.collect().foreach(println)
    } finally {
      // Always close the connection, even if a job stage throws.
      sc.stop()
    }
  }
}
