package com.gt

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Quick code format:       Shift + Command + F
 * Run shortcut:            Shift + Ctrl + R
 * Find implementation:     Shift + Command + B (Ctrl + Alt + B)
 */
object AppScala {

  /**
   * Word-count entry point: reads every text file under `./data`, splits
   * lines on spaces, and prints each distinct word with its occurrence
   * count, then shuts the SparkContext down.
   *
   * @param args command-line arguments (unused)
   */
  def main(args: Array[String]): Unit = {
    println("AppScala")
    val conf = new SparkConf().setMaster("local").setAppName("wc")
    val sc = new SparkContext(conf)
    sc.setLogLevel("WARN")

    val filePath = "./data"
    val lines = sc.textFile(filePath)

    // Split on single spaces; drop the empty tokens that consecutive
    // spaces produce so they are not counted as a "word".
    val words = lines.flatMap(_.split(" ")).filter(_.nonEmpty)

    // reduceByKey aggregates map-side before the shuffle. The original
    // groupBy(k => k).map(_._2.size) shuffled every individual occurrence
    // of every word across the network just to take a collection's size —
    // the textbook Spark anti-pattern for counting.
    val counts: RDD[(String, Int)] = words.map((_, 1)).reduceByKey(_ + _)

    // Fine for a local demo; on a real cluster println runs on executors.
    counts.foreach(println)
    sc.stop()
  }

}
