import org.apache.spark.{SparkConf, SparkContext}

/**
 * Minimal Spark word-count driver.
 *
 * Reads whitespace-separated words from `data/word.txt`, counts occurrences
 * of each distinct word with `reduceByKey`, and prints the (word, count)
 * pairs to stdout.
 *
 * NOTE(review): the object name "Saprk_Env" is a typo for "Spark_Env", but it
 * is the public entry-point identifier (referenced by spark-submit / build
 * config), so it is intentionally left unchanged.
 */
object Saprk_Env {

  def main(args: Array[String]): Unit = {
    // Local single-threaded master; suitable only for development runs.
    val conf = new SparkConf().setMaster("local").setAppName("saprk_env")
    val sc   = new SparkContext(conf)

    // Load the raw lines and echo them, matching the original's debug output.
    val lines = sc.textFile("data/word.txt")
    lines.collect().foreach(println)

    // Alternative (commented-out in the original) pipelines for comparison:
    //   groupBy(_._1)            -> (java, CompactBuffer((java,1), (java,1)))
    //   map((_, 1)) only         -> one (word, 1) pair per occurrence
    // The active pipeline sums the 1s per key to get final counts.
    val wordCounts =
      lines
        .flatMap(line => line.split(" "))
        .map(word => (word, 1))
        .reduceByKey(_ + _)

    wordCounts.collect().foreach(println)

    // Release Spark resources before exiting.
    sc.stop()
  }

}
