package worldCount

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.rdd.RDD


/**
 * Simple Spark word-count example: reads text files from a local input
 * directory, splits lines into words, and prints each word with its count.
 *
 * NOTE(review): the object/app name says "worldCount" but this is a word
 * count; the name is kept as-is since it is the public entry point.
 */
object worldCount1 {
  def main(args: Array[String]): Unit = {

    // Create SparkConf and set the app name; local[*] runs with all local cores.
    val conf: SparkConf = new SparkConf().setMaster("local[*]").setAppName("worldCount")

    // Create the SparkContext — the entry point for submitting a Spark app.
    val sc: SparkContext = new SparkContext(conf)

    // Read all files in the input directory as an RDD of lines.
    val lines: RDD[String] = sc.textFile("E:\\bigdata0426\\spark-core-test\\input")

    // Split each line on single spaces into individual words.
    val words: RDD[String] = lines.flatMap(_.split(" "))

    // Pair each word with an initial count of 1.
    val pairs: RDD[(String, Int)] = words.map((_, 1))

    // Sum the counts per word (shuffles, combining partial sums per key).
    val counts: RDD[(String, Int)] = pairs.reduceByKey(_ + _)

    // Collect the (word, count) pairs to the driver and print them.
    counts.collect().foreach(println)

    // Fix: stop the context so Spark releases its resources cleanly;
    // the original leaked the SparkContext on exit.
    sc.stop()
  }

}
