package com.cobra.wc

import org.apache.spark.{SparkConf, SparkContext}

// Equivalent spark-shell one-liner (uses reduceByKey instead of aggregateByKey):
//sc.textFile("/opt/module/data/word.txt").flatMap(_.split(" ")).map((_,1)).reduceByKey(_+_).collect().foreach(println);
object Spark04_WordCount {

  /** Word-count driver: reads "datas/1.txt", splits each line on spaces,
    * and prints one (word, count) pair per distinct word.
    *
    * Note: `aggregateByKey(0)(_ + _, _ + _)` is behaviorally equivalent to
    * `reduceByKey(_ + _)` here, since the zero value is 0 and the same
    * function is used within and across partitions.
    */
  def main(args: Array[String]): Unit = {
    // Establish the connection to the Spark framework (single local thread).
    val conf = new SparkConf().setMaster("local").setAppName("WordCount")
    val sc = new SparkContext(conf)
    try {
      // Read the input file line by line, split lines into words,
      // pair each word with 1, then sum the counts per word.
      sc.textFile("datas/1.txt")
        .flatMap(_.split(" "))
        .map((_, 1))
        .aggregateByKey(0)(_ + _, _ + _)
        .collect()
        .foreach(println)
    } finally {
      // Always release the SparkContext, even if the job throws
      // (e.g. the input file is missing) — otherwise the context leaks.
      sc.stop()
    }
  }
}
