package com.offcn.spark.p1

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Spark word-count example: reads a text file, splits lines into words,
 * and prints each word with its occurrence count.
 *
 * @Author: BigData-LGW
 * @ClassName: WordCount
 * @Date: 2020/12/5 9:46
 * @Version: 1.0
 */
object WordCount {
    /**
     * Entry point. Counts word occurrences in a text file and prints the
     * results to stdout as "word-------->count".
     *
     * @param args optional; args(0) overrides the input file path
     *             (defaults to "F:/Hello.txt" for backward compatibility).
     */
    def main(args: Array[String]): Unit = {
        // Allow the input path to be supplied on the command line instead of
        // being hard-coded; the original path remains the default.
        val inputPath = args.headOption.getOrElse("F:/Hello.txt")

        val conf = new SparkConf()
            .setMaster("local[*]")
            .setAppName("SparkWordCount")
        val sc = new SparkContext(conf)
        try {
            val lineRDD: RDD[String] = sc.textFile(inputPath)
            // Split on runs of whitespace so tabs/multiple spaces don't
            // produce empty "words" between tokens.
            val wordsRDD: RDD[String] = lineRDD.flatMap(_.split("\\s+"))
            val pairsRDD: RDD[(String, Int)] = wordsRDD.map(word => (word, 1))
            val ret: RDD[(String, Int)] = pairsRDD.reduceByKey(_ + _)
            // collect() first: RDD.foreach(println) runs on the executors, so
            // on a real cluster the output never reaches the driver's stdout.
            // Word-count results are assumed small enough to fit in driver memory.
            ret.collect().foreach { case (word, count) =>
                println(word + "-------->" + count)
            }
        } finally {
            // Always release the SparkContext, even if a job fails.
            sc.stop()
        }
    }
}
