package com.zhl.spark.core.wc

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * @program: demos
 * @description: Spark word-count examples — two RDD implementations
 *               (reduceByKey with descending sort, and groupBy).
 * @author: 刘振华
 * @create: 2020-11-27 17:00
 **/
object WordCount {

    /**
     * Entry point: builds a single-threaded local SparkContext, runs one of
     * the word-count implementations below, then shuts the context down.
     */
    def main(args: Array[String]): Unit = {
        // "local" runs Spark in-process with one worker thread.
        val conf = new SparkConf().setMaster("local").setAppName("wordcount")

        val sc = new SparkContext(conf)

//        wc1(sc)

        wc2(sc)

        // Release all Spark resources.
        sc.stop()

    }



    /**
     * Word count via reduceByKey: splits each line on spaces, sums counts per
     * word, and prints (word, count) pairs sorted by descending count.
     *
     * NOTE(review): the input path "spark//datas" contains a double slash;
     * most filesystems normalize it, but confirm it matches the project layout.
     */
    def wc2(sc: SparkContext): Unit = {

        sc.textFile("spark//datas")
            .flatMap(_.split(" "))
            .map((_, 1))
            .reduceByKey(_ + _)
            // sortBy on the count replaces the original
            // map(swap) -> sortByKey(false) -> map(swap) round trip:
            // same descending-by-count ordering without flipping keys twice.
            .sortBy(_._2, ascending = false)
            .collect()
            .foreach(println)
    }

    /**
     * Word count via groupBy: groups identical words and counts the size of
     * each group. Less efficient than wc2 — every occurrence of a word is
     * shuffled before counting — but kept as a didactic alternative.
     */
    def wc1(sc: SparkContext): Unit = {

        val lines: RDD[String] = sc.textFile("spark//datas")

        //        val words = lines.flatMap(_.split(" "))
        val words = lines.flatMap(line => line.split(" "))

        // groupBy ships all occurrences of each word to a single partition.
        val group = words.groupBy(word => word)

        val rs = group.map {
            case (word, list) =>
                (word, list.size)
        }

        rs.collect().foreach(println)
    }

}
