package com.offcn.spark.p1

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Spark word-count driver: reads a text file from the given input path,
 * counts word occurrences, and prints each (word, count) pair.
 *
 * @Author: BigData-LGW
 * @ClassName: RemotWordCount
 * @Date: 2020/12/5 15:13
 * @Version: 1.0
 */
object RemotWordCount {

    /**
     * Entry point. Expects exactly one argument: the input path readable by
     * `SparkContext.textFile` (local path, HDFS, etc.). Splits each line on
     * whitespace, counts words via `reduceByKey`, and prints the results.
     *
     * Exits with status 1 when the argument count is wrong.
     *
     * @param args command-line arguments; args(0) is the input path
     */
    def main(args: Array[String]): Unit = {
        if (args == null || args.length != 1) {
            println(
                """
                  |Usage:<input>
                  |""".stripMargin)
            System.exit(1)
        }
        val Array(input) = args

        // getSimpleName on a Scala `object` returns a trailing '$'
        // (e.g. "RemotWordCount$"); strip it so the Spark UI shows a
        // clean application name.
        val conf = new SparkConf()
            .setAppName(RemotWordCount.getClass.getSimpleName.stripSuffix("$"))
        val sc = new SparkContext(conf)

        val lineRDD: RDD[String] = sc.textFile(input)
        println(s"lines rdds partition is:${lineRDD.getNumPartitions}")

        // Split on any run of whitespace, map each word to (word, 1),
        // then sum the counts per word.
        val ret = lineRDD.flatMap(_.split("\\s+")).map((_, 1)).reduceByKey(_ + _)

        // NOTE(review): RDD.foreach runs on the executors, so in cluster mode
        // these println calls go to executor stdout, not the driver console.
        // If driver-side output is required and the result set is small, use
        // ret.collect().foreach(...) instead — confirm intended deployment mode.
        ret.foreach { case (word, count) => println(word + "----->" + count) }

        sc.stop()
    }
}
