package com.andnnl

import org.apache.spark.{SparkConf, SparkContext}
import org.slf4j.{Logger, LoggerFactory}

/**
  * Spark word-count driver: reads a text file from HDFS, counts word
  * occurrences, and prints the 10 most frequent words.
  */
object WordCount {

    /**
      * Entry point. Runs a word count over `hdfs://master/test.txt` and prints
      * the 10 most frequent words with their counts to stdout.
      *
      * @param args command-line arguments (currently unused)
      */
    def main(args: Array[String]): Unit = {
        val log: Logger = LoggerFactory.getLogger(WordCount.getClass)

        // Windows-only workaround: point Hadoop at a local winutils install
        // when neither HADOOP_HOME nor hadoop.home.dir is configured.
        if (System.getenv("HADOOP_HOME") == null && System.getProperty("hadoop.home.dir") == null) {
            System.setProperty("hadoop.home.dir", "D:\\hadoop-common-2.2.0-bin-master")
        }

        log.info("开始执行word count")

        val conf = new SparkConf()
            .setAppName("wordcount yarn")
            // .setMaster("spark://master:7077") // switch to this to run on a standalone cluster
            .setMaster("local")
        val sc = new SparkContext(conf)

        try {
            val lines = sc.textFile("hdfs://master/test.txt")

            // Split on spaces, drop empty tokens, count occurrences,
            // then take the 10 highest counts in descending order.
            lines.flatMap(_.split(" "))
                .filter(_.nonEmpty)
                .map((_, 1))
                .reduceByKey(_ + _)
                .sortBy(_._2, ascending = false)
                .take(10)
                .foreach(pair => println(pair.toString()))
        } finally {
            // Always release the SparkContext, even if the job above fails,
            // so executors and the UI port are cleaned up.
            sc.stop()
        }

        log.info("结束执行word count")
    }
}

