package com.dtkavin.spark.demo

import org.apache.spark.{Logging, SparkConf, SparkContext}
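// Note: org.apache.spark.Logging is public only in Spark 1.x; it was removed from
// the public API in Spark 2.0, so this import assumes a 1.x build of Spark.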

/**
  * Created by Administrator on 2016/4/11 0011.
  * Runs WordCount in local mode.
  * Hadoop permissions: configured locally.
  */
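// Empty class; the runnable logic lives in the companion object below.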
class WordCount {

}

object WordCount extends Logging {
  def main(args: Array[String]): Unit = {
//    // Local-mode variant: run WordCount on two local cores without a cluster.
//    val conf = new SparkConf().setAppName("WordCount").setMaster("local[2]")
//    val sc = new SparkContext(conf)
//
//    logInfo("SparkContext created...")
//
//    // Alternative sink: write the counts to the local filesystem instead of collecting.
//    // sc.textFile("D:\\data\\input-index").flatMap(_.split(" ")).map((_, 1)).reduceByKey(_ + _).saveAsTextFile("D:\\wcOut")
//    val res = sc.textFile("D:\\data\\input-index").flatMap(_.split(" ")).map((_, 1)).reduceByKey(_ + _).collect()
//    println(res.toBuffer)
//
//    logInfo("done")

    // Cluster mode: connect to the standalone master and read/write HDFS.
    val conf = new SparkConf().setAppName("SparkWordCount").setMaster("spark://spark01:7077")
    val sc = new SparkContext(conf)
    // Ship the application jar so the executors can load this class.
    sc.addJar("E:\\idea_workspace\\spark-simulate\\spark-demo\\target\\spark-demo-1.0.jar")
    // Word count: split each line on spaces, pair every word with 1, sum the counts per word.
    sc.textFile("hdfs://spark01:9000/wordcount")
      .flatMap(_.split(" "))
      .map((_, 1))
      .reduceByKey(_ + _)
      .saveAsTextFile("hdfs://spark01:9000/output/wordcount")
    sc.stop()
  }
}
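
// Usage sketch (an assumption, not part of the original project): when the jar is
// launched with spark-submit, the hard-coded sc.addJar path above is unnecessary,
// because spark-submit ships the application jar to the executors itself. The class
// name and master URL mirror the values used above; the jar path is hypothetical.
//
//   spark-submit \
//     --class com.dtkavin.spark.demo.WordCount \
//     --master spark://spark01:7077 \
//     target/spark-demo-1.0.jar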
