package com.wangwg.sparkTest

import java.util.UUID

import org.apache.spark.{SparkConf, SparkContext}

/**
 * Simple Spark word-count driver.
 *
 * Reads a text file, counts whitespace-separated words, prints the counts to
 * the driver's stdout, and writes them to a UUID-named HDFS output directory.
 *
 * Usage: WordCount [master] [inputPath] [outputBase]
 * All arguments are optional; defaults preserve the original hard-coded
 * behaviour (local[5], hdfs://geomesa1.com:9000/spark/...).
 */
object WordCount {
  def main(args: Array[String]): Unit = {
    // Allow overriding the hard-coded values from the command line;
    // omitted arguments fall back to the original defaults.
    val master     = if (args.length > 0) args(0) else "local[5]"
    val inputPath  = if (args.length > 1) args(1) else "hdfs://geomesa1.com:9000/spark/test.txt"
    val outputBase = if (args.length > 2) args(2) else "hdfs://geomesa1.com:9000/spark/result/"

    val sparkConf    = new SparkConf().setAppName("SparkTest001").setMaster(master)
    val sparkContext = new SparkContext(sparkConf)
    try {
      // Classic word count: tokenize on single spaces, pair each word with 1,
      // then sum the pairs per key on the cluster.
      val counts = sparkContext
        .textFile(inputPath)
        .flatMap(_.split(" "))
        .map((_, 1))
        .reduceByKey(_ + _)

      // The RDD is used twice (printed and saved); cache it so the input is
      // not re-read and re-shuffled for the second action.
      counts.cache()

      // NOTE(review): collect() pulls every distinct word to the driver —
      // acceptable only while the vocabulary is small.
      counts.collect().foreach(println)

      // Save the reduced RDD directly from the executors. The previous code
      // collected to the driver and re-parallelized the array, which forced
      // all result data through the driver for no benefit.
      val id = UUID.randomUUID.toString
      counts.saveAsTextFile(outputBase + id)
    } finally {
      // Release cluster resources even if the job above fails.
      sparkContext.stop()
    }
  }
}
