package cn.rslee.scala.demos

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext

object test {

  /**
   * Word-count demo: reads a local text file, counts whitespace-separated
   * words with an RDD pipeline, and writes the (word, count) pairs out.
   *
   * @param args optional overrides: args(0) = input path, args(1) = output
   *             directory (defaults preserve the original demo paths)
   */
  def main(args: Array[String]): Unit = {
    import scala.util.control.NonFatal

    // Suppress Spark's console progress bar for cleaner demo output.
    System.setProperty("spark.ui.showConsoleProgress", "false")
    println("开始运行wordcount")

    val sc = new SparkContext(new SparkConf().setAppName("wordcount").setMaster("local"))
    try {
      println("开始读取文本文件...")
      // BUG FIX: "file://D:/test.txt" treats "D:" as the URI authority; a
      // local Windows path needs three slashes: "file:///D:/...".
      val inputPath = if (args.length > 0) args(0) else "file:///D:/test.txt"
      // BUG FIX: the original saved to the SAME path it read from, which
      // always fails — saveAsTextFile needs a fresh output *directory*.
      val outputPath = if (args.length > 1) args(1) else "file:///D:/test_wordcount_output"

      val textFile = sc.textFile(inputPath)
      println("开始创建RDD...")
      val countsRDD = textFile
        .flatMap(_.split(" "))
        .map(word => (word, 1))
        .reduceByKey(_ + _)

      println("开始保存到文本文件...")
      try {
        countsRDD.saveAsTextFile(outputPath)
        println("已经存盘成功")
      } catch {
        // NonFatal instead of Exception: let OutOfMemoryError /
        // InterruptedException and friends propagate.
        case NonFatal(_) => println("输出目录已经存在,请先删除原目录")
      }
    } finally {
      sc.stop() // release the SparkContext even if the job fails
    }
  }
}