package spark.rddKnowledge

import org.apache.spark.sql.SparkSession

/**
  * @author pinker on 2018/6/9
  */
/**
  * Classic RDD word count, printing the lineage (`toDebugString`) of each
  * intermediate RDD so the DAG built by the transformations can be inspected.
  *
  * Reads a source file, splits lines on spaces, keeps tokens of length >= 2,
  * counts occurrences with `reduceByKey`, and saves the result as text.
  */
object WordCount {
  // Scratch directory for Spark shuffle/spill files.
  val localPath = "D:/spark/temp/"
  // Warehouse directory for Spark SQL (unused by this RDD-only job, but
  // configured so the session starts cleanly on Windows).
  val hivePath = "D:/spark/hive/"

  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder()
      .appName("WordCount")
      .master("local[3]")
      .config("spark.local.dir", localPath)
      .config("spark.sql.warehouse.dir", hivePath)
      .getOrCreate()
    try {
      val sc = spark.sparkContext
      sc.setLogLevel("ERROR")
      // minPartitions = 4: force several partitions even for a small file.
      val textFile = sc.textFile("src/main/scala/spark/CoalesceDemo.scala", 4)
      val rdd1 = textFile.flatMap(line => line.split(" "))
      val rdd2 = rdd1.filter(_.length >= 2)
      val rdd3 = rdd2.map(word => (word, 1))
      val counts = rdd3.reduceByKey(_ + _)
      // toDebugString is a pure accessor; the original code discarded its
      // result, making these lines no-ops. Print the lineage of each stage
      // so the narrowing/widening of dependencies is visible.
      println(rdd1.toDebugString)
      println(rdd2.toDebugString)
      println(rdd3.toDebugString)
      println(counts.toDebugString)
      // NOTE: saveAsTextFile creates a *directory* at this path and fails
      // if it already exists — delete it between runs.
      counts.saveAsTextFile("src/main/resources/rddData/rddKnowledge.txt")
    } finally {
      // Always release the session/context, even if the job throws.
      spark.stop()
    }
  }
}
