import org.apache.spark.sql.SparkSession

/**
  * Spark word-count example: reads a local text file, splits each line on
  * spaces, counts occurrences of every word, and writes the (word, count)
  * pairs out as text.
  *
  * @author Abyss
  * @date 2019/8/30
  */
object First {

  /**
    * Word-count entry point.
    *
    * Reads a text file, splits each line on single spaces, counts the
    * occurrences of every word with `reduceByKey`, and saves the resulting
    * (word, count) pairs as text output.
    *
    * @param args optional path overrides: args(0) = input file, args(1) =
    *             output directory. When absent, the original hard-coded
    *             locations are used, so existing invocations are unchanged.
    */
  def main(args: Array[String]): Unit = {
    // Generalized: paths were hard-coded; defaults keep prior behavior.
    val inputPath  = if (args.length > 0) args(0) else "/Users/abyss/Dev/toys/wordcount/input/aaa.txt"
    val outputPath = if (args.length > 1) args(1) else "/Users/abyss/Dev/toys/wordcount/output"

    val session = SparkSession.builder().appName("wordcount").master("local").getOrCreate()
    try {
      val words = session.sparkContext.textFile(inputPath).flatMap(_.split(" "))
      // Map each word to 1, then sum per key to get total occurrence counts.
      val counts = words.map(word => (word, 1)).reduceByKey(_ + _)
      counts.saveAsTextFile(outputPath)
    } finally {
      // Bug fix: previously stop() was skipped if the job threw, leaking the
      // SparkSession. Always release it, success or failure.
      session.stop()
    }
  }

}
