package com.fwmagic.spark.core

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{Dataset, SparkSession}

object WordCount {

  /** Word-count driver: reads a text file, counts word occurrences, and
    * prints the (word, count) pairs sorted by ascending frequency.
    *
    * @param args optional CLI arguments; `args(0)`, when present, overrides
    *             the input path (defaults to the original HDFS location,
    *             keeping the no-argument behavior backward-compatible).
    */
  def main(args: Array[String]): Unit = {
    val inputPath = args.headOption.getOrElse("hdfs://ns1/user/zx_dm/data/a.txt")

    val spark = SparkSession.builder()
      .appName("wc")
      //.master("local") // uncomment for local development runs
      .getOrCreate()

    // try/finally guarantees the session is shut down even if the job fails.
    try {
      import spark.implicits._

      // Raw lines as a Dataset (renamed from `dataRDD`: this is not an RDD).
      val lines: Dataset[String] = spark.read.textFile(inputPath)

      // One (word, 1) pair per whitespace-separated token. The nonEmpty
      // filter drops the empty token that split("\\s+") produces for lines
      // with leading whitespace, which the original miscounted as a word.
      val wordAndOne: Dataset[(String, Int)] = lines
        .flatMap(_.split("\\s+"))
        .filter(_.nonEmpty)
        .map((_, 1))

      // Drop to the RDD API for reduceByKey; sort by count, ascending.
      val result: RDD[(String, Int)] = wordAndOne.rdd.reduceByKey(_ + _).sortBy(_._2)

      // collect() pulls all counts to the driver — acceptable for a word
      // count, whose result set is bounded by vocabulary size.
      result.collect().foreach(println)
    } finally {
      // stop() is the canonical SparkSession shutdown (close() just delegates).
      spark.stop()
    }
  }

}
