package com.grf.spark

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
  * 计算单词个数
  */
/**
  * Word count driver: reads words from a text file, keeps only the words
  * containing "H", lowercases them, and prints the occurrence count of each.
  *
  * The input path defaults to "a" (the original hard-coded value) but can be
  * overridden by passing it as the first command-line argument.
  */
object WordCount {
  def main(args: Array[String]): Unit = {
    // Task: read words from the file, select the words containing "H",
    // count how often each occurs, and convert the words to lowercase.
    val conf = new SparkConf().setAppName("test").setMaster("local[*]")
    val sc = new SparkContext(conf)
    try {
      // Backward-compatible generalization: first CLI arg overrides the
      // previously hard-coded path "a".
      val path = if (args.nonEmpty) args(0) else "a"
      val rdd: RDD[String] = sc.textFile(path)
      val res = rdd
        .flatMap(_.split(" "))
        .filter(_.contains("H")) // requirement: keep words containing "H" (literal uppercase H — confirm if case-insensitive match is wanted)
        .map(_.toLowerCase)      // requirement: output words in lowercase
        .map((_, 1))
        .reduceByKey(_ + _)      // sum the per-word 1s into total counts
      res.collect().foreach(println)
    } finally {
      sc.stop() // release the SparkContext even if the job fails
    }
  }
}
