package day3

import Utils.SparkUtils
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD

/**
 * Spark job: counts occurrences of the second comma-separated field of each
 * line in `test1.txt` on HDFS and writes the `(field, count)` pairs to `out3`.
 */
object Test3 {
  def main(args: Array[String]): Unit = {
    val sc: SparkContext = SparkUtils.getSparkContext(4, "yy")

    val rdd: RDD[String] = sc.textFile("hdfs://hadoop10:9000/test1.txt")

    // Extract the second CSV column. Guard against malformed lines (fewer
    // than two fields) which would otherwise throw ArrayIndexOutOfBoundsException.
    val pairs: RDD[(String, Int)] = rdd
      .map(_.split(","))
      .filter(_.length > 1)
      .map(fields => (fields(1), 1))

    // reduceByKey combines counts map-side before the shuffle; the original
    // groupBy + size shuffled every single record's value just to count them.
    val counts: RDD[(String, Int)] = pairs.reduceByKey(_ + _)

    counts.saveAsTextFile("hdfs://hadoop10:9000/out3")

    sc.stop()
  }
}
