package test

import org.apache.spark.{SparkConf, SparkContext}

object demo7 {

  /**
   * Reads whitespace-separated records from "data/input/users.txt",
   * finds the minimum first-field value for each second-field key,
   * then counts how many keys share each minimum value and prints
   * the resulting (minValue, count) pairs.
   */
  def main(args: Array[String]): Unit = {
    // Create the SparkConf with the app name; run locally on all cores.
    val conf = new SparkConf().setAppName("Scala_RDD_Additation3").setMaster("local[*]")
    // The SparkContext is the entry point for submitting the Spark app.
    val sc = new SparkContext(conf)
    // Only log errors to keep console output readable.
    sc.setLogLevel("ERROR")
    try {
      // Each RDD element is already one line of the file, so the former
      // flatMap(_.split("\n")) step was a no-op and has been removed.
      val lines = sc.textFile("data/input/users.txt")
      // Split each line once (the original split twice per line) and
      // build (secondField, firstField) pairs.
      val pairs = lines.map { line =>
        val fields = line.split(" ")
        (fields(1), fields(0))
      }
      // Minimum value per key. reduceByKey combines map-side, avoiding
      // shuffling every value as groupByKey().map(_.min) would.
      val minPerKey = pairs.reduceByKey(Ordering[String].min)
      // Swap to (minValue, key) and count occurrences of each min value.
      val result = minPerKey.map { case (key, minValue) => (minValue, key) }.countByKey()
      // Print the (minValue, count) pairs.
      result.foreach(println)
    } finally {
      // Always release the SparkContext, even if the job fails.
      sc.stop()
    }
  }
}
