package com.atguigu.sparkcore.day01.singlevalue

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Author atguigu
 * Date 2020/10/27 15:49
 */
object DistinctDemo {
    /**
     * Demonstrates de-duplicating an RDD of case-class records by a single
     * key field: group the records by that field, then keep one
     * representative element from each group.
     */
    def main(args: Array[String]): Unit = {
        val conf: SparkConf = new SparkConf().setAppName("DistinctDemo").setMaster("local[2]")
        val sc: SparkContext = new SparkContext(conf)

        // Two users with the same age but different names; plain `distinct`
        // would keep both, since case-class equality compares every field.
        val users = List(User(10, "zs"), User(10, "lisi"))
        val userRdd: RDD[User] = sc.parallelize(users)

        // De-duplicate by age: one arbitrary representative per age group.
        val dedupedRdd: RDD[User] = userRdd
            .groupBy(_.age)
            .map { case (_, sameAgeUsers) => sameAgeUsers.head }

        dedupedRdd.collect.foreach(println)

        sc.stop()
    }
}
/*
De-duplication options:
    distinct - removes exact duplicates (relies on equals/hashCode)
    groupBy  - group by a chosen key, then keep one element per group
 */


/**
 * Simple user record used in the de-duplication demo.
 *
 * As a case class, equality and hashing compare all fields (age AND name),
 * so `distinct` only removes records that match on both. To make `distinct`
 * treat users with the same age as duplicates, one would override
 * `hashCode`/`equals` to consider only `age`.
 *
 * @param age  the user's age (used as the grouping key in the demo)
 * @param name the user's name
 */
case class User(age: Int, name: String)
