package cn.doitedu.day02

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

import scala.util.Random

/**
 * Demo: deduplicating an RDD of strings with `RDD.distinct()`.
 *
 * Also shows (commented out) how `distinct` can be emulated manually
 * via `groupByKey` or `reduceByKey` on a keyed RDD.
 */
object T22_DistinctDemo {

  def main(args: Array[String]): Unit = {

    // 1. Create the SparkConf (local mode with 4 worker threads for this demo).
    val conf = new SparkConf().setAppName("DistinctDemo")
      .setMaster("local[4]")
    // 2. Create the SparkContext — the entry point for submitting Spark jobs.
    val sc = new SparkContext(conf)

    try {
      // Sample data containing duplicates, spread over 3 partitions.
      val arr = Array(
        "spark", "hive", "spark", "flink",
        "spark", "hive", "hive", "flink",
        "flink", "flink", "flink", "spark"
      )
      val rdd1: RDD[String] = sc.parallelize(arr, 3)

      // distinct() removes duplicates across ALL partitions (incurs a shuffle).
      val resRdd = rdd1.distinct()

      // Equivalent hand-rolled implementations of distinct, kept for reference:
      //val tpRdd: RDD[(String, Null)] = rdd1.map((_, null))
      //val resRdd: RDD[String] = tpRdd.groupByKey().keys
      //val resRdd: RDD[String] = tpRdd.reduceByKey((a, _) => a).keys

      println(resRdd.collect().toBuffer)
    } finally {
      // Always release the context; the original demo leaked it on exit.
      sc.stop()
    }
  }

}
