package com.zhaosc.spark.core

import org.apache.tools.ant.taskdefs.optional.EchoProperties.Tuple
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext

/**
 * Demonstrates a data-skew mitigation technique: sample the RDD to detect
 * the most frequent ("skewed") key, then filter all records with that key
 * out of the dataset before further processing.
 */
object FilterMoreKey {
  def main(args: Array[String]): Unit = {
    // Key 3 is deliberately over-represented to simulate a skewed dataset.
    val nameList = List(
      (1, "xuruyun"),
      (2, "liangyongqi"),
      (3, "wangfei1"),
      (3, "wangfei2"),
      (3, "wangfei3"),
      (3, "wangfei4"),
      (3, "wangfei5"),
      (3, "wangfei6"),
      (3, "wangfei7"),
      (3, "wangfei9"))

    val conf = new SparkConf()
      .setMaster("local")
      .setAppName("SkewedJoin")
    val sc = new SparkContext(conf)
    try {
      val nameRdd = sc.parallelize(nameList)

      // Sample ~80% of the records (without replacement), count occurrences
      // per key, and pick the key with the highest count as the skewed key.
      // headOption guards against the (unlikely) case of an empty sample,
      // where the original take(1)(0) would have thrown.
      val skewedKey = nameRdd
        .sample(withReplacement = false, fraction = 0.8)
        .map { case (key, _) => (key, 1) }
        .reduceByKey(_ + _)
        .map(_.swap) // (count, key) so sortByKey orders by frequency
        .sortByKey(ascending = false)
        .take(1)
        .headOption
        .map { case (_, key) => key }

      // Drop every record carrying the skewed key and print the remainder.
      // (foreach returns Unit, so nothing useful can be bound from it.)
      skewedKey.foreach { hot =>
        nameRdd.filter { case (key, _) => key != hot }.foreach(println)
      }
    } finally {
      // Always release the SparkContext, even if the job above fails.
      sc.stop()
    }
  }
}

