package com.doit.day03

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * @Author:
 * @WX: 17710299606
 * @Tips: 学大数据 ,到多易教育
 * @DOC: https://blog.csdn.net/qq_37933018?spm=1000.2115.3001.5343
 * @Description:
 */
/**
 * Demo driver: distributes account records across partitions by city using
 * a custom partitioner ([[MyPartitioner02]]), then prints each record's key
 * alongside its partition index to verify the partitioning.
 *
 * Input: text files under data/accounts/, CSV rows where column 1 is the city.
 */
object Test02 {
  def main(args: Array[String]): Unit = {

    val conf = new SparkConf()
      .setMaster("local[*]")
      .setAppName(this.getClass.getSimpleName)
    val sc = SparkContext.getOrCreate(conf)

    try {
      val rdd = sc.textFile("data/accounts/")

      // Collect the distinct city names to the driver: the custom partitioner
      // needs the full city list up front to assign one partition per city.
      val citys: Array[String] = rdd
        .map(_.split(",")(1))
        .distinct()
        .collect()

      // Key each record by its city: (city, (field0, city)).
      val rdd2 = rdd.map { line =>
        val arr = line.split(",")
        (arr(1), (arr(0), arr(1)))
      }

      // Repartition so that all records sharing a city land in the same partition.
      val res: RDD[(String, (String, String))] =
        rdd2.partitionBy(new MyPartitioner02(citys))

      // Diagnostic output: "city-----partitionIndex" for every record.
      // NOTE(review): foreach(println) prints on executors; fine in local[*] mode.
      res.mapPartitionsWithIndex { (partitionIndex, records) =>
        records.map(record => record._1 + "-----" + partitionIndex)
      }.foreach(println)
    } finally {
      // Always release the SparkContext, even if the job throws.
      sc.stop()
    }
  }

}
