package com.gitee.Transformation

import com.gitee.Transformation.partition.PartitionInspector
import org.apache.flink.api.common.operators.Order
import org.apache.flink.api.scala.{DataSet, ExecutionEnvironment}

import scala.collection.mutable
import scala.util.Random

/*
  Demonstrates Flink DataSet partitioning strategies: hash partitioning,
  range partitioning, and per-partition sorting.
 */
object PartitionTest extends PartitionInspector {

  /**
   * Builds a small (id, groupKey, label) data set, redistributes it with
   * different partitioning strategies, and hands each result to the
   * inherited `inspector` for examination.
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {
    val env: ExecutionEnvironment = ExecutionEnvironment.getExecutionEnvironment
    // Two parallel subtasks so the effect of each partitioning strategy is visible.
    env.setParallelism(2)

    // Sample records: (id, group key, label). Declared as an immutable Seq
    // literal instead of the deprecated mutable.MutableList (removed in
    // Scala 2.13) built up with repeated `+=` calls.
    val datas: Seq[(Int, Long, String)] = Seq(
      (1, 1L, "Hello"),
      (2, 2L, "Hello"),
      (3, 2L, "Hello"),
      (4, 3L, "Hello"),
      (5, 3L, "Hello"),
      (6, 3L, "hehe"),
      (7, 4L, "hehe"),
      (8, 4L, "hehe"),
      (9, 4L, "hehe"),
      (10, 4L, "hehe"),
      (11, 5L, "hehe"),
      (12, 5L, "hehe"),
      (13, 5L, "hehe"),
      (14, 5L, "hehe"),
      (15, 5L, "hehe"),
      (16, 6L, "hehe"),
      (17, 6L, "hehe"),
      (18, 6L, "hehe"),
      (19, 6L, "hehe"),
      (20, 6L, "hehe"),
      (21, 6L, "hehe")
    )

    // Wildcard import supplies the implicit TypeInformation needed by fromCollection.
    import org.apache.flink.api.scala._
    // Shuffle so the incoming order does not accidentally mimic a partitioning.
    val collection: DataSet[(Int, Long, String)] = env.fromCollection(Random.shuffle(datas))

    // Hash-partition on the third field (the label).
    val result01: DataSet[(Int, Long, String)] = collection.partitionByHash(_._3)
    inspector(result01)

    // Range-partition on the first field (the numeric id).
    val result02: DataSet[(Int, Long, String)] = collection.partitionByRange(_._1)
    inspector(result02)

    // Sort each partition locally by the second field, descending.
    val result03: DataSet[(Int, Long, String)] = collection.sortPartition(_._2, Order.DESCENDING)
    inspector(result03)
  }
}
