package com.ydl.learning.flink.demo.transformations

import com.ydl.learning.flink.demo.Utils
import org.apache.flink.api.common.operators.Order
import org.apache.flink.api.scala._

/**
 * Flink DataSet partitioning demo: hash partitioning, range partitioning,
 * and sorting of records within each partition.
 *
 * @author ydl
 * @since 2020/10/14
 */
object PartitionDemo extends App with Utils {
  val input = env.fromCollection(dataDemo)

  // Hash-partition on field 1; records sharing a key all land in the same
  // partition, so a hot key can still cause data skew.
  printDataSet(input.partitionByHash(1))

  // Range-partition the data set on field 0.
  printDataSet(input.partitionByRange(0))

  // Sort records within each partition on field 0, descending.
  printDataSet(input.sortPartition(0, Order.DESCENDING))

  /**
   * Prints each element together with the id of the thread (subtask) that
   * processes it, then prints the resulting data set.
   *
   * Fix: the original implementation consumed the partition iterator with
   * `foreach` and then returned the exhausted iterator, so the downstream
   * `print()` emitted nothing. Mapping lazily keeps the logging side effect
   * while re-emitting every element.
   *
   * @param dataSet the partitioned data set to inspect
   */
  def printDataSet(dataSet: DataSet[(Int, String, Double)]): Unit = {
    dataSet.mapPartition { par =>
      // Lazy map: the println fires as each element is pulled downstream,
      // and the element itself is passed through unchanged.
      par.map { data =>
        println("当前线程id：" + Thread.currentThread().getId + "," + data)
        data
      }
    }.print()
  }

}
