package cn.edu.flink.scala.tutorial.partition

import cn.edu.flink.scala.tutorial.wordcount.source.WordCountSourceFunction
import org.apache.flink.api.common.functions.Partitioner
import org.apache.flink.streaming.api.scala._

object PartitionerTest {
  /**
   * Demonstrates Flink's stream repartitioning (physical partitioning) strategies.
   *
   * Reads word tuples from a custom source at parallelism 2, then prints the
   * same stream through each partitioning strategy into sinks of parallelism 10,
   * so the console prefix / Web UI operator name shows how records are routed.
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {
    val env = StreamExecutionEnvironment.createLocalEnvironmentWithWebUI()
    env.setParallelism(2)
    // Read lines from the project's word-count source function.
    val lineDS = env.addSource(WordCountSourceFunction())

    val wordone = lineDS.flatMap(_.split(" ")).map((_, 1))

    // Upstream (2) and sink (10) parallelism differ, so Flink implicitly
    // repartitions using the default rebalance (round-robin) strategy.
    wordone.print("print").setParallelism(10).name("print")

    // 1. global: send every record to the first downstream subtask only.
    wordone.global.print("global").setParallelism(10).name("global")
    // 2. shuffle: distribute records uniformly at random across subtasks.
    wordone.shuffle.print("shuffle").setParallelism(10).name("shuffle")
    // 3. rebalance: round-robin across all downstream subtasks.
    //    (Label fixed: was misspelled "rebalace".)
    wordone.rebalance.print("rebalance").setParallelism(10).name("rebalance")
    // 4. rescale: round-robin, but only within a local group of subtasks.
    wordone.rescale.print("rescale").setParallelism(10).name("rescale")
    // 5. broadcast: replicate every record to all downstream subtasks.
    wordone.broadcast.print("broadcast").setParallelism(10).name("broadcast")
    // 6. forward: keep each record in the same subtask. Forward partitioning
    //    does not allow a change of parallelism, hence no setParallelism here;
    //    chaining is disabled so the operator is visible on its own in the UI.
    wordone.forward.print("forward").disableChaining().name("forward")
    // 7. keyBy: hash-partition by key (here, the word itself).
    wordone.keyBy(_._1).print("hash").setParallelism(10).name("hash")

    // Custom partitioner keyed on the count field: routes every record to
    // partition 1 regardless of key, purely to illustrate the API.
    wordone.partitionCustom(new Partitioner[Int] {
      override def partition(k: Int, i: Int): Int = 1
    }, _._2).print("partitionCustom").setParallelism(10).name("partitionCustom")

    env.execute("PartitionerTest")
  }
}
