package com.mjf.partition

import org.apache.flink.api.common.functions.Partitioner
import org.apache.flink.streaming.api.scala._

/** Demo of Flink's `partitionCustom`: routes records to subtasks using a
  * user-supplied [[MyPartitioner]] keyed on the first tuple field.
  */
object PartitionCustomDemo {
  def main(args: Array[String]): Unit = {

    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment
    // Three parallel subtasks so the partitioner has multiple targets.
    env.setParallelism(3)

    // Without an explicit partitioner, elements are distributed round-robin.
    val input: DataStream[(String, String)] = env.fromElements(
      ("hadoop", "123"), ("hadoop", "abc"),
      ("spark", "123"), ("spark", "abc"),
      ("fink", "123"), ("fink", "234"), ("fink", "345"),
      ("fink", "abc"), ("fink", "bcd"), ("fink", "cde")
    )

    input.print("source")

    // Custom partitioning: the key selector `_._1` extracts the String key,
    // and MyPartitioner maps each key to a subtask index.
    val partitioned: DataStream[(String, String)] =
      input.partitionCustom(new MyPartitioner, _._1)

    partitioned.print("repartition")

    env.execute(PartitionCustomDemo.getClass.getName)

  }
}

/** Flink [[Partitioner]] that assigns each String key to a partition by hash.
  *
  * Uses `Math.floorMod` instead of `key.hashCode.abs % numPartitions`: the
  * abs-based form is buggy because `Int.MinValue.abs == Int.MinValue`
  * (two's-complement overflow), so a key whose hashCode is `Int.MinValue`
  * (e.g. "polygenelubricants") would yield a negative, out-of-range partition
  * index and crash the job at runtime. `floorMod` always returns a value in
  * `[0, numPartitions)` for a positive modulus.
  */
class MyPartitioner extends Partitioner[String] {
  override def partition(key: String, numPartitions: Int): Int =
    Math.floorMod(key.hashCode, numPartitions)
}