package com.shujia.flink.tf

import org.apache.flink.api.common.functions.ReduceFunction
import org.apache.flink.api.java.functions.KeySelector
import org.apache.flink.streaming.api.scala._

object Demo4KeyBy {
  def main(args: Array[String]): Unit = {
    // Flink streaming job: read comma-separated words from a socket,
    // partition them by word, and print a running count per word.
    val environment: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment

    // Source: one text line per message from host "master", port 8888.
    val lines: DataStream[String] = environment.socketTextStream("master", 8888)

    // Split each line on commas and pair every word with an initial count of 1.
    val wordCounts: DataStream[(String, Int)] = lines
      .flatMap(line => line.split(","))
      .map(word => (word, 1))

    /**
      * keyBy routes all records sharing the same key (here: the word) to the
      * same task, so a downstream aggregation sees every occurrence of a word.
      * keyBy is normally followed by an aggregating operator.
      */

    // Scala-style key selector: a plain lambda extracting the word.
    val keyedScalaStyle: KeyedStream[(String, Int), String] = wordCounts.keyBy(_._1)

    // Java-style key selector: an explicit KeySelector implementation.
    val keyedJavaStyle: KeyedStream[(String, Int), String] =
      wordCounts.keyBy(new KeySelector[(String, Int), String] {
        override def getKey(value: (String, Int)): String = value._1
      })

    //val summed: DataStream[(String, Int)] = keyedJavaStyle.sum(1)

    /**
      * reduce: used after keyBy to fold all records of one key into a single
      * running aggregate per key.
      */

    // Scala-style reduce (equivalent lambda form):
    //val reducedScalaStyle: DataStream[(String, Int)] = keyedJavaStyle.reduce((kv1, kv2) => (kv1._1, kv1._2 + kv2._2))

    // Java-style reduce: keep the key, sum the running counts.
    val runningCounts: DataStream[(String, Int)] = keyedJavaStyle.reduce(new ReduceFunction[(String, Int)] {
      override def reduce(value1: (String, Int), value2: (String, Int)): (String, Int) =
        (value1._1, value1._2 + value2._2)
    })

    runningCounts.print()

    // The DataStream API is lazy: nothing runs until execute() is called.
    environment.execute()
  }
}
