package com.csw.flink.transformation

import org.apache.flink.api.common.functions.ReduceFunction
import org.apache.flink.streaming.api.scala._
import org.apache.flink.api.java.functions.KeySelector

object Demo04KeyBy {
  /**
    * Demonstrates Flink's keyBy partitioning and the aggregation operators
    * (sum / reduce) that typically follow it, shown in both the Scala-lambda
    * style and the Java anonymous-class style.
    *
    * Reads comma-separated words from a socket on master:8888, maps each word
    * to (word, 1), keys the stream by the word, and aggregates the counts.
    */
  def main(args: Array[String]): Unit = {

    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment

    env.setParallelism(12)

    // Source: each socket line may contain several comma-separated words.
    val ds: DataStream[String] = env.socketTextStream("master", 8888)

    // Split lines into words and pair each word with an initial count of 1.
    val kvDS: DataStream[(String, Int)] = ds.flatMap(_.split(",")).map((_, 1))

    /**
      * keyBy routes all records with the same key (here: the word) to the
      * same task. keyBy is usually followed by an aggregation operator.
      */

    // Scala style: key extractor as a lambda.
    val ds1: KeyedStream[(String, Int), String] = kvDS.keyBy(_._1)

    // Java style: key extractor as an anonymous KeySelector.
    // (val instead of var — the reference is never reassigned.)
    val ds2: KeyedStream[(String, Int), String] = kvDS.keyBy(new KeySelector[(String, Int), String] {
      override def getKey(value: (String, Int)): String = value._1
    })

    // sum(field index or field name): per-key running sum of the counts.
    val ds3: DataStream[(String, Int)] = ds2.sum(1)

    // reduce: used after keyBy to aggregate all records that share a key.

    // Scala style.
    val ds4: DataStream[(String, Int)] = ds2.reduce((a, b) => (a._1, a._2 + b._2))

    // Java style: anonymous ReduceFunction.
    // (val instead of var — the reference is never reassigned.)
    val ds5: DataStream[(String, Int)] = ds2.reduce(new ReduceFunction[(String, Int)] {
      override def reduce(value1: (String, Int), value2: (String, Int)): (String, Int) =
        (value1._1, value1._2 + value2._2)
    })

    ds5.print()
    // Flink is lazily evaluated: nothing runs until execute() is called.
    env.execute()
  }
}
