package com.shujia.flink.tf

import org.apache.flink.api.common.functions.ReduceFunction
import org.apache.flink.streaming.api.scala._

object Demo5Reduce {
  def main(args: Array[String]): Unit = {

    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment

    // Unbounded source: raw text lines read from a socket.
    val lines: DataStream[String] = env.socketTextStream("master", 8888)

    // Split each comma-separated line into words and pair every word with an initial count of 1.
    val wordPairs: DataStream[(String, Int)] = lines
      .flatMap(_.split(","))
      .map(word => (word, 1))

    // Partition the stream by word so all records with the same key reach the same subtask.
    val keyedPairs: KeyedStream[(String, Int), String] = wordPairs.keyBy(_._1)

    /**
      * reduce aggregates records that share the same key.
      * Flink does no pre-aggregation here: every incoming record
      * emits an updated running result downstream.
      */

    // Scala API: reduce expressed as a function literal.
    val reducedScala: DataStream[(String, Int)] =
      keyedPairs.reduce((acc, cur) => (acc._1, acc._2 + cur._2))

    // Java API: the same aggregation via an anonymous ReduceFunction.
    val reducedJava: DataStream[(String, Int)] = keyedPairs.reduce(new ReduceFunction[(String, Int)] {
      override def reduce(acc: (String, Int), cur: (String, Int)): (String, Int) =
        (acc._1, acc._2 + cur._2)
    })

    reducedJava.print()

    // Submit the job; nothing runs until execute() is called.
    env.execute()
  }
}
