package com.shujia.tf

import org.apache.flink.api.common.functions.ReduceFunction
import org.apache.flink.streaming.api.scala._

object Demo5Reduce {
  def main(args: Array[String]): Unit = {
    // Streaming word-count demo showing both the Scala-lambda and the
    // Java-API flavours of KeyedStream.reduce.
    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment

    // Read comma-separated words from a socket and pair each word with a count of 1.
    val lineStream: DataStream[String] = env.socketTextStream("master", 8888)

    val wordStream: DataStream[String] = lineStream.flatMap(line => line.split(","))

    val pairStream: DataStream[(String, Int)] = wordStream.map(word => (word, 1))

    // Partition the stream by the word so reduce aggregates per key.
    val keyedStream: KeyedStream[(String, Int), String] = pairStream.keyBy(pair => pair._1)

    /**
      * reduce: only usable after keyBy; incrementally aggregates all
      * records that share the same key.
      */

    // Flavour 1: Scala lambda — keep the key, sum the counts.
    val scalaApiCounts: DataStream[(String, Int)] =
      keyedStream.reduce((left, right) => (left._1, left._2 + right._2))
    //scalaApiCounts.print()

    // Flavour 2: explicit Java-API ReduceFunction instance, same aggregation.
    val javaApiCounts: DataStream[(String, Int)] = keyedStream.reduce(new ReduceFunction[(String, Int)] {
      override def reduce(left: (String, Int), right: (String, Int)): (String, Int) =
        (left._1, left._2 + right._2)
    })
    javaApiCounts.print()

    // Trigger execution of the lazily-built dataflow.
    env.execute()
  }
}
