package com.mjf.transformation

import org.apache.flink.api.common.RuntimeExecutionMode
import org.apache.flink.api.common.functions.ReduceFunction
import org.apache.flink.streaming.api.scala._

/**
 * Demonstrates the `reduce` transformation on a keyed stream: performs a
 * classic word count over a small in-memory collection, summing the counts
 * per word with a custom [[MyReduceFunction]].
 */
object ReduceDemo {
  def main(args: Array[String]): Unit = {

    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment
    // Single parallelism keeps the printed output deterministic for the demo.
    env.setParallelism(1)
    // BATCH mode emits only the final reduced value per key instead of
    // every intermediate accumulation.
    env.setRuntimeMode(RuntimeExecutionMode.BATCH)

    val source: DataStream[String] = env.fromCollection(List("hello world", "hello java"))

    // Split lines on non-word characters, pair each word with 1,
    // key by the word, and sum the counts via the reduce function.
    val result: DataStream[(String, Int)] = source.flatMap(_.split("\\W+"))
      .map((_, 1))
      .keyBy(_._1)
      .reduce(new MyReduceFunction)

    result.print("result")

    // Fixed copy-paste bug: the job was previously named after UnionDemo.
    env.execute(ReduceDemo.getClass.getName)

  }
}

/**
 * Incrementally merges two `(word, count)` pairs that share the same key:
 * keeps the word and adds the two counts together.
 */
class MyReduceFunction extends ReduceFunction[(String, Int)] {
  override def reduce(acc: (String, Int), in: (String, Int)): (String, Int) = {
    val (word, runningCount) = acc
    val (_, increment) = in
    (word, runningCount + increment)
  }
}