package com.burges.net.dataStream.codeRuler.transform.SingleDataStream

import org.apache.flink.api.common.functions.ReduceFunction
import org.apache.flink.api.java.tuple.Tuple
import org.apache.flink.streaming.api.scala.{KeyedStream, StreamExecutionEnvironment, _}

/**
  * Author: BurgessLee
  * Date:   2020/1/26
  * Demo of the rolling reduce operation on a keyed Flink DataStream.
  */
object ReduceDemo {

	/**
	  * Builds a small bounded stream of (String, Int) tuples, keys it by the
	  * first field, and applies a rolling reduce that sums the second field —
	  * once via a lambda and once via an explicit [[ReduceFunction]].
	  *
	  * @param args unused command-line arguments
	  */
	def main(args: Array[String]): Unit = {
		val environment = StreamExecutionEnvironment.getExecutionEnvironment

		val dataStream = environment.fromElements(("a", 3), ("b", 4), ("c", 5), ("c", 6))
		// Partition the stream by the first tuple field.
		// The positional keyBy(0) is deprecated in Flink; the key-selector form
		// is type-safe — the key type is String rather than the untyped Tuple.
		val keyedStream: KeyedStream[(String, Int), String] = dataStream.keyBy(_._1)

		// Rolling reduce: per key, keep the key and sum the second field.
		// The reduce function must be associative and commutative, since Flink
		// may combine partial results in any order.
		// Variant 1: lambda form.
		val reduceStream = keyedStream.reduce((x, y) => (x._1, x._2 + y._2))
		reduceStream.print()

		// Variant 2: explicit ReduceFunction — behaviorally identical to variant 1.
		val reduceStream02 = keyedStream.reduce(new ReduceFunction[(String, Int)] {
			override def reduce(t: (String, Int), t1: (String, Int)): (String, Int) =
				(t._1, t._2 + t1._2)
		})
		reduceStream02.print()

		environment.execute("streaming ReduceOperator")
	}

}
