package com.zt.bigdata.flink.stream

import java.util

import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.apache.flink.streaming.api.scala._
import scala.collection.JavaConverters._

/**
  * @ClassName Demo
  * @Description
  * @Author zhangtonghy
  * @Date 2019-07-17 14:40
  * @Copyright: 版权所有 (C) zt zt.
  * @注意 ：本内容仅限于zt内部传阅，禁止外泄以及用于其他的商业目的
  **/
object Demo {
  /**
    * Streams a small in-memory set of (key, amount) tuples, computes running
    * per-key sums, folds every partial sum into one map, and prints the grand
    * total after each update. Final expected total: 15 + 11 + 13 = 39.
    */
  def main(args: Array[String]): Unit = {
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    env.setParallelism(2)

    val order = env.fromElements(
      ("A", 1), ("A", 4), ("A", 2), ("A", 8),
      ("B", 3), ("B", 7), ("B", 1),
      ("C", 5), ("C", 6), ("C", 2)
    )

    // Per-key running sums: A -> 15, B -> 11, C -> 13.
    // keyBy(_._1) replaces the deprecated position-based keyBy(0) and keeps
    // the stream typed as (String, Int) instead of a generic Java tuple.
    order
      .keyBy(_._1)
      .sum(1)
      // Route every partial sum to a single constant key so the fold sees
      // all of them. NOTE: this stage is effectively single-threaded
      // regardless of the parallelism of 2 configured above.
      .keyBy(_ => "")
      // Accumulate into an immutable Map. The original mutated a shared
      // java.util.HashMap in place and re-emitted the same instance each
      // time, which is unsafe with Flink's object reuse and state
      // serialization; an immutable accumulator emits a fresh snapshot.
      .fold(Map.empty[String, Int])((acc, partial) => acc + (partial._1 -> partial._2))
      // Print the running grand total across all keys after each update.
      .addSink(latest => println(latest.values.sum))

    env.execute()
  }
}
