package com.gitee.broadcast

import org.apache.flink.api.common.JobExecutionResult
import org.apache.flink.api.common.accumulators.IntCounter
import org.apache.flink.api.common.functions.RichMapFunction
import org.apache.flink.api.scala.ExecutionEnvironment
import org.apache.flink.configuration.Configuration
import org.apache.flink.core.fs.FileSystem.WriteMode
/*
  Accumulator recipe:
  1. Decide which elements need to be counted.
  2. Create the accumulator inside a RichMapFunction:
       new IntCounter()
  3. Register the accumulator; since registration must happen only once per task,
     do it in the overridden open() method:
       getRuntimeContext.addAccumulator("intCounter", intCounter)
  4. Call add() inside map() to accumulate.
  5. After execute() finishes, fetch the total with getAccumulatorResult(<accumulator name>).
 */
object AccumulatorDemo {

  /**
   * Demonstrates Flink accumulators on the batch (DataSet) API:
   * an `IntCounter` registered in `open()` counts how many records pass
   * through `map()`, and the global total is read from the
   * `JobExecutionResult` after `execute()` returns.
   *
   * The output path may be supplied as the first program argument;
   * it defaults to the original hard-coded path (backward compatible).
   */
  def main(args: Array[String]): Unit = {
    val env: ExecutionEnvironment = ExecutionEnvironment.getExecutionEnvironment
    import org.apache.flink.api.scala._
    val data: DataSet[String] = env.fromElements("aaaa", "bbbb", "cccc", "dddd")

    // Use an accumulator to count how many records are processed.
    val data2: DataSet[String] = data.map(new RichMapFunction[String, String] {
      // Step 2: create the accumulator instance.
      val intCounter = new IntCounter()
      // Plain variable for comparison: it is local to each parallel subtask,
      // so with parallelism > 1 it does NOT hold the global count.
      var sum: Int = 0

      // Step 3: register the accumulator exactly once per task in open().
      override def open(parameters: Configuration): Unit = {
        getRuntimeContext.addAccumulator("intCounter", intCounter)
      }

      // Step 4: accumulate inside map().
      override def map(value: String): String = {
        intCounter.add(1)
        sum += 1
        // NOTE: with multiple parallel subtasks each one prints its own `sum`;
        // only the accumulator yields the correct global total.
        println(sum)
        value
      }
    }).setParallelism(1)

    // Don't call print() here: print() triggers its own job execution and
    // would conflict with the explicit env.execute() below.
    // Output path is configurable via the first program argument.
    val outputPath: String = args.headOption.getOrElse("file:///D:/data/input/count")
    data2.writeAsText(outputPath, WriteMode.OVERWRITE)
    val jobExecutionResult: JobExecutionResult = env.execute()

    // Step 5: fetch the accumulator result after execute(); the expected
    // type is specified explicitly because getAccumulatorResult is generic.
    val count: Int = jobExecutionResult.getAccumulatorResult("intCounter")
    println(s"intCounter:$count")
  }

}
