package com.hzh.flink.core

import org.apache.flink.api.common.functions.MapFunction
import org.apache.flink.streaming.api.scala._

import scala.collection.mutable

object Demo11HashMap {

  /**
   * Streaming word-count demo that keeps per-word counts in a plain
   * in-memory `mutable.HashMap` inside a `MapFunction`.
   *
   * NOTE: an ordinary collection is NOT fault tolerant — if the job
   * fails, all accumulated counts are lost. Flink managed state, by
   * contrast, is persisted by checkpoints (e.g. to HDFS) and restored
   * on recovery. Checkpointing is disabled by default and must be
   * enabled explicitly.
   */
  def main(args: Array[String]): Unit = {
    // Set up the streaming environment; a single parallel task keeps
    // all words on one operator instance, so one map sees every word.
    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment
    env.setParallelism(1)

    // Source: read text lines from a socket.
    val lines: DataStream[String] = env.socketTextStream("master", 8888)

    // Split each line on commas and key the stream by the word itself.
    val words: KeyedStream[String, String] =
      lines
        .flatMap(_.split(","))
        .keyBy(word => word)

    // Count occurrences per word using the (unmanaged, non-checkpointed)
    // in-memory map held by this operator instance.
    val counts: DataStream[(String, Int)] = words.map(new MapFunction[String, (String, Int)] {
      // word -> running count; lives only in this task's JVM heap.
      private val tally = new mutable.HashMap[String, Int]()

      override def map(word: String): (String, Int) = {
        // Previous count (0 if unseen), incremented and written back.
        val updated = tally.getOrElse(word, 0) + 1
        tally.update(word, updated)
        (word, updated)
      }
    })

    counts.print()
    env.execute()
  }

}
