package com.shujia.flink.state

import org.apache.flink.api.common.functions.MapFunction
import org.apache.flink.streaming.api.scala._

import scala.collection.mutable

/**
  * Word-count demo that deliberately does NOT use Flink managed state.
  *
  * The running counts live in a plain in-memory mutable.HashMap inside the
  * operator instance. Unlike Flink state (ValueState etc.), this map is never
  * checkpointed to durable storage, so if the job fails and restarts, all
  * previously accumulated counts are lost. Compare with the stateful variant
  * of this demo to see the difference.
  */
object Demo1NoStateWC {

  def main(args: Array[String]): Unit = {
    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment

    // Read raw text lines from a socket source.
    val lines: DataStream[String] = env.socketTextStream("master", 8888)

    // Split each line on commas and pair every word with an initial count of 1.
    val wordPairs: DataStream[(String, Int)] = lines.flatMap(_.split(",")).map((_, 1))

    // Partition the stream by word so each key is processed by one subtask.
    val keyed: KeyedStream[(String, Int), String] = wordPairs.keyBy(_._1)

    // Accumulate counts with an anonymous MapFunction holding an ordinary
    // HashMap — intentionally NOT Flink state, so the counts are not fault
    // tolerant and will not survive a job restart.
    val counts: DataStream[(String, Int)] = keyed.map(new MapFunction[(String, Int), (String, Int)] {

      // In-memory running count per word; lives only inside this operator
      // instance and is lost on failure/restart (never checkpointed).
      val countsByWord = new mutable.HashMap[String, Int]()

      override def map(value: (String, Int)): (String, Int) = {
        val word: String = value._1

        // Previous count (0 if unseen), incremented by one for this occurrence.
        val updated: Int = countsByWord.getOrElse(word, 0) + 1

        // Store the new total back into the map.
        countsByWord.put(word, updated)

        (word, updated)
      }
    })

    counts.print()

    env.execute()
  }

}
