package com.atguigu.stream.transale.withstate

import com.atguigu.stream.util.MyApp
import org.apache.spark.streaming.StreamingContext
import org.apache.spark.streaming.dstream.ReceiverInputDStream

/**
 * description ：允许修改key的状态，并且将它的状态保存下来
 * author      ：剧情再美终是戏 
 * mail        : 13286520398@163.com
 * date        ：Created in 2020/1/15 19:50
 * modified By ：
 * version:    : 1.0
 */
/**
 * Demonstrates stateful word counting with `updateStateByKey`: per-key state
 * (the running count) is carried across micro-batches and persisted via
 * checkpointing.
 */
object UpdateStateByKey extends MyApp {

  /**
   * Reads lines from a socket, splits them into words, and maintains a
   * running per-word count across batches.
   *
   * @param ssc the streaming context supplied by [[MyApp]]
   */
  override def readAndTranform(ssc: StreamingContext): Unit = {

    // updateStateByKey persists per-key state between batches, which
    // requires a checkpoint directory.
    ssc.checkpoint("./kafkaSource01")

    // Receive raw text lines from the socket source.
    val source: ReceiverInputDStream[String] = ssc.socketTextStream("hadoop101", 9999)

    // Stateless step: tokenize each line on whitespace and pair every word
    // with an initial count of 1.
    source.transform {
      rdd =>
        rdd.flatMap(_.split("\\s+")).map((_, 1))
    }
      // Stateful step: `seq` holds this batch's counts for a key, `option`
      // holds the accumulated count from previous batches (None on first
      // sight of the key). `seq.sum` replaces the deprecated
      // `(0 /: seq)(_ + _)` fold-symbol form — same result, idiomatic.
      .updateStateByKey((seq: Seq[Int], option: Option[Int]) => Some(seq.sum + option.getOrElse(0)))
      .print()
  }
}
