package com.udf.flink.scala.apitest.state

import com.udf.flink.scala.examples.reduces.User
import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.apache.flink.api.scala._
import org.apache.flink.table.api.bridge.scala.StreamTableEnvironment

object MapWithState {

  /** Demo job: keeps a running per-key count with `mapWithState`.
    *
    * A datagen-backed table feeds a keyed stream; the stream is keyed by
    * the random one-character field `a`, and Flink managed keyed state
    * accumulates how many records have been seen for each key.
    */
  def main(args: Array[String]): Unit = {
    val env = StreamExecutionEnvironment.getExecutionEnvironment

//    val lines = env.socketTextStream("node-01", 7777)
    val tableEnv = StreamTableEnvironment.create(env)

    // Source DDL: datagen connector emitting 10 rows/s — user_id is a
    // bounded sequence 1..200, cnt is random in [1,2], `a` is a random
    // 1-char string; rowtime is computed from localtimestamp and also
    // serves as the watermark column.
    val ddl = """CREATE TABLE ordr (
              user_id INT,
              cnt INT,
              a STRING,
              rowtime AS localtimestamp,
              WATERMARK FOR rowtime AS rowtime
              ) WITH (
                'connector' = 'datagen',
              'rows-per-second'='10',
              'fields.user_id.kind'='sequence',
              'fields.user_id.start'='1',
              'fields.user_id.end'='200',
              'fields.cnt.min'='1',
              'fields.cnt.max'='2',
              'fields.a.length'='1'
              )"""
    tableEnv.executeSql(ddl)

//    val keyed = lines.flatMap(_.split(" ")).map(word => (word, 1)).keyBy(0)
    val resultTable = tableEnv.sqlQuery("""select user_id,cnt,a ,cnt as rowct from ordr""")
    //    val tb=tableEnv.from("ordr").addOrReplaceColumns($"rowtime", concat($("a"), "sunny").as("a"))

    // Convert back to a DataStream of User, pair every record with 1,
    // and key the stream by the `a` field.
    val keyed = tableEnv
      .toDataStream(resultTable, classOf[User])
      .map(user => (user.a, 1))
      .keyBy(_._1)

    // Running count per key: the Option state holds the total so far
    // (None on first sight of a key). Emit (key, newTotal) downstream and
    // write the new total back into state.
    val summed = keyed.mapWithState { (input: (String, Int), state: Option[Int]) =>
      val (key, increment) = input
      val total = state.getOrElse(0) + increment
      ((key, total), Some(total))
    }

    summed.print()

    env.execute()
  }
}
