import java.text.SimpleDateFormat
import java.time.Duration
import java.util.Properties

import org.apache.flink.api.common.eventtime.{SerializableTimestampAssigner, WatermarkStrategy}
import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.streaming.api.functions.ProcessFunction
import org.apache.flink.streaming.api.scala._
import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer
import org.apache.flink.streaming.connectors.redis.RedisSink
import org.apache.flink.streaming.connectors.redis.common.config.FlinkJedisPoolConfig
import org.apache.flink.streaming.connectors.redis.common.mapper.{RedisCommand, RedisCommandDescription, RedisMapper}
import org.apache.flink.util.Collector

import scala.collection.mutable

object top2userconsumption {
  /**
   * Flink streaming job: reads order records from the Kafka topic "order",
   * keeps a running per-user consumption total, and continuously emits the
   * current top-2 spenders as a bracketed string (e.g. "[u1:9.5,u2:3.0]"),
   * both to stdout and to a fixed Redis key.
   *
   * Expected comma-separated record layout (inferred from the indices used
   * below — confirm against the producer): field 4 = order state,
   * field 5 = user id, field 3 = amount, field 8 = create time,
   * field 9 = operate time (the literal ")" when absent).
   */
  def main(args: Array[String]): Unit = {
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    // Parallelism 1 is required: the aggregation below uses a plain mutable
    // map shared by the (single) ProcessFunction subtask, not keyed state.
    env.setParallelism(1)

    val prop = new Properties()
    prop.setProperty("bootstrap.servers", "192.168.100.131:9092")
    prop.setProperty("group.id", "T1")

    // Running per-user totals; safe only because parallelism is 1 and this
    // state is not checkpointed (totals restart from zero on recovery).
    val totals = new mutable.HashMap[String, Double]()

    val stream = env.addSource(
      new FlinkKafkaConsumer[String]("order", new SimpleStringSchema(), prop)
        .setStartFromEarliest())

    val top2 = stream
      .filter(_.startsWith("==order_info=="))
      .filter { line =>
        val fields = line.split(",")
        val state = fields(4).toInt
        // Drop records in states 1003/1005/1006 (cancelled/closed — verify
        // the state-code semantics against the upstream schema).
        state != 1003 && state != 1005 && state != 1006
      }
      .assignTimestampsAndWatermarks(
        WatermarkStrategy
          .forBoundedOutOfOrderness(Duration.ofSeconds(5))
          .withTimestampAssigner(new SerializableTimestampAssigner[String] {
            // One formatter per assigner instance instead of one per record.
            // SimpleDateFormat is Serializable, and each subtask invokes
            // extractTimestamp single-threaded, so sharing it is safe.
            private val sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss")

            override def extractTimestamp(t: String, l: Long): Long = {
              val fields = t.split(",")
              val create = sdf.parse(fields(8)).getTime
              // Field 9 carries the literal ")" when no operate time exists;
              // otherwise it may have a trailing ")" that must be stripped.
              val operate =
                if (fields(9).trim.equals(")")) create
                else sdf.parse(fields(9).trim.replace(")", "")).getTime
              math.max(create, operate)
            }
          }))
      .map { line =>
        val fields = line.split(",")
        (fields(5), fields(3).toDouble) // (userId, orderAmount)
      }
      .process(new ProcessFunction[(String, Double), String] {
        override def processElement(i: (String, Double),
                                    context: ProcessFunction[(String, Double), String]#Context,
                                    collector: Collector[String]): Unit = {
          // Accumulate with getOrElse instead of the original
          // map.get(key).get, which calls .get on an Option — an
          // anti-pattern that also needed a separate contains() branch.
          totals.put(i._1, totals.getOrElse(i._1, 0.0) + i._2)
          // Emit the current top-2 users by total spend on every record.
          val top = totals.toList
            .sortBy(-_._2)
            .take(2)
            .map { case (user, amount) => s"$user:$amount" }
          collector.collect(top.mkString("[", ",", "]"))
        }
      })

    top2.print()

    // Mirror the latest top-2 snapshot into Redis under a fixed key, so
    // downstream readers always see the most recent ranking.
    val redisConfig = new FlinkJedisPoolConfig.Builder()
      .setHost("192.168.220.24")
      .setPort(6379)
      .build()
    top2.addSink(new RedisSink[String](redisConfig, new RedisMapper[String] {
      override def getCommandDescription: RedisCommandDescription =
        new RedisCommandDescription(RedisCommand.SET)

      override def getKeyFromData(t: String): String = "top2userconsumption"

      override def getValueFromData(t: String): String = t
    }))

    // execute() is side-effecting; keep the parentheses per convention.
    env.execute()
  }
}