package com.sunzm.flink.datastream.scala.robotcall

import java.util.Properties

import com.alibaba.fastjson.{JSON, JSONObject}
import com.sunzm.flink.datastream.scala.beans.{RobotCall, RobotCallFeeResult}
import org.apache.commons.lang3.time.DateFormatUtils
import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.api.common.state.MapStateDescriptor
import org.apache.flink.streaming.api.datastream.BroadcastStream
import org.apache.flink.streaming.api.scala._
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer
import org.apache.kafka.clients.consumer.ConsumerConfig
import org.slf4j.{Logger, LoggerFactory}

object RobotCallFeeJob {
  private val logger: Logger = LoggerFactory.getLogger(this.getClass.getName.stripSuffix("$"))
  // Toggle between a local environment with web UI (for development) and the
  // cluster-provided environment. TODO: drive this from args/config instead of a constant.
  private val isLocal = true

  // State descriptor for the broadcast (fee-rate) state.
  // Key: fee-rate key (Int); value: a 4-tuple of Double rate components.
  // NOTE(review): the exact meaning of the four Doubles is defined by
  // RobotCallFeeKeyedBroadcastProcessFunction (not visible here) — confirm there.
  val userStateDescriptor: MapStateDescriptor[Int, (Double, Double, Double, Double)] = new MapStateDescriptor[Int, (Double, Double, Double, Double)](
    "userBroadcastState",
    classOf[Int],
    classOf[(Double, Double, Double, Double)])

  /**
   * Entry point: builds and runs the robot-call fee computation pipeline.
   *
   * Pipeline shape:
   *   1. Fee-rate data: a MySQL snapshot source unioned with a Kafka change stream,
   *      broadcast to all parallel instances via [[userStateDescriptor]].
   *   2. Call records: consumed from Kafka, parsed from JSON into [[RobotCall]],
   *      keyed by callID, then connected with the broadcast fee-rate stream to
   *      compute per-call fees.
   */
  def main(args: Array[String]): Unit = {
    val env: StreamExecutionEnvironment = if (isLocal) {
      StreamExecutionEnvironment.createLocalEnvironmentWithWebUI()
    } else {
      StreamExecutionEnvironment.getExecutionEnvironment
    }

    // Custom MySQL source: loads the initial fee-rate data from MySQL.
    val mysqlDS: DataStream[String] = env.addSource(new RobotCallDataSourceMySQL)

    // If we had MySQL binlog read permission we could capture changes directly with
    // flink-connector-mysql-cdc. Without it, rate changes arrive via Kafka (pushed
    // either by canal or by the application that edits the database records).
    val properties: Properties = buildKafkaProperties()

    // Consumer for fee-rate change events.
    val robotFeeConsumer = new FlinkKafkaConsumer[String]("fee-rate-change", new SimpleStringSchema(), properties)
    robotFeeConsumer.setStartFromGroupOffsets()
    // NOTE(review): commit-on-checkpoint only takes effect when checkpointing is
    // enabled on the environment (env.enableCheckpointing(...)), which this job
    // never does — offsets are currently not committed through this path. Confirm
    // whether checkpointing should be enabled before relying on committed offsets.
    robotFeeConsumer.setCommitOffsetsOnCheckpoints(true)

    val robotFeeRateDataStream: DataStream[String] = env.addSource(robotFeeConsumer)
      .uid("robotCallFeeRate-kafkaSource-20210805001")

    val updateDS: DataStream[String] = robotFeeRateDataStream.process(new RobotCallFeeRateChangeProcessFunction)

    // Union the MySQL snapshot with the Kafka change stream.
    val feeDS: DataStream[String] = mysqlDS.union(updateDS)

    // Broadcast the combined fee-rate stream to all downstream subtasks.
    val userBroadcastStream: BroadcastStream[String] = feeDS.broadcast(userStateDescriptor)

    // Consumer for call-record events; configured identically to the fee-rate
    // consumer (explicit group-offset start + commit-on-checkpoint) and given a
    // stable uid so savepoint restores can re-map this operator's state.
    val callLogConsumer = new FlinkKafkaConsumer[String]("feeTopic", new SimpleStringSchema(), properties)
    callLogConsumer.setStartFromGroupOffsets()
    callLogConsumer.setCommitOffsetsOnCheckpoints(true)

    val dataStream: DataStream[String] = env.addSource(callLogConsumer)
      .uid("robotCallFee-kafkaSource-20210805002")

    // Parse each JSON call record into a RobotCall, deriving the billing date
    // (yyyy-MM-dd, JVM-default time zone) from the epoch-millis start time.
    val robotCallDS: DataStream[RobotCall] = dataStream.map(line => {
      val jSONObject: JSONObject = JSON.parseObject(line)

      val callID: String = jSONObject.getString("callID")
      val feeDuration: Int = jSONObject.getIntValue("feeDuration")
      val startTime: Long = jSONObject.getLongValue("startTime")
      val companyId: String = jSONObject.getString("companyId")
      val numberType: Int = jSONObject.getIntValue("numberType")
      val direction: Int = jSONObject.getIntValue("direction")
      val callWay: Int = jSONObject.getIntValue("callWay")
      val startDate: String = DateFormatUtils.format(startTime, "yyyy-MM-dd")
      RobotCall(callID, feeDuration, startDate, companyId, numberType, direction, callWay)
    })

    // Key by callID so the keyed-broadcast process function can keep per-call state.
    val keyedDS: KeyedStream[RobotCall, String] = robotCallDS.keyBy(_.callID)

    // Connect the keyed call stream with the broadcast fee-rate stream.
    val bdConnectedDS: BroadcastConnectedStream[RobotCall, String] = keyedDS.connect(userBroadcastStream)

    val callLogFeeDS: DataStream[RobotCallFeeResult] = bdConnectedDS.process(new RobotCallFeeKeyedBroadcastProcessFunction)

    callLogFeeDS.print()

    env.execute(this.getClass.getSimpleName.stripSuffix("$"))
  }

  /** Builds the Kafka consumer configuration shared by both sources. */
  private def buildKafkaProperties(): Properties = {
    val properties = new Properties
    properties.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "82.156.210.70:9093")
    properties.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "flink-test")
    properties.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest")
    properties
  }
}
