package com.sunzm.flink.datastream.scala.sink.jdbc

import com.alibaba.fastjson.JSON
import com.streamxhub.streamx.flink.core.scala.StreamingContext
import com.streamxhub.streamx.flink.core.scala.sink.JdbcSink
import org.apache.flink.api.java.utils.ParameterTool
import org.apache.flink.runtime.state.hashmap.HashMapStateBackend
import org.apache.flink.runtime.state.storage.FileSystemCheckpointStorage
import org.apache.flink.streaming.api.CheckpointingMode
import org.slf4j.{Logger, LoggerFactory}
import org.apache.flink.streaming.api.scala._

import java.time.Duration

/**
 * Demonstrates StreamX's JdbcSink, which implements EXACTLY_ONCE semantics
 * via two-phase commit and uses HikariCP as the connection pool, making
 * reading and writing data simpler and more reliable.
 *
 * @author sunzm
 * @date 2022-01-29
 * @version 1.0
 */
object StreamXJdbcSink {
  private val logger: Logger = LoggerFactory.getLogger(this.getClass.getName.stripSuffix("$"))

  // Switch between a local environment (with embedded web UI) and the
  // environment provided by the cluster at submission time.
  private val isLocal = true

  def main(args: Array[String]): Unit = {

    val parameterTool = ParameterTool.fromArgs(args)

    // 1. Create the execution environment.
    val env: StreamExecutionEnvironment = if (isLocal) {
      StreamExecutionEnvironment.createLocalEnvironmentWithWebUI()
    } else {
      StreamExecutionEnvironment.getExecutionEnvironment
    }

    // EXACTLY_ONCE semantics requires checkpointing to be enabled.
    env.enableCheckpointing(Duration.ofSeconds(5).toMillis, CheckpointingMode.EXACTLY_ONCE)
    val checkpointStorage = new FileSystemCheckpointStorage("file:///D:/data/flink/ck/jdbc/")
    val checkpointConfig = env.getCheckpointConfig
    checkpointConfig.setCheckpointStorage(checkpointStorage)
    env.setStateBackend(new HashMapStateBackend)

    // 2. Read raw JSON lines from a socket, e.g.
    //    {"name":"zs", "age": 17, "gender":1, "address":"beijing"}
    val socketDataStream: DataStream[String] = env.socketTextStream("192.168.1.250", 9999)

    // 3. Map each line to a User. Parsing happens exactly once per record
    //    (the original parsed every line twice: once in filter, once in map);
    //    malformed or incomplete records are logged and dropped instead of
    //    failing the job.
    val userDataStream: DataStream[User] = socketDataStream.flatMap(line => parseUser(line).toList)

    val hikariParameter = ParameterTool.fromPropertiesFile("flink-demo/src/main/resources/hikari.properties")

    val streamingContext = new StreamingContext(hikariParameter, env)

    // `alias` is the prefix of the connection-pool keys in the properties
    // file; when empty it defaults to "jdbc", e.g. jdbc.jdbcUrl.
    JdbcSink(
      parallelism = 1,
      alias = "",
      name = "jdbcSink",
      "jdbcSink-20220129001"
    )(streamingContext).sink[User](userDataStream)(user =>
      s"""
         |insert into t_user(`name`,`age`,`gender`,`address`)
         |value('${escapeSql(user.name)}',${user.age},${user.gender},'${escapeSql(user.address)}')
         |""".stripMargin
    )

    // 5. Execute the job.
    env.execute(this.getClass.getSimpleName.stripSuffix("$"))

  }

  /**
   * Parses one socket line into a [[User]].
   *
   * @param line raw JSON text received from the socket
   * @return Some(User) when the line is valid JSON containing all four
   *         required fields; None otherwise (the failure is logged)
   */
  private def parseUser(line: String): Option[User] = {
    import scala.util.control.NonFatal
    try {
      val json = JSON.parseObject(line)
      if (json.containsKey("name") &&
        json.containsKey("age") &&
        json.containsKey("gender") &&
        json.containsKey("address")) {
        Some(User(
          json.getString("name"),
          json.getIntValue("age"),
          json.getIntValue("gender"),
          json.getString("address")))
      } else {
        None
      }
    } catch {
      // NonFatal (not Throwable): let OOM / InterruptedException propagate.
      case NonFatal(e) =>
        logger.error(s"数据格式异常: ${line}", e)
        None
    }
  }

  /**
   * Minimal escaping for string values interpolated into the INSERT
   * statement. Socket input is untrusted and StreamX's JdbcSink takes a raw
   * SQL string (no parameterized statements are available here), so without
   * this a quote in `name`/`address` breaks the statement (SQL injection).
   * Doubles single quotes and escapes backslashes (MySQL treats backslash
   * as an escape character in string literals).
   */
  private def escapeSql(value: String): String =
    value.replace("\\", "\\\\").replace("'", "''")

  /** Record written to the `t_user` table. */
  case class User(name: String, age: Int, gender: Int, address: String)
}
