package com.nekonomics.lakehouse.common

import org.apache.flink.api.common.JobExecutionResult
import org.apache.flink.api.common.eventtime.WatermarkStrategy
import org.apache.flink.configuration.Configuration
import org.apache.flink.runtime.state.hashmap.HashMapStateBackend
import org.apache.flink.streaming.api.CheckpointingMode
import org.apache.flink.streaming.api.environment.CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION
import org.apache.flink.streaming.api.environment.{StreamExecutionEnvironment => JavaEnv}
import org.apache.flink.streaming.api.scala.{DataStream, StreamExecutionEnvironment}
import org.apache.flink.table.api.bridge.scala.StreamTableEnvironment

import util.SqlUtil._

object FlinkRunner {

  /**
   * Settings for a Kafka-sourced DataStream job.
   *
   * @param groupId     Kafka consumer group id; also namespaces the HDFS checkpoint directory
   * @param topic       Kafka topic to consume
   * @param port        port for the embedded Flink web UI (`rest.port`)
   * @param parallelism default operator parallelism
   * @param ckp         whether to enable checkpointing (see [[setCheckpoint]])
   */
  case class RunnerConfig(groupId: String, topic: String, port: Int, parallelism: Int = 4, ckp: Boolean = false)

  /**
   * Settings for a Table/SQL job. No single source topic — sources are declared in SQL DDL.
   *
   * @param groupId     logical job id; namespaces the HDFS checkpoint directory
   * @param port        port for the embedded Flink web UI (`rest.port`)
   * @param parallelism default operator parallelism
   * @param ckp         whether to enable checkpointing (see [[setCheckpoint]])
   */
  case class SqlRunnerConfig(groupId: String, port: Int, parallelism: Int = 4, ckp: Boolean = false)

  /**
   * Creates a [[StreamExecutionEnvironment]] with the embedded web UI bound to
   * `config.port`, derives a [[StreamTableEnvironment]] from it, and hands both to `f`.
   *
   * NOTE: this does NOT call `env.execute()`. SQL statements submitted through
   * `executeSql` run on their own; a caller building a DataStream pipeline must
   * trigger execution itself.
   */
  def envContext(f: (StreamExecutionEnvironment, StreamTableEnvironment) => Unit)(implicit config: SqlRunnerConfig): Unit = {
    val webUIConfig = getWebUIConfig(config.port)

    val env = new StreamExecutionEnvironment(JavaEnv.getExecutionEnvironment(webUIConfig))
    val tableEnv = StreamTableEnvironment.create(env)
    f(env, tableEnv)
  }

  /**
   * Runs a SQL job against the Paimon catalog on HDFS.
   *
   * Applies parallelism, state backend and (optional) checkpointing from `config`,
   * creates and switches to the `my_catalog` Paimon catalog, disables upsert
   * materialization on sinks, then invokes `f` with the prepared environments.
   */
  def runSql(f: (StreamExecutionEnvironment, StreamTableEnvironment) => Unit)(implicit config: SqlRunnerConfig): Unit = {

    envContext { (env, tableEnv) =>
      env.setParallelism(config.parallelism)
      env.setStateBackend(new HashMapStateBackend)

      setCheckpoint(env, config.ckp, config.groupId)

      tableEnv.executeSql(
        sql"""
             |CREATE CATALOG my_catalog WITH (
             |    'type' = 'paimon',
             |    'warehouse' = 'hdfs://hadoop01:8020/paimon/fs'
             |);
             |""".stripMargin
      )
      tableEnv.executeSql(sql"USE CATALOG my_catalog;")
      // Skip Flink's extra upsert-materialize operator on sinks — presumably because
      // Paimon sinks handle upsert ordering themselves; confirm against Paimon docs.
      tableEnv.getConfig.getConfiguration.setString("table.exec.sink.upsert-materialize", "NONE")

      // Paimon requires a checkpoint interval when writing in streaming mode;
      // enable it via `config.ckp` (handled by setCheckpoint) rather than a SQL SET.

      f(env, tableEnv)
    }

  }

  /** Builds a Flink [[Configuration]] exposing the web UI on the given REST port. */
  private def getWebUIConfig(port: Int): Configuration = {
    val webUIConfig = new Configuration
    webUIConfig.setInteger("rest.port", port)
    webUIConfig
  }

  /**
   * Enables exactly-once checkpointing to HDFS when `ckp` is true; no-op otherwise.
   *
   * Checkpoints every 5 s (min 5 s pause, 10 s timeout, at most one in flight) and
   * are retained on cancellation so a job can be resumed from them manually.
   * One directory per `groupId` keeps concurrent jobs from clobbering each other.
   */
  private def setCheckpoint(env: StreamExecutionEnvironment, ckp: Boolean, groupId: String): Unit = {
    if (ckp) {

      env.enableCheckpointing(5000L)

      env.getCheckpointConfig.setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE)

      env.getCheckpointConfig.setCheckpointStorage("hdfs://hadoop01:8020/nekonomics/ckp/" + groupId)

      env.getCheckpointConfig.setMaxConcurrentCheckpoints(1)

      env.getCheckpointConfig.setMinPauseBetweenCheckpoints(5000)

      env.getCheckpointConfig.setCheckpointTimeout(10000)

      env.getCheckpointConfig.setExternalizedCheckpointCleanup(RETAIN_ON_CANCELLATION)

    }
  }

}
