package org.pucc

import org.apache.flink.core.execution.JobClient
import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.apache.flink.table.api.{EnvironmentSettings, ResultKind, SqlDialect, StatementSet}
import org.apache.flink.table.api.bridge.scala.StreamTableEnvironment
import org.pucc.bean.ImportConfig
import org.pucc.common.{ClusterConstant, ImportConstant}
import org.pucc.data.DataManager
import org.pucc.handle.MysqlHandle.startMysqlJob
import org.pucc.handle.SqlserverHandle.{executeSqlserverJob, getSqlserverData, startSqlserverJob}

/**
 * Flink-based data-import program: reads per-table import configuration and
 * submits one streaming job per configured source table (MySQL / SQL Server).
 */
object DataImport {

  /**
   * Entry point. Configures checkpointing, loads the import configuration
   * from the `flinkcdc.SQL_MAPPERING` table, registers one import pipeline
   * per config entry, and submits them all as a single [[StatementSet]].
   */
  def main(args: Array[String]): Unit = {
    val fsSettings = EnvironmentSettings.newInstance.build
    val env = StreamExecutionEnvironment.getExecutionEnvironment

    // Runtime state backend (heap vs RocksDB) is left at the cluster default.
    //  env.setStateBackend(new HashMapStateBackend())

    // Durable checkpoint storage; without this, checkpoints would live only
    // in the JobManager's memory and be lost on failover.
    env.getCheckpointConfig.setCheckpointStorage(s"${ClusterConstant.HDFSMeta.URL}/hudi/ck/data_import")

    // Interval (ms) between checkpoint barriers, i.e. between two checkpoints.
    env.enableCheckpointing(10000)

    // Checkpoint timeout (ms). If a checkpoint outlives the interval, several
    // checkpoints can be in flight at once — hence the concurrency cap below.
    env.getCheckpointConfig.setCheckpointTimeout(10000)
    env.getCheckpointConfig.setMaxConcurrentCheckpoints(2)

    env.setParallelism(1)

    val tableEnv: StreamTableEnvironment = StreamTableEnvironment.create(env, fsSettings)

    tableEnv.getConfig.setSqlDialect(SqlDialect.DEFAULT)

    // One ImportConfig per source table to be imported.
    val configs = DataManager.getImportConfig(database = "flinkcdc", table = "SQL_MAPPERING")

    // All INSERT statements are accumulated here and submitted as one job.
    val statements: StatementSet = tableEnv.createStatementSet()
    configs.foreach { cfg =>
      cfg.sourceType match {
        case ImportConstant.TABLE_TYPE_MYSQL =>
          // NOTE(review): this overwrites the job-wide checkpoint path set above;
          // with several MySQL tables only the last path takes effect — confirm
          // this is intended. The previously hard-coded IP was replaced with
          // ClusterConstant.HDFSMeta.URL for consistency with the path above.
          env.getCheckpointConfig.setCheckpointStorage(s"${ClusterConstant.HDFSMeta.URL}/hudi/ck/${cfg.sinkTableName}")
          startMysqlJob(tableEnv, statements, cfg)
        case ImportConstant.TABLE_TYPE_SQLSERVER =>
          startSqlserverJob(tableEnv, statements, cfg)
        case other =>
          // Fail fast with a descriptive error instead of an opaque MatchError.
          throw new IllegalArgumentException(s"Unsupported source type: $other")
      }
    }
    statements.execute().print()
  }
}
