package com.fulu.game.bigdata.realtime

import java.io.File

import com.fulu.game.bigdata.realtime.config.Constants
import com.fulu.game.bigdata.realtime.entity.Job
import com.fulu.game.bigdata.realtime.sink.{ElasticsearchSinkBuilder, HbaseSink, KafkaSinkBuilder, MySqlSink}
import com.fulu.game.bigdata.realtime.source.table.TableUtils
import org.apache.flink.contrib.streaming.state.RocksDBStateBackend
import org.apache.flink.streaming.api.{CheckpointingMode, TimeCharacteristic}
import org.apache.flink.streaming.api.environment.CheckpointConfig.ExternalizedCheckpointCleanup
import org.apache.flink.streaming.api.scala.{StreamExecutionEnvironment, _}
import org.apache.flink.table.api.EnvironmentSettings
import org.apache.flink.table.api.scala.{StreamTableEnvironment, _}
import org.apache.flink.table.catalog.hive.HiveCatalog
import org.apache.flink.table.module.hive.HiveModule
import org.apache.flink.types.Row
import org.apache.flink.util.FileUtils


/**
 * Entry point for a configuration-driven Flink streaming job.
 *
 * Usage: Execute &lt;job-file-path&gt;
 *
 * The job file (UTF-8, parsed by [[Job]]) declares source tables, SQL
 * operators and their sinks; this driver wires them into a Blink-planner
 * streaming pipeline backed by a Hive catalog and RocksDB state backend.
 */
object Execute {

  def main(args: Array[String]): Unit = {

    if (args.length < 1) {
      // Fail fast on misconfiguration; non-zero exit code signals the launcher.
      println("please input job file path")
      System.exit(-1)
    }

    val jobFilePath = args(0)

    // The whole job definition is read from disk once at startup.
    val job = Job(FileUtils.readFile(new File(jobFilePath), "utf8"))

    val env = StreamExecutionEnvironment.getExecutionEnvironment
    val bsSettings = EnvironmentSettings.newInstance.useBlinkPlanner.inStreamingMode.build
    val tableEnv = StreamTableEnvironment.create(env, bsSettings)

    // Checkpoint every 3 minutes with explicit exactly-once semantics; retain
    // externalized checkpoints so the job can be restored after a cancel.
    env.enableCheckpointing(180000, CheckpointingMode.EXACTLY_ONCE)
    env.getCheckpointConfig.enableExternalizedCheckpoints(ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION)
    // Per-job checkpoint directory keyed by job name.
    env.setStateBackend(new RocksDBStateBackend(Constants.CHECKPOINT_DIR + job.getJobName))
    env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)

    // Register the Hive catalog so operator SQL can reference Hive tables.
    // NOTE(review): catalog name, conf dir and Hive version are hard-coded —
    // consider moving them into Constants alongside CHECKPOINT_DIR.
    val hive = new HiveCatalog("hive", "default", "/etc/hive/conf", "1.2.1")
    tableEnv.registerCatalog("hive", hive)
    tableEnv.useCatalog("hive")
    tableEnv.useDatabase("default")
    tableEnv.loadModule("myHive", new HiveModule("1.2.1")) // load HiveModule so Hive built-in functions are usable

    // All declared source tables must exist before any operator SQL runs.
    job.getSources.forEach(table => TableUtils.createTable(env, tableEnv, table))

    job.getOperators.forEach(operator => {

      val result = tableEnv.sqlQuery(operator.getSql)

      // Translate the table to a retract stream at most once per operator and
      // fan it out to every stream sink, instead of re-translating per sink.
      // (lazy: the "flink" view-only case never needs the stream at all.)
      lazy val retractStream = tableEnv.toRetractStream[Row](result)

      operator.getSinks.forEach(sink => {

        sink.getSinkType match {
          case "mysql" => retractStream.addSink(new MySqlSink(sink))
          case "flink" => tableEnv.createTemporaryView(sink.getSinkTable, result) // expose result as a view for downstream operators
          case "kafka" => retractStream.addSink(KafkaSinkBuilder.createKafkaSink(sink))
          case "hbase" => retractStream.addSink(new HbaseSink(sink))
          case "elasticsearch" => retractStream.addSink(ElasticsearchSinkBuilder.createElasticSearchSink(sink))
          // Surface the offending type instead of silently dropping the sink.
          case other => println(s"unsupported sink type: $other")
        }
      })
    })

    tableEnv.execute(job.getJobName)
  }
}
