package com.chenzhiling.flink.submitter

import org.apache.flink.client.deployment.executors.RemoteExecutor
import org.apache.flink.client.program.MiniClusterClient.MiniClusterId
import org.apache.flink.client.program.{ClusterClient, MiniClusterClient, PackagedProgram, PackagedProgramUtils}
import org.apache.flink.configuration._
import org.apache.flink.runtime.jobgraph.{JobGraph, SavepointRestoreSettings}
import org.apache.flink.runtime.minicluster.{MiniCluster, MiniClusterConfiguration}

import java.io.File
import java.lang.{Integer => JavaInt}
import java.util.{List => JavaList}
import scala.collection.JavaConversions._
import scala.util.Try
/**
 * Author: CHEN ZHI LING
 * Date: 2022/6/9
 * Description: Submits a packaged Flink job jar to an in-process local
 *              MiniCluster and returns the resulting job id.
 */
object FlinkLocalSubmitter {



  /**
   * Submit a packaged Flink job to an in-process MiniCluster and return its job id.
   *
   * @param flinkHome Flink installation directory; its conf/ supplies defaults
   * @param mainClass fully qualified entry-point class of the job
   * @param jar       the job jar file
   * @param args      program arguments passed to the main class
   * @return the submitted job's id as a string
   */
  def submit(flinkHome: String, mainClass: String, jar: File, args: JavaList[String]): String = {
    var packageProgram: PackagedProgram = null
    var client: ClusterClient[MiniClusterId] = null

    val flinkConf: Configuration = getFlinkDefaultConfiguration(flinkHome)
    try {
      // build the JobGraph from the user jar
      val (program, jobGraph) = getJobGraph(flinkConf, mainClass, args, jar)
      packageProgram = program
      // spin up a local MiniCluster and point the config at it
      client = createLocalCluster(flinkConf)
      // submitJob is async; block until the JobID has been assigned.
      // NOTE(review): closing the client below does not shut the MiniCluster
      // down, so the job keeps running after submit returns — presumably
      // intentional for a fire-and-forget local submit; confirm.
      client.submitJob(jobGraph).get().toString
    } finally {
      // Always release resources; the former `catch { case e => throw e }`
      // was a no-op rethrow and has been removed — exceptions propagate as-is.
      if (null != packageProgram) packageProgram.close()
      if (null != client) client.close()
    }
  }


  /**
   * Build a [[PackagedProgram]] from the job jar and compile it into a [[JobGraph]].
   *
   * @param flinkConfig configuration used when compiling the graph
   * @param mainClass   fully qualified entry-point class
   * @param args        program arguments forwarded to the main class
   * @param jarFile     the job jar
   * @param parallelism default parallelism for the compiled graph; defaults to 1,
   *                    matching the previously hard-coded value (backward compatible)
   * @return the program (caller must close it) paired with its job graph
   */
  private[submitter] def getJobGraph(flinkConfig: Configuration,
                                     mainClass: String,
                                     args: JavaList[String],
                                     jarFile: File,
                                     parallelism: Int = 1): (PackagedProgram, JobGraph) = {
    val packagedProgram: PackagedProgram = PackagedProgram
      .newBuilder
      // entry point of the user program
      .setEntryPointClassName(mainClass)
      .setJarFile(jarFile)
      .setSavepointRestoreSettings(SavepointRestoreSettings.none())
      // explicit Java->Scala conversion instead of the deprecated implicit
      // JavaConversions that this call used to depend on
      .setArguments(args.toArray(Array.empty[String]): _*)
      .build()
    val jobGraph: JobGraph = PackagedProgramUtils.createJobGraph(
      packagedProgram,
      flinkConfig,
      parallelism,
      null,  // no fixed JobID: let Flink generate one
      false  // do not suppress the user program's stdout/stderr during compilation
    )
    packagedProgram -> jobGraph
  }




  /**
   * Start a local MiniCluster and wire the given configuration to its REST
   * endpoint, then wrap it in a [[MiniClusterClient]].
   *
   * Side effect: mutates `flinkConfig` in place (JobManager/REST address and
   * port, deployment target set to the remote executor).
   */
  private[this] def createLocalCluster(flinkConfig: Configuration): MiniClusterClient = {
    val cluster: MiniCluster = createMiniCluster(flinkConfig)
    val restHost: String = "localhost"
    // getRestAddress completes once the cluster is started; blocking get is safe here
    val restPort: JavaInt = cluster.getRestAddress.get.getPort
    flinkConfig.set(JobManagerOptions.ADDRESS, restHost)
    flinkConfig.set[JavaInt](JobManagerOptions.PORT, restPort)
    flinkConfig.set(RestOptions.ADDRESS, restHost)
    flinkConfig.set[JavaInt](RestOptions.PORT, restPort)
    flinkConfig.set(DeploymentOptions.TARGET, RemoteExecutor.NAME)
    new MiniClusterClient(flinkConfig, cluster)
  }

  /**
   * 构建miniCluster
   * @param flinkConfig flink配置
   * @return
   */
  /**
   * Build and start a [[MiniCluster]]: Flink's default local TaskManager count,
   * with the slot count read from the supplied configuration.
   *
   * @param flinkConfig flink configuration
   * @return a started MiniCluster
   */
  private[this] def createMiniCluster(flinkConfig: Configuration): MiniCluster = {
    val clusterSpec: MiniClusterConfiguration.Builder =
      new MiniClusterConfiguration.Builder()
        .setConfiguration(flinkConfig)
        .setNumTaskManagers(ConfigConstants.DEFAULT_LOCAL_NUMBER_TASK_MANAGER)
        .setNumSlotsPerTaskManager(flinkConfig.getInteger(TaskManagerOptions.NUM_TASK_SLOTS))
    val miniCluster: MiniCluster = new MiniCluster(clusterSpec.build())
    miniCluster.start()
    miniCluster
  }

  /**
   * 根据flinkHome的地址去获取flink-conf.yaml文件
   * 包含所有设置的flink的配置,相当于map
   */
  /**
   * Load flink-conf.yaml from `$flinkHome/conf` (all configured key/value
   * pairs, effectively a map). Best effort: any failure while loading — e.g.
   * a missing conf directory — falls back to an empty [[Configuration]].
   */
  private[submitter] def getFlinkDefaultConfiguration(flinkHome: String): Configuration = {
    Try(GlobalConfiguration.loadConfiguration(s"$flinkHome/conf")) match {
      case scala.util.Success(loaded) => loaded
      case scala.util.Failure(_)      => new Configuration()
    }
  }

}
