package com.autoai

import akka.remote.WireFormats.TimeUnit
import org.apache.flink.api.common.restartstrategy.RestartStrategies
import org.apache.flink.api.common.time.Time
import org.apache.flink.runtime.executiongraph.restart.RestartStrategy
import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.apache.flink.table.api.bridge.scala.StreamTableEnvironment

object KafkaToMysql {

  /**
   * Flink Table API job: streams YARN application monitoring records
   * (JSON) from a Kafka topic into a MySQL table via the JDBC connector.
   *
   * Required program arguments, in order:
   *   args(0) Kafka topic
   *   args(1) Kafka bootstrap servers (host:port)
   *   args(2) Kafka consumer group id
   *   args(3) JDBC url of the target MySQL database
   *   args(4) target table name
   *   args(5) database username
   *   args(6) database password
   *
   * @throws IllegalArgumentException if fewer than 7 arguments are supplied
   */
  def main(args: Array[String]): Unit = {

    // Fail fast with a usage message instead of an opaque
    // ArrayIndexOutOfBoundsException when an argument is missing.
    require(
      args.length >= 7,
      "usage: KafkaToMysql <topic> <bootstrap-servers> <group-id> <jdbc-url> <table-name> <username> <password>"
    )

    val env = StreamExecutionEnvironment.getExecutionEnvironment
    val tEnv: StreamTableEnvironment = StreamTableEnvironment.create(env)

    // Kafka source table. `user` must stay backtick-quoted: USER is a
    // reserved keyword in Flink SQL. Parse errors in incoming JSON are
    // skipped ('json.ignore-parse-errors' = 'true') rather than failing the job.
    val sourceKafka = "CREATE TABLE FlinkYarnData(\n id STRING, \n`user` STRING,\nname STRING,\nqueue STRING,\nstate STRING,\nfinalStatus STRING,\nprogress STRING,\ntrackingUI STRING,\ntrackingUrl STRING,\ndiagnostics STRING,\nclusterId STRING,\napplicationType STRING,\napplicationTags STRING,\nstartedTime bigint,\nfinishedTime bigint,\nelapsedTime STRING,\namContainerLogs STRING,\namHostHttpAddress STRING,\nallocatedMB STRING,\nallocatedVCores STRING,\nreservedMB STRING,\nreservedVCores STRING,\nrunningContainers STRING,\nmemorySeconds int,\nvcoreSeconds int,\npreemptedResourceMB STRING,\npreemptedResourceVCores STRING,\nnumNonAMContainerPreempted STRING,\nnumAMContainerPreempted STRING,\nlogAggregationStatus STRING \n)" +
      s" WITH (\n 'connector' = 'kafka',\n " +
      s"'topic' = '${args(0)}',\n " +
      s"'properties.bootstrap.servers' = '${args(1)}',\n " +
      s"'properties.group.id' = '${args(2)}',\n " +
      "'format' = 'json',\n " +
      "'json.ignore-parse-errors' = 'true')"

    // JDBC sink table backed by MySQL.
    val sinkMysql = "CREATE TABLE yarnapplication_monitor_data(\n id string,\n `user` string,\n name string, \n queue string,\n state string,\n finalStatus string , \n trackingUrl string, \n applicationType string , \n startedTime string , \n finishedTime string, \n lastTime bigint,\n memorySeconds int,\n vcoreSeconds int\n) " +
      "WITH (\n 'connector' = 'jdbc',\n " +
      s"'url' = '${args(3)}',\n " +
      s"'table-name' = '${args(4)}',\n " +
      s"'username' = '${args(5)}',\n " +
      s"'password' = '${args(6)}')"

    // Continuous INSERT from the Kafka source into the MySQL sink.
    // NOTE: `user` is backtick-quoted here too — unquoted it is parsed as
    // the reserved USER keyword, not the source column. Epoch-millis
    // timestamps are converted to formatted strings via FROM_UNIXTIME
    // (seconds), and lastTime is the job duration in milliseconds.
    val insertSql = "insert into yarnapplication_monitor_data \nselect \nid, \n`user`,\nname,\nqueue,\nstate,\nfinalStatus,\ntrackingUrl,\napplicationType,\nFROM_UNIXTIME(startedTime/1000),\nFROM_UNIXTIME(finishedTime/1000),\n(finishedTime-startedTime) as lastTime,\nmemorySeconds,\nvcoreSeconds\nfrom FlinkYarnData"

    tEnv.executeSql(sourceKafka)
    tEnv.executeSql(sinkMysql)
    // executeSql on an INSERT submits the streaming job asynchronously.
    tEnv.executeSql(insertSql)

  }
}
