package com.autoai

import java.sql.{Connection, DriverManager, PreparedStatement}
import java.util.Properties

import com.autoai.entity.{YarnApp, YarnApplication}
import com.autoai.util.{HttpClientUtils, JsUtils, StringUtiltil}
import net.sf.json.JSONObject
import org.apache.flink.api.common.functions.FilterFunction
import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.configuration.Configuration
import org.apache.flink.streaming.api.environment.CheckpointConfig
import org.apache.flink.streaming.api.functions.sink.{RichSinkFunction, SinkFunction}
import org.apache.flink.streaming.api.scala._
import org.apache.flink.streaming.api.{CheckpointingMode, scala}
import org.apache.flink.streaming.connectors.kafka.{FlinkKafkaConsumer, FlinkKafkaProducer}


object FlinkToKafka {

  /**
   * Flink job: consume YARN ResourceManager log lines from a Kafka topic, keep the
   * "ApplicationSummary" lines, extract the applicationId, fetch that application's
   * JSON from the YARN REST API, and publish the "app" JSON object to a sink topic.
   *
   * Expected arguments:
   *   args(0) Kafka broker list            e.g. 10.30.23.45:9092
   *   args(1) consumer group id            e.g. consumer-group-15
   *   args(2) YARN REST apps base URL      e.g. http://tsp-namenode-01:8088/ws/v1/cluster/apps/
   *   args(3) source Kafka topic
   *   args(4) sink Kafka topic
   *   args(5) Flink job name
   */
  def main(args: Array[String]): Unit = {
    // Fail fast with a usage message instead of an opaque ArrayIndexOutOfBoundsException.
    require(
      args.length >= 6,
      "Usage: FlinkToKafka <brokers> <groupId> <yarnAppsUrl> <sourceTopic> <sinkTopic> <jobName>")

    val env = StreamExecutionEnvironment.getExecutionEnvironment

    // --- checkpoint configuration ---
    val checkPoint = env.getCheckpointConfig
    // Trigger a checkpoint every 10 seconds.
    env.enableCheckpointing(10000)
    // Exactly-once mode to prevent duplicate records on recovery.
    checkPoint.setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE)
    // Leave at least 5 seconds between the end of one checkpoint and the start of the next.
    checkPoint.setMinPauseBetweenCheckpoints(5000)
    // Abandon any checkpoint that takes longer than one minute.
    checkPoint.setCheckpointTimeout(60000)
    // Keep externalized checkpoints when the job is cancelled, so it can be restored.
    checkPoint.enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION)

    // --- Kafka consumer configuration ---
    val properties = new Properties()
    properties.setProperty("bootstrap.servers", args(0))
    properties.setProperty("group.id", args(1))
    properties.setProperty("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer")
    properties.setProperty("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer")
    properties.setProperty("auto.offset.reset", "latest")

    val stream: DataStream[String] =
      env.addSource(new FlinkKafkaConsumer[String](args(3), new SimpleStringSchema(), properties))

    val result = stream
      // Keep only the ResourceManager "application summary" log lines.
      .filter(_.contains("RMAppManager$ApplicationSummary"))
      .uid("datafilter001")
      .map { data =>
        println(data)
        // Assumes every summary line contains "appId=<id>,"; a malformed line
        // would throw here and fail the task — TODO confirm upstream format.
        val applicationId = data.split("appId=")(1).split(",")(0)
        println("::::" + applicationId)
        // Query the YARN REST API for this application's details.
        val url = args(2) + applicationId
        val response = HttpClientUtils.get(url)
        println(response)
        // Extract the nested "app" object from the REST response.
        JSONObject.fromObject(response).get("app").toString
      }
      .uid("getappdata001")

    result
      .addSink(new FlinkKafkaProducer[String](args(0), args(4), new SimpleStringSchema()))
      .uid("flinktokafka001")

    env.execute(args(5))
  }
}

//class MyJDBCSinkFuc extends RichSinkFunction[YarnApplication] {
//
//  // Connection and prepared statements (initialized in open(), released in close())
//  var conn: Connection = _
//  var insertStmt: PreparedStatement = _
//  var updateStmt: PreparedStatement = _
//
//  override def invoke(value: YarnApplication, context: SinkFunction.Context[_]): Unit = {
//    insertStmt.setInt(1, 1)
//    insertStmt.setString(2, value.getName)
//    insertStmt.setString(3, value.getName)
//    insertStmt.execute()
//  }
//
//  override def close(): Unit = {
//    insertStmt.close()
//    updateStmt.close()
//    conn.close()
//  }
//
//  override def open(parameters: Configuration): Unit = {
//    conn = DriverManager.getConnection("jdbc:mysql://10.30.23.48:3306/monitor", "root", "123456")
//    insertStmt = conn.prepareStatement("insert into monitor_test (id,appId,name) values(?,?,?)")
//
//  }
//}
