package com.yanggu.flink.datastream_api.apollo

import com.ctrip.framework.apollo.ConfigService
import com.yanggu.flink.datastream_api.pojo.SensorReading
import org.apache.flink.api.java.utils.ParameterTool
import org.apache.flink.configuration.Configuration
import org.apache.flink.streaming.api.functions.sink.{RichSinkFunction, SinkFunction}
import org.apache.flink.streaming.api.scala._

import java.util

//Flink程序使用apollo配置中心步骤
//1. 添加apollo的client依赖
//2. 在resources目录下新建META-INF目录, 新建app.properties文件
//需要配置app.id、apollo.bootstrap.enabled、apollo.bootstrap.namespaces
//3. 编写代码获取Apollo中的所有配置, 封装成Map
//4. 把Map注册到Flink中(ParameterTool)
//5. 在flink提交命令中增加JVM参数
//如何在flink的命令行中添加taskmanager的JVM参数。参考https://nightlies.apache.org/flink/flink-docs-release-1.14/docs/deployment/config/#env-java-opts-taskmanager
//启动命令示例
//./flink run -c com.yanggu.flink.datastream_api.apollo.FlinkApolloConfigDemo -Denv.java.opts.taskmanager="-Dapollo.meta=http://rdfa-cfg-configservice.app.localidc.io/ -Denv=PRO" /opt/module/flink-1.14.0/flink-learn-1.0-SNAPSHOT-jar-with-dependencies.jar
object FlinkApolloConfigDemo {

  //All configuration pulled from Apollo, flattened into one Map.
  //If the same key exists in several namespaces, later namespaces overwrite earlier ones.
  private lazy val map: java.util.Map[String, String] = {
    //Must read via getResourceAsStream rather than getPath: once the app is packed into a
    //jar, getPath points inside the archive and the file cannot be opened as a plain file.
    val in = getClass.getResourceAsStream("/META-INF/app.properties")
    //getResourceAsStream returns null when the resource is absent; fail fast with a clear
    //message instead of an opaque NPE deep inside ParameterTool.
    require(in != null, "classpath resource /META-INF/app.properties not found")
    val tool =
      try {
        ParameterTool.fromPropertiesFile(in)
      } finally {
        //ParameterTool.fromPropertiesFile(InputStream) does not close the stream itself.
        in.close()
      }
    val namespaces = tool.get("apollo.bootstrap.namespaces", "application")
    val result = new util.HashMap[String, String]
    namespaces.split(",").foreach { namespace =>
      //trim so that a human-friendly list like "app, db" still resolves each namespace
      val config = ConfigService.getConfig(namespace.trim)
      config.getPropertyNames.forEach { key =>
        result.put(key, config.getProperty(key, ""))
      }
    }
    println(result)
    result
  }

  def main(args: Array[String]): Unit = {

    val env = StreamExecutionEnvironment.getExecutionEnvironment

    //Register the Apollo configuration as global job parameters so every operator
    //instance (on any TaskManager) can read it through its runtime context.
    env.getConfig.setGlobalJobParameters(ParameterTool.fromMap(map))

    val stream = env.fromCollection(List(
      SensorReading("sensor_1", System.currentTimeMillis(), 35.8),
      SensorReading("sensor_2", System.currentTimeMillis(), 45.8),
      SensorReading("sensor_3", System.currentTimeMillis(), 65.8)
    ))

    stream.addSink(new RichSinkFunction[SensorReading] {

      override def open(parameters: Configuration): Unit = {
        //Global job parameters are shipped to each task; they come back as the
        //GlobalJobParameters base type, so cast to the concrete ParameterTool.
        val tool = getRuntimeContext.getExecutionConfig.getGlobalJobParameters.asInstanceOf[ParameterTool]
        println(tool.get("testKey"))
      }

      override def invoke(value: SensorReading, context: SinkFunction.Context): Unit = {
        println(s"Sink>>>> $value")
      }
    }).setParallelism(1)

    env.execute("FlinkApolloConfigDemo Job")

  }

}
