package com.yanggu.flink.datastream_api.parametertool

import org.apache.flink.api.common.functions.RichMapFunction
import org.apache.flink.api.java.utils.ParameterTool
import org.apache.flink.api.scala.ExecutionEnvironment
import org.apache.flink.api.scala._
import org.apache.flink.configuration.Configuration

/**
 * A walkthrough of Flink's ParameterTool: reading configuration from
 * command-line args, JVM system properties, environment variables, and
 * properties files, plus registering global job parameters.
 *
 * https://www.jianshu.com/p/3dcbd1b241a1
 * https://nightlies.apache.org/flink/flink-docs-release-1.14/zh/docs/dev/datastream/application_parameters/
 */
object ParameterToolDemo {

  /**
   * Demonstrates the four ways of building a [[ParameterTool]] and how to
   * register it as global job parameters so that any RichFunction can read
   * it from its RuntimeContext.
   */
  def main(args: Array[String]): Unit = {
    // 1. Read parameters from the command line,
    //    e.g. --input hdfs:///mydata --elements 42
    val argsParams = ParameterTool.fromArgs(args)
    println(argsParams.get("input", "hdfs://mydata"))

    // 2. Read parameters from JVM system properties,
    //    e.g. -Dinput=hdfs:///mydata
    val systemParams = ParameterTool.fromSystemProperties()
    println(systemParams.get("input"))

    // 3. Read parameters from environment variables.
    val envParams = ParameterTool.fromMap(System.getenv())
    println(envParams.get("JAVA_HOME"))

    // 4. Read parameters from a properties file on the classpath.
    //    The leading '/' is required and the file must live under resources/.
    //    Use getResourceAsStream rather than getPath: getPath breaks once the
    //    application is packaged into a jar, because the resource is then no
    //    longer a plain file on disk.
    val propsStream = getClass.getResourceAsStream("/test.properties")
    // Guard against a missing resource: fromPropertiesFile(null) would throw
    // an opaque NullPointerException instead of a helpful message.
    if (propsStream != null) {
      val fileParams = ParameterTool.fromPropertiesFile(propsStream)
      println(fileParams.get("test"))
    } else {
      println("test.properties not found on the classpath; skipping step 4")
    }

    // 5. Register the parameters as global job parameters.
    val environment = ExecutionEnvironment.getExecutionEnvironment
    environment.getConfig.setGlobalJobParameters(argsParams)

    // Fix: the original code only constructed the RichMapFunction without
    // attaching it to a job, so open() never ran and the global-parameter
    // lookup was never demonstrated. Run it through a minimal batch job;
    // in the DataSet API, print() triggers execution (no execute() needed).
    environment
      .fromElements("flink")
      .map(new RichMapFunction[String, String] {

        override def open(parameters: Configuration): Unit = {
          // Retrieve the globally registered parameters from the runtime context.
          val globalParams = getRuntimeContext.getExecutionConfig
            .getGlobalJobParameters.asInstanceOf[ParameterTool]
          println(globalParams.get("input", "hdfs://hadoop100:9000/abc"))
        }

        // Identity mapping — the interesting work happens in open().
        override def map(value: String): String = value

      })
      .print()
  }

}
