package com.zyx.flinkdemo.stream.param;


import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.kafka.clients.consumer.ConsumerConfig;

import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;
import java.util.Set;

/**
 * Examples of using Flink's {@code ParameterTool} to read configuration from
 * CLI arguments, JVM system properties, and properties files.
 *
 * @author zyx
 * @since 2021/5/21 20:09
 * reference:
 *  https://ci.apache.org/projects/flink/flink-docs-release-1.13/zh/docs/dev/datastream/application_parameters/
 */
@Slf4j
@Slf4j
public class ParameterToolDemo {
    public static void main(String[] args) throws Exception {
        getArgsProp(args);
    }

    /**
     * Reads parameters passed on job submission and runs a small Kafka-backed pipeline
     * that appends the {@code testinput} value to every consumed record.
     *
     * <p>Parameters are given as {@code --key value} pairs AFTER the jar on the submit
     * command line, e.g. {@code --input hdfs:///mydata --elements 42}.
     *
     * @param args raw CLI arguments forwarded from {@link #main(String[])}
     * @throws Exception if the Flink job fails to build or execute
     */
    private static void getArgsProp(String[] args) throws Exception {
        // 1. Create the streaming execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // 2. Parse submit-time CLI arguments; "hehehe" is the fallback when
        //    --testinput is not supplied.
        ParameterTool params = ParameterTool.fromArgs(args);
        String input = params.get("testinput", "hehehe");
        log.info("++++++++++++++++++++ input >> {}", input);
        // 3. Create the KafkaConsumer. NOTE(review): the broker list mixes two
        //    subnets (192.168.31.x and 10.2.13.x) — confirm it is intentional.
        Properties prop = new Properties();
        String kafkaServer = "192.168.31.201:9092,192.168.31.202:9092,10.2.13.203:9092";
        prop.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaServer);
        prop.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "wc_consumer_21052202");
        prop.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest");
        FlinkKafkaConsumer<String> kafkaConsumer =
                new FlinkKafkaConsumer<>("kafka_topic_wc", new SimpleStringSchema(), prop);
        // 4. Add the Kafka source to the topology.
        DataStreamSource<String> kafkaSource = env.addSource(kafkaConsumer);
        // 5. Transform each record (append the CLI-provided suffix) and print it.
        kafkaSource.map(new ParamToolMapFunction(input)).print();

        // 6. Execute the job (blocks until the streaming job terminates).
        env.execute();
    }


    /**
     * Reads JVM system properties via {@link ParameterTool#fromSystemProperties()}
     * and logs every key/value pair.
     *
     * <p>System properties are given with {@code -D} BEFORE the jar on the submit
     * command line, e.g. {@code -DinputDir=hdfs:///xxxxxx}.
     */
    private static void getSystemProp() {
        ParameterTool systemProp = ParameterTool.fromSystemProperties();
        Properties properties = systemProp.getProperties();
        Set<String> keys = properties.stringPropertyNames();
        for (String key : keys) {
            log.info("++++++++++++++++++++ key => {} value => {}", key, properties.getProperty(key));
        }
    }

    /**
     * Reads a classpath properties file via {@link ParameterTool#fromPropertiesFile(InputStream)}
     * and logs every key/value pair.
     *
     * @throws IOException if the resource is missing or cannot be parsed
     */
    private static void getFromPropFile() throws IOException {
        // try-with-resources: the original never closed the stream (resource leak).
        try (InputStream propInputStream = ParameterToolDemo.class
                .getClassLoader().getResourceAsStream("log4j.properties")) {
            // getResourceAsStream returns null when the resource is absent; fail with
            // a clear message instead of an NPE deep inside fromPropertiesFile.
            if (propInputStream == null) {
                throw new IOException("Resource 'log4j.properties' not found on classpath");
            }
            ParameterTool inputProp = ParameterTool.fromPropertiesFile(propInputStream);
            Properties properties = inputProp.getProperties();
            for (String key : properties.stringPropertyNames()) {
                log.info("++++++++++++++++++++ key => {} value => {}", key, properties.getProperty(key));
            }
        }
    }

    /**
     * Appends the constructor-supplied suffix to each incoming record,
     * separated by "===========", and logs the result.
     */
    private static class ParamToolMapFunction implements MapFunction<String, String> {

        // Suffix captured at job-build time; serialized with the function.
        private final String input;

        public ParamToolMapFunction(String input) {
            this.input = input;
        }

        @Override
        public String map(String value) throws Exception {
            String resultStr = StringUtils.join(value, "===========", input);
            log.info("++++++++++++++++++++ {}", resultStr);
            return resultStr;
        }
    }
}
