package com.fwmagic.flink.projectcase.utils;

import org.apache.flink.api.common.serialization.DeserializationSchema;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.runtime.state.filesystem.FsStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer011;

import java.util.Arrays;
import java.util.List;
import java.util.Properties;

/**
 * 自定义Flink工具
 */
public class FlinkUtils {

    // Shared execution environment reused by every source created through this class.
    // NOTE(review): static mutable environment means all callers share one job graph.
    private static StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

    /**
     * Creates a Kafka-backed {@link DataStream} and configures exactly-once
     * checkpointing on the shared execution environment.
     *
     * <p>Recognized parameters:
     * <ul>
     *   <li>{@code bootstrap.servers} (required) — Kafka broker list</li>
     *   <li>{@code topics} (required) — comma-separated topic names</li>
     *   <li>{@code checkpointInterval} — checkpoint period in ms (default 5000;
     *       the legacy misspelled key {@code checkointInterval} is still honored)</li>
     *   <li>{@code stateBackendPath} — checkpoint storage URI
     *       (default {@code file:///Users/fangwei/Downloads/chk})</li>
     *   <li>{@code group.id} (default {@code gp11}), {@code auto.offset.reset}
     *       (default {@code earliest}), {@code enable.auto.commit} (default {@code false})</li>
     * </ul>
     *
     * @param parameterTool source of job parameters, also registered as global job parameters
     * @param clazz         deserialization schema class; must have an accessible no-arg constructor
     * @param <T>           element type produced by the deserialization schema
     * @return a DataStream reading from the configured Kafka topics
     * @throws Exception if a required parameter is missing or the schema cannot be instantiated
     */
    public static <T> DataStream<T> createKafkaSource(ParameterTool parameterTool, Class<? extends DeserializationSchema<T>> clazz) throws Exception {
        // Make the parameters visible to all operators at runtime.
        env.getConfig().setGlobalJobParameters(parameterTool);
        //env.setParallelism(4);

        // Enable checkpointing (which also enables the default restart strategy).
        // Fix: the original key "checkointInterval" was misspelled, so the correctly
        // spelled key was ignored. Read the correct key first, fall back to the old
        // misspelled key for backward compatibility, then to 5000 ms.
        long checkpointInterval = parameterTool.getLong("checkpointInterval",
                parameterTool.getLong("checkointInterval", 5000L));
        env.enableCheckpointing(checkpointInterval, CheckpointingMode.EXACTLY_ONCE);

        // State backend location is configurable; defaults to the original hard-coded path.
        env.setStateBackend(new FsStateBackend(
                parameterTool.get("stateBackendPath", "file:///Users/fangwei/Downloads/chk")));
        // Retain externalized checkpoints when the job is cancelled so it can be restored later.
        env.getCheckpointConfig().enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);

        Properties prop = new Properties();
        prop.setProperty("bootstrap.servers", parameterTool.getRequired("bootstrap.servers"));
        prop.setProperty("group.id", parameterTool.get("group.id", "gp11"));
        // Start from the earliest offsets when no committed offset exists.
        prop.setProperty("auto.offset.reset", parameterTool.get("auto.offset.reset", "earliest"));
        // Flink tracks offsets through its checkpoints; disable Kafka's auto-commit.
        prop.setProperty("enable.auto.commit", parameterTool.get("enable.auto.commit", "false"));

        String topics = parameterTool.getRequired("topics");
        List<String> topicList = Arrays.asList(topics.split(","));

        // Class.newInstance() is deprecated since Java 9 (it swallows checked
        // constructor exceptions); invoke the no-arg constructor explicitly.
        FlinkKafkaConsumer011<T> kafkaSource = new FlinkKafkaConsumer011<>(topicList,
                clazz.getDeclaredConstructor().newInstance(),
                prop);
        return env.addSource(kafkaSource);
    }

    /**
     * Returns the shared execution environment used by this utility.
     *
     * @return the singleton {@link StreamExecutionEnvironment}
     */
    public static StreamExecutionEnvironment getEnv() {
        return env;
    }

    public static void main(String[] args) throws Exception {
        /*
        // Read parameters via ParameterTool, either from command-line args
        // or from a properties file:
        ParameterTool parameterTool = ParameterTool.fromArgs(args);
        System.out.println(parameterTool.get("host","localhost"));
        System.out.println(parameterTool.getRequired("topic"));
        System.out.println(parameterTool.getInt("port",8080));

        ParameterTool parameterTool = ParameterTool.fromPropertiesFile(args[0]);
        System.out.println(parameterTool.getRequired("bootstrap.servers"));
        System.out.println(parameterTool.getRequired("topics"));
        System.out.println(parameterTool.getRequired("group.id"));
        System.out.println(parameterTool.get("auto.offset.reset"));
        System.out.println(parameterTool.get("enable.auto.commit"));
        */

    }
}
