package com.navinfo.platform.trip.analysis.flink.config;

import com.navinfo.platform.trip.analysis.flink.TripAnalysisStreaming;
import com.navinfo.platform.trip.common.enums.SystemTypeEnum;
import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.DynamicCodeLoadingException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.net.URISyntaxException;

/**
 * 系统的整体配置
 * @author: web
 * @data: 2020-04-07
 **/
/**
 * Overall system configuration for the real-time trip-analysis job: builds
 * and tunes the Flink {@link StreamExecutionEnvironment} and loads the
 * application properties file from the classpath or from the jar's directory,
 * with command-line arguments overriding file values.
 *
 * @author web
 * @since 2020-04-07
 */
public class SystemConfig {
    // Logger is bound to SystemConfig.class (previously TripAnalysisStreaming.class,
    // which mislabeled every log line emitted from this class).
    private static final Logger logger = LoggerFactory.getLogger(SystemConfig.class);

    /**
     * Name of the directory holding the configuration files
     * (relative to the classpath root or the jar's directory).
     */
    private static final String CONFIG_PATH = "config";

    /**
     * Property key selecting the runtime system type (1东风, 2青汽, 3一汽).
     */
    public static final String systemTypeKey = "system.type";

    /**
     * Property key for the minimum trip duration.
     */
    public static final String tripMinDurationKey = "trip.min.duration";

    /** Utility class — not meant to be instantiated. */
    private SystemConfig() {
    }

    /**
     * Initializes the system configuration and returns a streaming environment
     * with checkpointing, execution parameters and time characteristic applied.
     * <p>
     * Exits the JVM with status 1 when the mandatory {@code system.type}
     * parameter is missing or 0.
     *
     * @param config merged application parameters (see {@link #load(String[])})
     * @return the configured {@link StreamExecutionEnvironment}
     * @throws IOException                 declared for callers; not thrown directly here
     * @throws DynamicCodeLoadingException declared for callers; not thrown directly here
     */
    public static StreamExecutionEnvironment init(ParameterTool config) throws IOException, DynamicCodeLoadingException {
        // Validate the mandatory system-type parameter.
        int systemType = config.getInt(systemTypeKey, 0);
        if (0 == systemType) {
            // Parameterized logging instead of string concatenation; message text unchanged.
            logger.error("请配置实时驾驶行为分析系统类型{}：1东风，2青汽，3一汽", systemTypeKey);
            System.exit(1);
        }

        TripAnalysisStreaming.systemType = SystemTypeEnum.valueOf(systemType);
        logger.info("实时驾驶行为分析，系统类型：{},{}", TripAnalysisStreaming.systemType.getType(), TripAnalysisStreaming.systemType.getDesc());

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Checkpoint settings: exactly-once every 5 minutes, 5-minute timeout,
        // at least 1 minute between checkpoints; retain externalized checkpoints
        // on cancellation so a cancelled job can still be restored manually.
        CheckpointConfig checkpointConfig = env.getCheckpointConfig();
        checkpointConfig.setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);
        checkpointConfig.setCheckpointInterval(Time.minutes(5).toMilliseconds());
        checkpointConfig.setCheckpointTimeout(Time.minutes(5).toMilliseconds());
        checkpointConfig.setMinPauseBetweenCheckpoints(Time.minutes(1).toMilliseconds());
        checkpointConfig.setPreferCheckpointForRecovery(true);
        checkpointConfig.enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);

        // StateBackend configuration (disabled). If HDFS permissions are a problem,
        // set the environment variable HADOOP_USER_NAME=hdfs.
//		Configuration stateConf = new Configuration();
//		stateConf.setString(CheckpointingOptions.STATE_BACKEND, "rocksdb");
//		stateConf.setBoolean(CheckpointingOptions.INCREMENTAL_CHECKPOINTS, true);
//		stateConf.setString(CheckpointingOptions.CHECKPOINTS_DIRECTORY, "hdfs:///flink/checkpoints/tripanalysis");
//		stateConf.setString(CheckpointingOptions.SAVEPOINT_DIRECTORY, "hdfs:///flink/savepoints/tripanalysis");
//		env.setStateBackend(StateBackendLoader.loadStateBackendFromConfig(stateConf, ClassLoader.getSystemClassLoader(), logger));

        // RocksDB tuning knobs kept for reference (disabled):
//        DefaultConfigurableOptionsFactory dbOptionsFactory = new DefaultConfigurableOptionsFactory();
//        // state.backend.rocksdb.block.blocksize: user-data size packed per block, default 4KB;
//        // larger values reduce memory use but hurt read performance.
//        dbOptionsFactory.setBlockSize("4KB");
//        // state.backend.rocksdb.block.cache-size: block cache size for the whole db, default 8MB;
//        // increase when the workload re-reads the same data.
//        dbOptionsFactory.setBlockCacheSize("8MB");
//        // state.backend.rocksdb.writebuffer.count: write buffers per column family, default 2.
//        dbOptionsFactory.setMaxWriteBufferNumber(2);
//        // state.backend.rocksdb.writebuffer.size: size of each write buffer, default 64MB;
//        // increase for write-heavy workloads.
//        dbOptionsFactory.setWriteBufferSize("64MB");
//        // state.backend.rocksdb.writebuffer.number-to-merge: buffers merged before a write, default 1.
//        dbOptionsFactory.setMinWriteBufferNumberToMerge(1);
//        // state.backend.rocksdb.files.open: files the DB may open per TaskManager, -1 = unlimited, default 5000.
//        dbOptionsFactory.setMaxOpenFiles(5000);
//        // state.backend.rocksdb.thread.num: background flush/compaction threads, default 1.
//        dbOptionsFactory.setMaxBackgroundThreads(1);
//        dbOptionsFactory.setUseDynamicLevelSize(false);

        // Execution parameters: cap max parallelism and compress state snapshots.
        ExecutionConfig executionConfig = env.getConfig();
        executionConfig.setMaxParallelism(500);
        executionConfig.setUseSnapshotCompression(true);
        //executionConfig.setParallelism(30);
        //executionConfig.setRestartStrategy(RestartStrategies.fixedDelayRestart(3, Time.of(10, TimeUnit.SECONDS)));
        // Maximum wait before a not-yet-full network buffer is flushed; default 100ms.
        //env.setBufferTimeout(100);

        // Use event time for all time-based operations.
        env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);

        // Kryo serializers for protobuf data (currently disabled).
        //env.getConfig().registerTypeWithKryoSerializer(RealTimeDataPb.RealTimeData.class, ProtobufSerializer.class);
        //env.getConfig().registerTypeWithKryoSerializer(LCLocationData.LocationData.class, ProtobufSerializer.class);

        return env;
    }

    /**
     * Loads the Flink configuration file located next to the jar and overrides
     * its values with command-line arguments ({@code --key value} or {@code -key value}).
     *
     * @param args command-line arguments
     * @return merged configuration
     * @throws URISyntaxException if the jar location cannot be converted to a URI
     * @throws IOException        if the properties file cannot be read
     */
    public static ParameterTool load(String[] args) throws URISyntaxException, IOException {
        return loadJarPathConfig(args, "config.properties");
    }

    /**
     * Loads a configuration file from the classpath ({@code config/<filePath>})
     * and overrides its values with command-line arguments
     * ({@code --key value} or {@code -key value}).
     *
     * @param args     command-line arguments
     * @param filePath file name, resolved under {@link #CONFIG_PATH}
     * @return merged configuration of the given file and {@code args}
     * @throws URISyntaxException declared for signature compatibility; not thrown here
     * @throws IOException        if the resource is missing or cannot be read
     */
    public static ParameterTool loadClassPathConfig(String[] args, String filePath) throws URISyntaxException, IOException {
        // CONFIG_PATH is a non-empty constant, so the prefix is always applied;
        // the guard only matters if the constant is ever changed.
        filePath = !CONFIG_PATH.isEmpty() ? CONFIG_PATH + File.separator + filePath : filePath;

        // Fail fast with a clear message instead of an NPE when the resource is
        // missing, and close the stream (ParameterTool does not close it).
        InputStream resourceStream = Thread.currentThread().getContextClassLoader().getResourceAsStream(filePath);
        if (resourceStream == null) {
            throw new FileNotFoundException("classpath resource not found: " + filePath);
        }
        ParameterTool fileParameter;
        try (InputStream in = resourceStream) {
            fileParameter = ParameterTool.fromPropertiesFile(in);
        }
        ParameterTool argsParameter = ParameterTool.fromArgs(args);

        // Command-line values override values from the file.
        ParameterTool parameterTool = fileParameter.mergeWith(argsParameter);

        logger.info("加载[{}]的配置开始......", filePath);
        parameterTool.toMap().forEach((s, s2) -> logger.info(">>>{}--{}", s, s2));
        // Was logger.error — this is a normal progress message, not an error.
        logger.info("加载[{}]的配置结束......", filePath);
        return parameterTool;
    }

    /**
     * Loads a configuration file from the directory containing the running jar
     * ({@code <jarDir>/config/<filePath>}) and overrides its values with
     * command-line arguments ({@code --key value} or {@code -key value}).
     *
     * @param args     command-line arguments
     * @param filePath file name, resolved under {@link #CONFIG_PATH}
     * @return merged configuration of the given file and {@code args}
     * @throws URISyntaxException if the jar location cannot be converted to a URI
     * @throws IOException        if the properties file cannot be read
     */
    public static ParameterTool loadJarPathConfig(String[] args, String filePath) throws URISyntaxException, IOException {
        filePath = !CONFIG_PATH.isEmpty() ? CONFIG_PATH + File.separator + filePath : filePath;
        // Directory that contains the jar (or classes dir) this class was loaded from.
        File directory = new File(TripAnalysisStreaming.class.getProtectionDomain().getCodeSource().getLocation().toURI().getPath()).getParentFile();

        // try-with-resources: the stream was previously leaked
        // (ParameterTool does not close the stream it is given).
        ParameterTool fileParameter;
        try (FileInputStream in = new FileInputStream(directory.getAbsolutePath() + File.separator + filePath)) {
            fileParameter = ParameterTool.fromPropertiesFile(in);
        }
        ParameterTool argsParameter = ParameterTool.fromArgs(args);

        // Command-line values override values from the file.
        ParameterTool parameterTool = fileParameter.mergeWith(argsParameter);

        logger.info(">>>加载[{}]的配置开始:>>>>>>", filePath);
        parameterTool.toMap().forEach((s, s2) -> logger.info(">>>>>>{}----{}", s, s2));
        logger.info(">>>加载[{}]的配置结束：>>>>>>", filePath);
        return parameterTool;
    }
}
