package cn.gwm.flink.streaming.task;

import cn.gwm.flink.streaming.beans.GwmKafkaJson;
import cn.gwm.flink.streaming.constant.ChargePredictFields;
import cn.gwm.flink.streaming.constant.DefaultConstant;
import cn.gwm.flink.streaming.function.map.ChargePredictResStandardMapFunction;
import cn.gwm.flink.streaming.function.map.ChargePredictSrcStandardMapFunction;
import cn.gwm.flink.streaming.function.source.ChargePredictMysqlSourceFunction;
import cn.gwm.flink.streaming.sink.kafka.FlinkKafkaUtil;
import cn.gwm.flink.streaming.strategy.cloudpredict.CloudPredictProcessFunction;
import cn.gwm.flink.streaming.strategy.cloudpredict.LongLat;
import cn.gwm.flink.streaming.strategy.vehiclepredict.ChargePredictProducer;
import cn.gwm.flink.streaming.strategy.vehiclepredict.DwsStrConstant;
import com.alibaba.fastjson.JSONObject;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DateFormatUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.typeutils.MapTypeInfo;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.contrib.streaming.state.EmbeddedRocksDBStateBackend;
import org.apache.flink.core.fs.Path;
import org.apache.flink.runtime.checkpoint.CheckpointFailureManager;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.BroadcastStream;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.BroadcastProcessFunction;
import org.apache.flink.streaming.api.functions.source.RichSourceFunction;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.util.Collector;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringSerializer;

import java.io.File;
import java.util.List;
import java.util.Map;
import java.util.Properties;

/**
 * @Author: GW00280745
 * @Date: 2023/5/9 15:20
 * @Description: <云端预测充电时长计算_V1.0.pdf> 相关功能实现
 *
 *  kafka-topics --zookeeper bd-zcpt-bd-zcpt-test-000001:2181  --topic dws_multidimensional_obc_predict_source --create --replication-factor 1 --partitions 3
 *
 *  kafka-topics --zookeeper bd-zcpt-bd-zcpt-test-000001:2181  --topic dws_multidimensional_obc_predict_target --alter --partitions 3
 *
 *  kafka-topics --zookeeper bd-zcpt-bd-zcpt-test-000001:2181  --topic dws_multidimensional_obc_predict_target --delete
 *
 *  kafka-configs --zookeeper bd-zcpt-bd-zcpt-test-000001:2181 --alter --entity-name dws_multidimensional_obc_predict_source --entity-type topics --add-config retention.ms=1039228928
 *
 *  kafka-configs --zookeeper bd-zcpt-bd-zcpt-test-000001:2181 --describe --entity-name dws_multidimensional_obc_predict_source --entity-type topics
 *
 *  /usr/bin/kafka-producer-perf-test --producer-props bootstrap.servers=bd-zcpt-bd-zcpt-test-000003:9092 --topic dws_charge_predict --num-records 2000 --throughput 500 --payload-file /server/hives/mcp/bb
 *
 *  工具消费进度 kafka-consumer-groups --bootstrap-server bd-zcpt-bd-zcpt-test-000001:9092  --describe --group ZC-GROUP-TEST-bd-tool-3
 */
public class CloudPredictDwsTask {

    /**
     * Checkpoint storage directory (HDFS).
     * Alternatives kept for reference:
     *   1 - intranet:        hdfs://bd-zcpt-bd-zcpt-test-000002:8020/external/data/dws/dws_multidimensional_charge_cp
     *   2 - HA nameservice:  hdfs://nameservice1/external/data/dws/dws_multidimensional_charge_cp
     */
    private String cpDir = "hdfs://bd-zcpt-bd-zcpt-test-000007:8020/external/data/dws/dws_multidimensional_charge_cp";
    /**
     * Source topic carrying the charge-detail records (DWM layer).
     */
    private String dwmTopic = "dws_multidimensional_obc_predict_source";
    /**
     * Target topic receiving the prediction results (DWS layer); also used as the job name.
     */
    private String dwsTopic = "dws_multidimensional_obc_predict_target";

    /**
     * CLI parameter name controlling job parallelism, e.g. {@code --p 3}.
     */
    private String innerParamP = "p";

    /**
     * Toggle for the hard-coded test-VIN filter in {@link #debugVin(DataStream)}.
     * Was previously a literal {@code if (false)} inside the method.
     */
    private static final boolean DEBUG_VIN_FILTER = false;

    public static void main(String[] args) throws Exception {
        CloudPredictDwsTask estimateDwsTask = new CloudPredictDwsTask();
        ParameterTool parameterTool = ParameterTool.fromArgs(args);
        estimateDwsTask.topicSettings(parameterTool);
        // start: build the environment and load configuration (static + dynamic)
        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        if (parameterTool.has(estimateDwsTask.innerParamP)) {
            env.setParallelism(parameterTool.getInt(estimateDwsTask.innerParamP, 1));
        }
        estimateDwsTask.checkPoint(env);
        BroadcastStream<JSONObject> dynamicConfInfo = estimateDwsTask.dynamicConfInfo(env);
        // 1. standardize the source data (use readFile(env) instead for local debugging)
        DataStream<JSONObject> srcDataStream = estimateDwsTask.sourceStd(env);
        // temporary workaround: enrich records with longitude/latitude from a lookup file
        srcDataStream = estimateDwsTask.longLat(srcDataStream);
        // 2. process: key by VIN, connect the broadcast config, run the cloud prediction
        DataStream<JSONObject> processDataStream = srcDataStream
                .keyBy((KeySelector<JSONObject, Object>) value -> value.getString(ChargePredictFields.ins().srcFieldVin))
                .connect(dynamicConfInfo)
                .process(new CloudPredictProcessFunction());
        // 3. standardize the result schema
        DataStream<JSONObject> resultData = estimateDwsTask.dataStd(processDataStream);
        // 4. emit the result
        estimateDwsTask.outPutData(resultData);
        // end: launch the job, named after the target topic
        env.execute(estimateDwsTask.dwsTopic);
    }

    /**
     * Local-debug source: reads one vehicle's exported records from a text file and
     * maps the positional columns onto the standardized source-field names.
     * Lines are comma separated, with a tab-separated fallback.
     * NOTE(review): the column indices below mirror an export layout that is not
     * visible in this file -- confirm against the export format before relying on it.
     */
    private DataStream<JSONObject> readFile(StreamExecutionEnvironment env) {
        DataStreamSource<String> file = env.readTextFile("D:\\file_crt\\tmp\\LGWEEUA58NE201347.txt", "utf-8");
        SingleOutputStreamOperator<JSONObject> mapOperator = file.map(new MapFunction<String, JSONObject>() {
            @Override
            public JSONObject map(String s) throws Exception {
                // prefer comma separation; fall back to tab when the line has no commas
                String[] sp = s.split(",");
                if (sp.length == 1) {
                    sp = s.split("\t");
                }
                JSONObject srcInfo = new JSONObject();
                srcInfo.put("vin", sp[21]);
                srcInfo.put("tid", sp[22]);
                srcInfo.put("brand", sp[1]);
                srcInfo.put("vehicletype", sp[23]);
                srcInfo.put("batter_type", sp[11]);
                srcInfo.put("batter_capacity", null);
                srcInfo.put("source", "9");
                srcInfo.put("bms_chrg_sts", sp[3]);
                srcInfo.put("bms_batt_curr_rm", sp[4]);
                srcInfo.put("bms_rmc_module_temp_max", sp[5]);
                srcInfo.put("bms_rmc_module_temp_min", sp[6]);
                srcInfo.put("bms_dc_chrg_connect", sp[7]);
                srcInfo.put("obc_connect_sts_ev", sp[8]);
                srcInfo.put("bms_soc", sp[15]);
                srcInfo.put("soh_cor", sp[28]);
                srcInfo.put("bms_heatreq", sp[13]);
                srcInfo.put("bms_coolreq", sp[14]);
                srcInfo.put("tbox_batt_soc_lim", null);
                srcInfo.put("model_code", sp[26]);
                srcInfo.put("bms_cell_volt_max", sp[18]);
                srcInfo.put("bms_cell_volt_min", sp[19]);
                srcInfo.put("bms_charge_time", sp[27]);
                srcInfo.put("startChargeTime", sp[29]);
                srcInfo.put("latitude", "");
                srcInfo.put("longitude", "");
                srcInfo.put("mapId", "");
                // the export encodes commas inside the aoiid column as '#'
                srcInfo.put("aoiid", StringUtils.replace(sp[32], "#", ","));
                srcInfo.put("poiid", sp[36]);
                srcInfo.put("acambtemp", sp[31]);
                srcInfo.put("acopensts", sp[30]);
                srcInfo.put("bms_innersocmax", sp[16]);
                srcInfo.put("bms_innersocmin", sp[17]);
                srcInfo.put("bms_inlettemps", sp[31]);
                return srcInfo;
            }
        });
        return mapOperator;
    }

    /**
     * Hook for environment-specific topic overrides; currently it only logs the boot
     * profile. The {@code parameterTool} argument is kept for interface stability
     * even though it is not read yet.
     */
    private void topicSettings(ParameterTool parameterTool) {
        String bootActive = System.getenv(DefaultConstant.BOOT_PROFILES_ACTIVE);
        System.out.println("CloudPredictDwsTask-- boot_active ===== " + bootActive);
    }

    /**
     * Emits the final result stream. Only the Kafka sink is active; the Hive
     * persistence path is currently disabled.
     */
    private void outPutData(DataStream<JSONObject> resultData) {
        sink2Kafka(resultData);
    }

    /**
     * Wraps each result record in the common {@code GwmKafkaJson} envelope and
     * sends it to {@link #dwsTopic}.
     */
    public void sink2Kafka(DataStream<JSONObject> dataStream) {
        String kafkaServer = ChargePredictFields.ins().confInfo.getOrDefault("bootstrap.servers", "10.31.8.112:9092,10.31.8.113:9092,10.31.8.114:9092");
        Properties prodProps = new Properties();
        prodProps.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaServer);
        prodProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        prodProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        String topic = dwsTopic;
        System.out.println("CloudPredictDwsTask-- 目的地dwsTopic ===== " + topic);
        ChargePredictProducer producer = new ChargePredictProducer(prodProps, topic);
        dataStream
                // NOTE(review): the lambda emits GwmKafkaJson while returns() declares
                // JSONObject type info; the raw MapFunction cast masks the mismatch.
                // It works at runtime via generic serialization, but verify before tightening the types.
                .map((MapFunction) value -> {
                    JSONObject jsonObject = new JSONObject();
                    jsonObject.put(ChargePredictFields.ins().srcFieldSource, value);
                    return GwmKafkaJson.builder().timestamp(System.currentTimeMillis()).type("json").body(jsonObject).build();
                })
                .returns(Types.GENERIC(JSONObject.class))
                .addSink(producer)
                .name("sink2kafka");
    }

    /**
     * Maps processed records onto the standardized result schema defined by
     * {@code ChargePredictFields.multiResInfo}.
     */
    private DataStream<JSONObject> dataStd(DataStream<JSONObject> processDataStream) {
        JSONObject resJson = ChargePredictFields.ins().multiResInfo;
        ChargePredictResStandardMapFunction resStandard = new ChargePredictResStandardMapFunction(resJson);
        SingleOutputStreamOperator<JSONObject> standardized = processDataStream
                .map(resStandard)
                .returns(Types.GENERIC(JSONObject.class));
        return standardized;
    }

    /**
     * Builds the standardized source stream: consumes the charge-detail topic and
     * renames raw fields onto the unified source schema. Field-name mapping only --
     * no value transformation is done yet.
     *
     * @param env the stream execution environment
     * @return source stream of standardized JSON records (optionally debug-filtered)
     */
    private DataStream<JSONObject> sourceStd(StreamExecutionEnvironment env) {
        // 1. build the Kafka consumer
        String kafkaServer = ChargePredictFields.ins().confInfo.getOrDefault("bootstrap.servers", "10.31.8.112:9092,10.31.8.113:9092,10.31.8.114:9092");
        String topic = dwmTopic;
        String groupId = topic.concat("#1");
        System.out.println("CloudPredictDwsTask-- 来源dwmTopic ===== " + topic);
        System.out.println(String.format(" groupId ===== [ kafka-consumer-groups --bootstrap-server %s --describe --group %s", kafkaServer, groupId));
        FlinkKafkaConsumer<String> consumer = FlinkKafkaUtil.getConsumer(kafkaServer, groupId, topic);
        consumer.setStartFromEarliest();
        // 2. field-mapping table driving the standardization
        JSONObject srcJson = ChargePredictFields.ins().srcInfo;
        ChargePredictSrcStandardMapFunction srcStandard = new ChargePredictSrcStandardMapFunction(srcJson);
        // 3. read from Kafka and standardize the field names in a single map step
        DataStream<JSONObject> sourceDataStream = env
                .addSource(consumer)
                .map(srcStandard)
                .returns(Types.GENERIC(JSONObject.class));
        // optionally narrow to a single debug VIN (used to simulate es13 realtime data)
        return debugVin(sourceDataStream);
    }

    /**
     * Temporary workaround: enriches each record with longitude/latitude read from a
     * static lookup file. Candidate paths are tried in order (local-dev path first,
     * then the server path); failures on all but the last candidate are silent, the
     * last failure is printed, and the stream passes through un-enriched (best effort).
     */
    private DataStream<JSONObject> longLat(DataStream<JSONObject> src) {
        String[] candidates = {"D:\\file_crt\\tmp\\longlat.txt", "/vmdata/tmp/mcp/longlat.txt"};
        for (int i = 0; i < candidates.length; i++) {
            try {
                List<String> lines = FileUtils.readLines(new File(candidates[i]), "utf8");
                return src.map(new LongLat(lines));
            } catch (Exception e) {
                // only report the final failure, matching the original fallback behavior
                if (i == candidates.length - 1) {
                    e.printStackTrace();
                }
            }
        }
        return src;
    }

    /**
     * Debug-only filter: when {@link #DEBUG_VIN_FILTER} is enabled, the stream is
     * narrowed to one known VIN and charge-start time; otherwise it passes through
     * untouched.
     */
    private DataStream<JSONObject> debugVin(DataStream<JSONObject> sourceDataStream) {
        if (!DEBUG_VIN_FILTER) {
            return sourceDataStream;
        }
        return sourceDataStream.filter((FilterFunction<JSONObject>) value -> {
            String vin = value.getString(ChargePredictFields.ins().srcFieldVin);
            String startChargeTime = value.getString(ChargePredictFields.ins().srcFieldStartChargeTime);
            // records carry startChargeTime as epoch millis, so compare against the parsed instant
            long time = DateUtils.parseDate("2023-02-19 17:36:21", "yyyy-MM-dd HH:mm:ss").getTime();
            return StringUtils.equalsIgnoreCase(vin, "LGWEEUA51NC002667")
                    && StringUtils.equalsIgnoreCase(startChargeTime, String.valueOf(time));
        });
    }

    /**
     * Dynamic configuration feed: polls the web-side config table in MySQL and
     * broadcasts each refresh so downstream operators pick up changes at runtime.
     * NOTE(review): the fallback JDBC credentials are hard-coded here -- move them
     * to a secured configuration source.
     */
    private BroadcastStream<JSONObject> dynamicConfInfo(StreamExecutionEnvironment env) {
        String url = ChargePredictFields.ins().confInfo.getOrDefault("mysql.url", "jdbc:mysql://10.255.128.243:3306/vp_business?useUnicode=true&characterEncoding=utf8&useSSL=false");
        String user = ChargePredictFields.ins().confInfo.getOrDefault("mysql.user", "root");
        String password = ChargePredictFields.ins().confInfo.getOrDefault("mysql.password", "vaas_sql*");
        String sql = ChargePredictFields.ins().confInfo.getOrDefault("mysql.sql", "select topic,config_key,config_value from config_data where topic='chargePredict'");
        String timeout = ChargePredictFields.ins().confInfo.getOrDefault("mysql.interval_time", "60");
        MapStateDescriptor<String, Map<String, Object>> confInfo = new MapStateDescriptor<>("conf-info", BasicTypeInfo.STRING_TYPE_INFO, new MapTypeInfo<>(String.class, Object.class));
        // single-parallelism source: exactly one poller against the config database
        BroadcastStream<JSONObject> broadcastStream = env.addSource(new ChargePredictMysqlSourceFunction(url, user, password, sql, timeout))
                .setParallelism(1)
                .broadcast(confInfo);
        return broadcastStream;
    }

    /**
     * Checkpoint/restart configuration. Uses RocksDB with incremental checkpoints
     * (only the delta since the previous checkpoint is persisted), which keeps
     * large-state checkpoints from failing or timing out.
     */
    private void checkPoint(StreamExecutionEnvironment env) {
        // 1. state backend: RocksDB, incremental checkpoints enabled (the 'true' flag)
        env.setStateBackend(new EmbeddedRocksDBStateBackend(true));

        // 2. checkpointing
        CheckpointConfig config = env.getCheckpointConfig();
        // exactly-once checkpointing semantics (alternative: AT_LEAST_ONCE)
        config.setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);
        // trigger a checkpoint every 2 minutes (interval between checkpoint starts)
        config.setCheckpointInterval(120 * 1000L);
        // a checkpoint must complete within 1 minute or it is discarded
        config.setCheckpointTimeout(60 * 1000L);
        // at most one checkpoint in flight at a time
        config.setMaxConcurrentCheckpoints(1);
        // retain externalized checkpoints on cancel/failure so the job can be restored manually
        config.enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
        // checkpoint storage location (HDFS file-system storage)
        config.setCheckpointStorage(cpDir);
        // at least 100 s between the end of one checkpoint and the start of the next
        config.setMinPauseBetweenCheckpoints(100 * 1000);
        // restart strategy: up to 3 attempts, 10 s apart
        env.setRestartStrategy(RestartStrategies.fixedDelayRestart(3, Time.seconds(10)));
        // never fail the job because of checkpoint failures alone
        config.setTolerableCheckpointFailureNumber(CheckpointFailureManager.UNLIMITED_TOLERABLE_FAILURE_NUMBER);
    }
}
