package com.we.risk.registerchanneldeprecated.basicinfo.adm;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.we.flink.utils.WeKafkaPropertyReader;

import org.apache.flink.api.common.functions.RichFlatMapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.common.state.*;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.contrib.streaming.state.RocksDBStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import org.apache.flink.streaming.connectors.kafka.partitioner.FlinkKafkaPartitioner;
import org.apache.flink.streaming.util.serialization.KeyedSerializationSchema;
import org.apache.flink.util.Collector;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.util.Iterator;
import java.util.Map;
import java.util.Properties;
import java.util.Set;

public class AdmUserBasicInfo {
    public static final String INPUT_KEY_BY = "uid";
    public static final String OUTPUT_KEY_BY = "user_key";
    public static final String ID_COMBINE = "id_combine";
    //    public static final String REG_TIME = "register_time";
    //    public static final String FIRST_AUDIT_SUCCESS_TIME = "first_audit_success_time";
    //    public static final String LAST_AUDIT_TIME_BF_SUC ="last_audit_time_bf_audit_success";
    //    public static final String ORIGIN_NAME = "origin_name";
    public static final String RELEASEPROP =
            "risk/registerchannel/userbasicinfo/kfk_user_account_audit_prod.properties";
    public static final String USERLABELPROP =
            "risk/registerchannel/userbasicinfo/adm_user_basic_info_prod.properties";
    public static Logger LOG = LoggerFactory.getLogger(AdmUserBasicInfo.class);

    /**
     * Job entry point: wires a Kafka source through two keyed flatMap stages and back to Kafka.
     *
     * <p>Pipeline: consume account/audit events -> keyBy {@code uid} ->
     * {@link AdmUserBasicInfoFlatMapFunc} (merge basic info, fan out per id_combine) ->
     * keyBy {@code user_key + id_combine} -> {@link UserLabelRichFlatMapFunc}
     * (emit only changed labels) -> Kafka sink keyed by {@code user_key_id_combine}.
     *
     * @param args command-line arguments (unused)
     * @throws IOException declared for signature compatibility; all startup and runtime
     *     failures are caught and logged below
     */
    public static void main(String[] args) throws IOException {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        try {
            WeKafkaPropertyReader paramReader = WeKafkaPropertyReader.init(RELEASEPROP);
            /** RocksDB state backend; checkpoint storage URL comes from the properties file. */
            env.setStateBackend(new RocksDBStateBackend(paramReader.getRocksDBBackendUrl()));
            /** Checkpoint configuration: exactly-once, every 10 min, retained on cancel. */
            CheckpointConfig ckConf = env.getCheckpointConfig();
            ckConf.setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);
            ckConf.setCheckpointInterval(10 * 60 * 1000); // ms
            ckConf.setCheckpointTimeout(60 * 60 * 1000);
            ckConf.setMaxConcurrentCheckpoints(1);
            ckConf.setMinPauseBetweenCheckpoints(500);
            ckConf.enableExternalizedCheckpoints(
                    CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);

            ckConf.enableUnalignedCheckpoints();

            /** Kafka consumer built from the "tmp" source settings of the properties file. */
            String srcKafkaTopic = paramReader.getTmpKfkTopic();
            String srcKafkaBootStrapServer = paramReader.getTmpKfkBootStrapServer();
            String srckfkGrupId = paramReader.getTmpKfkGroupId();
            String srckfkOffset = paramReader.getTmpKfkOffset();

            Properties consumProp = new Properties();
            consumProp.setProperty("bootstrap.servers", srcKafkaBootStrapServer);
            consumProp.setProperty("group.id", srckfkGrupId);
            consumProp.setProperty("auto.offset.reset", srckfkOffset);
            consumProp.setProperty(
                    "key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
            consumProp.setProperty(
                    "value.deserializer",
                    "org.apache.kafka.common.serialization.StringDeserializer");

            FlinkKafkaConsumer<String> kfkSource =
                    new FlinkKafkaConsumer<String>(
                            srcKafkaTopic, new SimpleStringSchema(), consumProp);
            DataStreamSource<String> input = env.addSource(kfkSource);

            SingleOutputStreamOperator<String> out =
                    input.uid("AdmUserBasicinfo-input")
                            // Stage 1 key: raw uid from the incoming JSON.
                            .keyBy(
                                    new KeySelector<String, String>() {
                                        @Override
                                        public String getKey(String value) throws Exception {
                                            return JSONObject.parseObject(value)
                                                    .getString(INPUT_KEY_BY);
                                        }
                                    })
                            .flatMap(new AdmUserBasicInfoFlatMapFunc())
                            // Stage 2 key: user_key + id_combine ("null" placeholder when the
                            // record has no id_combine) so label dedup is per (user, id) pair.
                            .keyBy(
                                    new KeySelector<String, String>() {
                                        @Override
                                        public String getKey(String value) throws Exception {
                                            JSONObject inputJson = JSONObject.parseObject(value);
                                            String userKey = inputJson.getString(OUTPUT_KEY_BY);
                                            String idCombine = inputJson.getString(ID_COMBINE);
                                            idCombine = idCombine == null ? "null" : idCombine;
                                            return userKey + idCombine;
                                        }
                                    })
                            .flatMap(new UserLabelRichFlatMapFunc());

            /** Sink: custom key (user_key_id_combine) and hash partitioner. */
            String sinkkfkTopic = paramReader.getKfkTopic();
            Properties sinkProp = new Properties();
            sinkProp.setProperty("bootstrap.servers", paramReader.getKfkBootStrapServer());
            int sinkkfkPartitions = paramReader.getKfkPartitions();

            FlinkKafkaProducer<String> userLabelAllFlinkKafkaProducer =
                    new FlinkKafkaProducer<String>(
                            sinkkfkTopic,
                            new WeKafkaKeyedSerializationSchema(),
                            sinkProp,
                            java.util.Optional.of(new WeKafkaCustomPartitioner()));

            out.addSink(userLabelAllFlinkKafkaProducer).setParallelism(sinkkfkPartitions);

            // getName() gives "com.we...AdmUserBasicInfo"; class.toString() would prepend "class ".
            env.execute(AdmUserBasicInfo.class.getName());
        } catch (Exception e) {
            // Log the full stack trace (the previous getMessage()/printStackTrace() combo
            // dropped the trace from the log file).
            LOG.error("AdmUserBasicInfo job failed", e);
        }
    }

    /**
     * Stage 1 (keyed by uid): merges incoming attribute events into per-user state and fans out
     * one enriched record per known id_combine.
     *
     * <p>Per-uid state:
     * <ul>
     *   <li>{@code userSourceState} — latest merged attribute JSON; template keys come from
     *       the USERLABELPROP properties file;</li>
     *   <li>{@code idcombineMState} — set of distinct id_combine strings (31-day TTL, value
     *       is a dummy 1).</li>
     * </ul>
     */
    private static class AdmUserBasicInfoFlatMapFunc extends RichFlatMapFunction<String, String> {
        // Latest merged attribute JSON for the current uid.
        ValueState<JSONObject> userSourceState;
        // Distinct id_combine values observed for the current uid.
        MapState<String, Integer> idcombineMState;
        //        MapState<Long, Integer> auditTimeMState;

        @Override
        public void open(Configuration parameters) throws Exception {
            // 31-day TTL refreshed on create/write; expired entries are never returned and are
            // cleaned up during RocksDB compaction (filter queried every 100k processed entries).
            StateTtlConfig ttlConfig =
                    StateTtlConfig.newBuilder(Time.days(31))
                            .setUpdateType(StateTtlConfig.UpdateType.OnCreateAndWrite)
                            .setStateVisibility(StateTtlConfig.StateVisibility.NeverReturnExpired)
                            .cleanupInRocksdbCompactFilter(100000L)
                            .build();

            // NOTE(review): userSourceState has no TTL while idcombineMState expires after
            // 31 days — confirm this asymmetry is intentional.
            userSourceState =
                    getRuntimeContext()
                            .getState(
                                    new ValueStateDescriptor<JSONObject>(
                                            "userSourceOriginVState", JSONObject.class));

            MapStateDescriptor<String, Integer> idcombineStateDesc =
                    new MapStateDescriptor<>("idcombineMStateDesc", String.class, Integer.class);
            idcombineStateDesc.enableTimeToLive(ttlConfig);
            idcombineMState = getRuntimeContext().getMapState(idcombineStateDesc);

            //            MapStateDescriptor<Long, Integer> auditTimeStateDesc
            //                    = new MapStateDescriptor<>("idcombineMStateDesc", Long.class,
            // Integer.class);
            //            auditTimeMState = getRuntimeContext()
            //                    .getMapState(auditTimeStateDesc);
        }

        @Override
        public void flatMap(String value, Collector<String> out) throws Exception {
            try {
                JSONObject inputJson = JSONObject.parseObject(value);
                // LOG.warn("input value: " + value);
                if (inputJson != null) {
                    // Start from stored state, or from the property-file template on first event.
                    JSONObject tmpState = userSourceState.value();
                    JSONObject curState =
                            tmpState == null ? initJsonObjState(USERLABELPROP) : tmpState;

                    // init uid
                    if (curState.getLong(INPUT_KEY_BY) == null) {
                        curState.put(INPUT_KEY_BY, inputJson.get(INPUT_KEY_BY));
                    }

                    // Branch 1: event carries an id_combine -> remember it in MapState and, if
                    // the user_key is already known, emit one enriched record for it.
                    // NOTE(review): curState is NOT written back via userSourceState.update()
                    // in this branch, so the uid init above is lost until a non-id_combine
                    // event arrives — confirm this is intended.
                    if (inputJson.containsKey(ID_COMBINE)) {
                        idcombineMState.put(inputJson.getString(ID_COMBINE), 1);
                        if (curState.getString(OUTPUT_KEY_BY) != null) {
                            curState.put(ID_COMBINE, inputJson.getString(ID_COMBINE));
                            // LOG.warn("#1 output: " + curState.toString());
                            out.collect(curState.toString());
                        }
                    } else {
                        // Branch 2: attribute update — merge every key the template knows
                        // (except uid) and persist, then fan out once per stored id_combine.
                        Set<String> keySet = inputJson.keySet();
                        for (String key : keySet) {
                            if (curState.containsKey(key) && !key.equals(INPUT_KEY_BY)) {
                                curState.put(key, inputJson.get(key));
                            }
                        }
                        userSourceState.update(curState);

                        if (curState.getString(OUTPUT_KEY_BY) != null) {
                            if (!idcombineMState.isEmpty()) {
                                Iterator<String> idcombineList = idcombineMState.keys().iterator();
                                while (idcombineList.hasNext()) {
                                    String idcombine = idcombineList.next();
                                    curState.put(ID_COMBINE, idcombine);
                                    // LOG.warn("#2 output: " + curState.toString());
                                    out.collect(curState.toString());
                                }
                            } else {
                                // No id_combine known yet: emit the merged record as-is.
                                // LOG.warn("#3 no id_combine output: " + curState.toString());
                                out.collect(curState.toString());
                            }
                        }
                    }

                    //                    if(inputJson.containsKey(OUTPUT_KEY_BY)) {
                    //                        curState.put(OUTPUT_KEY_BY,
                    // inputJson.getString(OUTPUT_KEY_BY));
                    //                        curState.put(REG_TIME, inputJson.getLong(REG_TIME));
                    //                    }
                    //                    if(inputJson.containsKey(ORIGIN_NAME)) {
                    //                        curState.put(ORIGIN_NAME,
                    // inputJson.getString(ORIGIN_NAME));
                    //                    }
                    //
                    //                    if(inputJson.containsKey(FIRST_AUDIT_SUCCESS_TIME)) {
                    //                        curState.put(FIRST_AUDIT_SUCCESS_TIME,
                    // inputJson.getLong(FIRST_AUDIT_SUCCESS_TIME));
                    //                    }
                    //
                    //                    if(inputJson.containsKey(LAST_AUDIT_TIME_BF_SUC)) {
                    //                        curState.put(LAST_AUDIT_TIME_BF_SUC,
                    // inputJson.getLong(LAST_AUDIT_TIME_BF_SUC));
                    //                    }

                }

            } catch (Exception e) {
                // Swallow-and-log keeps the job alive on malformed events.
                LOG.error("Excep: " + e.toString());
                LOG.error("Excep input: " + value);
            }
        }
    }

    /**
     * Stage 2 (keyed by user_key + id_combine): change-detection over label values. For every
     * attribute whose value differs from the stored snapshot, emits one record containing
     * user_key, id_combine, label_name and label_value, then persists the updated snapshot.
     */
    private static class UserLabelRichFlatMapFunc extends RichFlatMapFunction<String, String> {
        // Per-key snapshot of the last emitted label values.
        ValueState<JSONObject> userLabelVState;

        @Override
        public void open(Configuration parameters) throws Exception {
            ValueStateDescriptor<JSONObject> descriptor =
                    new ValueStateDescriptor<JSONObject>("userLabelVState", JSONObject.class);
            userLabelVState = getRuntimeContext().getState(descriptor);
        }

        @Override
        public void flatMap(String value, Collector<String> out) throws Exception {
            try {
                // Stored snapshot, or the property-file template on the first event.
                JSONObject previous = userLabelVState.value();
                JSONObject snapshot =
                        previous == null ? initJsonObjState(USERLABELPROP) : previous;
                // LOG.warn("#Final input: " + value);
                JSONObject record = JSON.parseObject(value);
                String userKeyVal = record.getString(OUTPUT_KEY_BY);
                String combineId = record.getString(ID_COMBINE);

                // Output envelope shared by all emitted labels of this record.
                JSONObject emitted = new JSONObject();
                emitted.put(OUTPUT_KEY_BY, userKeyVal);
                emitted.put(ID_COMBINE, combineId == null ? "null" : combineId);

                for (Map.Entry<String, Object> entry : record.entrySet()) {
                    String labelKey = entry.getKey();
                    if (labelKey.equals(OUTPUT_KEY_BY) || labelKey.equals(ID_COMBINE)) {
                        continue; // key fields are not labels
                    }
                    Object newVal = entry.getValue();
                    Object oldVal = snapshot.get(labelKey);

                    // Null-safe inequality: emit only when the label value actually changed.
                    boolean changed = oldVal == null ? newVal != null : !oldVal.equals(newVal);
                    if (changed) {
                        snapshot.put(labelKey, newVal);
                        emitted.put("label_name", labelKey);
                        emitted.put("label_value", newVal);
                        // LOG.warn("#Final outJson: " + emitted.toString());
                        out.collect(emitted.toString());
                    }
                }
                userLabelVState.update(snapshot);
            } catch (Exception e) {
                LOG.error("Excep: " + e.toString());
                LOG.error("Excep input: " + value);
            }
        }
    }

    /**
     * Builds the initial state template from a classpath properties file whose values use the
     * "Type:value" format (e.g. {@code register_time=Long:null}).
     *
     * @param propFile classpath-relative path of the properties file
     * @return JSONObject with one typed entry per property ("null" values become JSON null)
     * @throws IOException if the resource is missing or cannot be parsed
     */
    private static JSONObject initJsonObjState(String propFile) throws IOException {

        JSONObject res = new JSONObject();
        // try-with-resources: the stream was previously never closed (resource leak).
        try (InputStream resourceAsStream =
                AdmUserBasicInfo.class.getClassLoader().getResourceAsStream(propFile)) {
            if (resourceAsStream == null) {
                // Fail fast with a clear message instead of an opaque NPE downstream.
                throw new IOException("Property file not found on classpath: " + propFile);
            }
            ParameterTool reader = ParameterTool.fromPropertiesFile(resourceAsStream);
            Set<Object> propSets = reader.getProperties().keySet();
            for (Object key : propSets) {
                String typeValue = reader.get((String) key);
                // NOTE(review): values containing ':' are rejected below; use split(":", 2) if
                // such values (e.g. URLs) must ever be supported — confirm against the files.
                String[] splits = typeValue.split(":");
                if (splits.length == 2) {
                    String type = splits[0];
                    String value = splits[1];
                    jsonObjectSetValue(res, type, (String) key, value);
                } else {
                    LOG.error("split Error: " + splits.length);
                }
            }
        }
        return res;
    }

    /**
     * Stores {@code value} into {@code jsonObj} under {@code key}, parsed according to
     * {@code type} ("Long", "String", "Float" or "Integer"). The literal string "null" maps to
     * a JSON null; unsupported types are logged and the key is left unset.
     */
    private static void jsonObjectSetValue(
            JSONObject jsonObj, String type, String key, String value) {
        // The literal "null" marks an intentionally absent value.
        if (value.equals("null")) {
            jsonObj.put(key, null);
            return;
        }
        if ("Long".equals(type)) {
            jsonObj.put(key, Long.parseLong(value));
        } else if ("String".equals(type)) {
            jsonObj.put(key, value);
        } else if ("Float".equals(type)) {
            jsonObj.put(key, Float.parseFloat(value));
        } else if ("Integer".equals(type)) {
            jsonObj.put(key, Integer.parseInt(value));
        } else {
            LOG.error("Not support this type: " + type);
        }
    }

    /**
     * Kafka record serializer: the record key is "{user_key}_{id_combine}" so all events for
     * the same (user, id) pair land on the same partition; the value is the raw JSON string.
     */
    private static class WeKafkaKeyedSerializationSchema
            implements KeyedSerializationSchema<String> {
        @Override
        public byte[] serializeKey(String element) {
            JSONObject jsonObject = JSONObject.parseObject(element);
            String key1 = jsonObject.getString(OUTPUT_KEY_BY);
            String key2 = jsonObject.getString(ID_COMBINE);
            // Explicit UTF-8: a bare getBytes() depends on the JVM default charset and can
            // produce different keys (and thus partitions) across environments.
            return (key1 + "_" + key2).getBytes(StandardCharsets.UTF_8);
        }

        @Override
        public byte[] serializeValue(String element) {
            return element.getBytes(StandardCharsets.UTF_8);
        }

        @Override
        public String getTargetTopic(String element) {
            // null -> producer falls back to the topic configured at construction time.
            return null;
        }
    }

    /** Routes records to partitions by hashing the Kafka key produced by the serializer. */
    private static class WeKafkaCustomPartitioner extends FlinkKafkaPartitioner<String> {

        @Override
        public int partition(
                String record, byte[] key, byte[] value, String targetTopic, int[] partitions) {
            // Math.abs is safe here: |hash % n| < n, so Integer.MIN_VALUE cannot result.
            // NOTE(review): assumes key is never null (serializeKey always returns bytes) —
            // confirm if the serializer ever changes.
            String keyText = new String(key, StandardCharsets.UTF_8);
            int partition = Math.abs(keyText.hashCode() % partitions.length);
            if (LOG.isDebugEnabled()) {
                // Bug fix: this was LOG.info inside an isDebugEnabled() guard, so the message
                // never appeared when the logger was set to DEBUG only.
                LOG.debug(
                        " partitions: {} partition: {} key: {}",
                        partitions.length,
                        partition,
                        keyText);
            }
            return partition;
        }
    }
}
