package com.we.risk.modelFeatureMnt;

import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.RichFlatMapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
import org.apache.flink.contrib.streaming.state.RocksDBStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.partitioner.FlinkKafkaPartitioner;
import org.apache.flink.streaming.util.serialization.KeyedSerializationSchema;
import org.apache.flink.util.Collector;

import com.alibaba.fastjson.JSONObject;
import com.we.flink.utils.WeKafkaPropertyReader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.sql.Timestamp;
import java.util.Properties;
import java.util.Set;
@Deprecated
public class ModelFeatureDataCheck {
    /** Classpath location of this job's Kafka / checkpoint configuration. */
    public static final String RELEASEPROP = "risk/modelfeature/modelfeaturedatacheck.properties";

    // Kept public for backward compatibility with any external reader; made final so it
    // cannot be reassigned.
    public static final Logger LOG = LoggerFactory.getLogger(ModelFeatureDataCheck.class);

    /** Records with a createTime at or before this cut-off are ignored by the check. */
    private static final Timestamp CUTOFF = Timestamp.valueOf("2022-06-23 00:00:00");

    /** Feature values longer than this are flagged as suspicious in the log. */
    private static final int MAX_VALUE_LENGTH = 256;

    /**
     * Entry point: consumes model-feature JSON records from Kafka, filters by createTime,
     * flattens each (modelId, featureKey) pair into one output record, and flags oversized
     * feature values. The Kafka sink is currently disabled (see the commented section below).
     *
     * @param args unused
     * @throws IOException if the RocksDB state backend cannot be created
     */
    public static void main(String[] args) throws IOException {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        try {
            WeKafkaPropertyReader paramReader = WeKafkaPropertyReader.init(RELEASEPROP);

            /** RocksDB state backend. */
            env.setStateBackend(new RocksDBStateBackend(paramReader.getRocksDBBackendUrl()));

            /** Checkpoint configuration. */
            CheckpointConfig ckConf = env.getCheckpointConfig();
            ckConf.setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);
            ckConf.setCheckpointInterval(10 * 60 * 1000L); // every 10 minutes
            ckConf.setCheckpointTimeout(60 * 60 * 1000L); // abort a checkpoint after 1 hour
            ckConf.setMaxConcurrentCheckpoints(1);
            ckConf.setMinPauseBetweenCheckpoints(500);
            // Retain externalized checkpoints on cancellation so the job can be restored manually.
            ckConf.enableExternalizedCheckpoints(
                    CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
            ckConf.enableUnalignedCheckpoints();

            /** Kafka consumer. */
            String srcKafkaTopic = paramReader.getTmpKfkTopic();
            String srcKafkaBootStrapServer = paramReader.getTmpKfkBootStrapServer();
            String srcKfkGroupId = paramReader.getTmpKfkGroupId();

            Properties consumProp = new Properties();
            consumProp.setProperty("bootstrap.servers", srcKafkaBootStrapServer);
            consumProp.setProperty("group.id", srcKfkGroupId);
            consumProp.setProperty(
                    "key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
            consumProp.setProperty(
                    "value.deserializer",
                    "org.apache.kafka.common.serialization.StringDeserializer");

            // Startup position is fixed to the latest offsets; the configured offset
            // property (paramReader.getTmpKfkOffset()) is intentionally not used here.
            KafkaSource<String> kfkSource =
                    KafkaSource.<String>builder()
                            .setTopics(srcKafkaTopic)
                            .setStartingOffsets(OffsetsInitializer.latest())
                            .setProperties(consumProp)
                            .setValueOnlyDeserializer(new SimpleStringSchema())
                            .build();

            DataStreamSource<String> input =
                    env.fromSource(
                            kfkSource, WatermarkStrategy.noWatermarks(), "RiskFeatureDataSoure");

            SingleOutputStreamOperator<String> out =
                    input.flatMap(
                            new RichFlatMapFunction<String, String>() {
                                /**
                                 * Emits one flattened record per (modelId, featureKey) pair of
                                 * the incoming JSON, skipping records older than {@link #CUTOFF}
                                 * and logging any feature value longer than
                                 * {@link #MAX_VALUE_LENGTH} characters.
                                 */
                                @Override
                                public void flatMap(String data, Collector<String> out)
                                        throws Exception {
                                    JSONObject record = JSONObject.parseObject(data);
                                    if (!record.containsKey("createTime")) {
                                        // Was System.out.println; route diagnostics to the logger.
                                        LOG.warn("Input JSON does not contain key createTime");
                                        return;
                                    }
                                    String createTime = record.getString("createTime");
                                    // Timestamp.valueOf throws IllegalArgumentException on a
                                    // malformed timestamp; that failure deliberately propagates.
                                    Timestamp timestamp = Timestamp.valueOf(createTime);
                                    if (!timestamp.after(CUTOFF)) {
                                        return; // older records are out of scope for this check
                                    }

                                    String eventCode = record.getString("eventCode");
                                    String step = record.getString("step");
                                    String loanKey = record.getString("loanKey");

                                    JSONObject outputJson = new JSONObject();
                                    outputJson.put("event_code", eventCode);
                                    outputJson.put("step", step);
                                    outputJson.put("loankey", loanKey);
                                    outputJson.put("create_time", createTime);

                                    if (!record.containsKey("result")) {
                                        return;
                                    }
                                    JSONObject result = record.getJSONObject("result");
                                    for (String modelId : result.keySet()) {
                                        outputJson.put("model_id", modelId);
                                        JSONObject modelJson = result.getJSONObject(modelId);
                                        for (String key : modelJson.keySet()) {
                                            // String.valueOf instead of value.toString():
                                            // a JSON null value no longer throws NPE.
                                            String outv = String.valueOf(modelJson.get(key));
                                            outputJson.put("outk", key);
                                            if (outv.length() > MAX_VALUE_LENGTH) {
                                                // Oversized feature value: record its location.
                                                LOG.warn(
                                                        "locate: {}{}{} loankey: {} k: {} v: {}",
                                                        eventCode,
                                                        step,
                                                        modelId,
                                                        loanKey,
                                                        key,
                                                        outv);
                                            }
                                            out.collect(outputJson.toString());
                                        }
                                    }
                                }
                            });

            /**
             * Sink to Kafka — currently disabled. The private nested classes
             * WeKafkaKeyedSerializationSchema / WeKafkaCustomPartitioner below exist so
             * that this sink can be re-enabled:
             *
             * String sinkkfkTopic = paramReader.getKfkTopic();
             * Properties sinkProp = new Properties();
             * sinkProp.setProperty("bootstrap.servers", paramReader.getKfkBootStrapServer());
             * int sinkkfkPartitions = paramReader.getKfkPartitions();
             *
             * FlinkKafkaProducer&lt;String&gt; kafkaProducer = new FlinkKafkaProducer&lt;String&gt;(
             *         sinkkfkTopic,
             *         new WeKafkaKeyedSerializationSchema(),
             *         sinkProp,
             *         java.util.Optional.of(new WeKafkaCustomPartitioner()));
             *
             * out.addSink(kafkaProducer).setParallelism(sinkkfkPartitions);
             */

            // getSimpleName() yields "ModelFeatureDataCheck"; Class.toString() would prepend
            // "class " and the package name to the job name.
            env.execute(ModelFeatureDataCheck.class.getSimpleName());
        } catch (Exception e) {
            // Pass the throwable itself so the full stack trace is preserved in the log
            // (previously only e.getMessage() was logged and printStackTrace() used).
            LOG.error("ModelFeatureDataCheck job failed", e);
        }
    }

    /**
     * Serializes the sink record: the Kafka key is event_code + step + model_id read back
     * out of the flattened JSON; the value is the JSON string itself.
     */
    private static class WeKafkaKeyedSerializationSchema
            implements KeyedSerializationSchema<String> {
        @Override
        public byte[] serializeKey(String element) {
            JSONObject jsonObject = JSONObject.parseObject(element);
            String step = jsonObject.getString("step");
            String eventCode = jsonObject.getString("event_code");
            String modelId = jsonObject.getString("model_id");

            String keyby = eventCode + step + modelId;
            // Explicit charset: getBytes() without one depends on the platform default.
            return keyby.getBytes(StandardCharsets.UTF_8);
        }

        @Override
        public byte[] serializeValue(String element) {
            return element.getBytes(StandardCharsets.UTF_8);
        }

        @Override
        public String getTargetTopic(String element) {
            return null; // use the producer's configured default topic
        }
    }

    /** Routes each record to a partition derived from the hash of its serialized key. */
    private static class WeKafkaCustomPartitioner extends FlinkKafkaPartitioner<String> {

        @Override
        public int partition(
                String record, byte[] key, byte[] value, String targetTopic, int[] partitions) {
            String keyStr = new String(key, StandardCharsets.UTF_8);
            // |hash % length| is always < length, so Math.abs cannot receive
            // Integer.MIN_VALUE here.
            int partition = Math.abs(keyStr.hashCode() % partitions.length);
            // Fixed level mismatch: this trace was guarded by isDebugEnabled() but emitted
            // at INFO; now logged at DEBUG with SLF4J parameterized formatting.
            LOG.debug(
                    " partitions: {} partition: {} key: {}",
                    partitions.length,
                    partition,
                    keyStr);
            return partition;
        }
    }
}
