package com.we.risk.behavior.cross;

import com.alibaba.fastjson.JSONObject;
import com.we.flink.utils.WeKafkaPropertyReader;
import com.we.utils.CommonTimeTools;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.Map;
import java.util.Optional;
import java.util.Properties;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.RichFlatMapFunction;
import org.apache.flink.api.common.serialization.DeserializationSchema;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.common.state.MapState;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.contrib.streaming.state.RocksDBStateBackend;
import org.apache.flink.runtime.state.StateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.sink.SinkFunction;
import org.apache.flink.streaming.api.functions.source.SourceFunction;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import org.apache.flink.streaming.connectors.kafka.partitioner.FlinkKafkaPartitioner;
import org.apache.flink.streaming.util.serialization.KeyedSerializationSchema;
import org.apache.flink.util.Collector;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Flink streaming job that cross-joins loan records and repayment records
 * arriving on the same Kafka topic, keyed by {@code user_key}.
 *
 * <p>For each incoming repayment event (identified by a {@code loan_key} field)
 * it emits the union of that event with every retained loan for the same user;
 * for each incoming loan event (identified by a {@code loan_id} field) it emits
 * the union with every retained repayment. Events older than ~122 days (by
 * their respective time field) are evicted from keyed state before each join.
 * Joined results are written back to Kafka, partitioned by {@code user_key}.
 */
public class AdmCrossUserinfo {
    /** JSON field used to key the input stream. */
    public static final String INPUT_KEY_BY = "user_key";

    /** JSON field whose value becomes the Kafka record key on output. */
    public static final String OUTPUT_KEY_BY = "user_key";

    /** JSON field identifying a loan event. */
    public static final String LOANID = "loan_id";

    // NOTE(review): constant name has a typo (APPLYITME vs APPLY_TIME); kept as-is
    // because it is a public constant that external code may reference.
    public static final String APPLYITME = "apply_time";

    /** Classpath location of the production Kafka/RocksDB configuration. */
    public static final String RELEASEPROP = "risk/behavior/cross/kfk_adm_cross_user_info_prod.properties";

    public static final Logger LOG = LoggerFactory.getLogger(AdmCrossUserinfo.class);

    public static void main(String[] args) throws IOException {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        try {
            WeKafkaPropertyReader paramReader = WeKafkaPropertyReader.init(RELEASEPROP);
            // RocksDB backend: keyed join state can grow well beyond heap size.
            env.setStateBackend(new RocksDBStateBackend(paramReader.getRocksDBBackendUrl()));

            // Checkpoint every 10 min, 1 h timeout, retained on cancellation so the
            // job can be restarted from the last externalized checkpoint.
            CheckpointConfig ckConf = env.getCheckpointConfig();
            ckConf.setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);
            ckConf.setCheckpointInterval(600000L);
            ckConf.setCheckpointTimeout(3600000L);
            ckConf.setMaxConcurrentCheckpoints(1);
            ckConf.setMinPauseBetweenCheckpoints(500L);
            ckConf.enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
            ckConf.enableUnalignedCheckpoints();

            String srcKafkaTopic = paramReader.getTmpKfkTopic();
            String srcKafkaBootStrapServer = paramReader.getTmpKfkBootStrapServer();
            String srckfkGrupId = paramReader.getTmpKfkGroupId();
            String srckfkOffset = paramReader.getTmpKfkOffset();

            Properties consumProp = new Properties();
            consumProp.setProperty("bootstrap.servers", srcKafkaBootStrapServer);
            consumProp.setProperty("group.id", srckfkGrupId);
            consumProp.setProperty("auto.offset.reset", srckfkOffset);
            consumProp.setProperty("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
            consumProp.setProperty("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");

            FlinkKafkaConsumer<String> kfkSource =
                    new FlinkKafkaConsumer<>(srcKafkaTopic, new SimpleStringSchema(), consumProp);
            DataStreamSource<String> input = env.addSource(kfkSource);

            // Key by user so that loans and repayments of the same user land on the
            // same parallel instance and share keyed MapState.
            SingleOutputStreamOperator<String> out = input
            .uid("AdmCrossUserLabel-input")
            .keyBy(new KeySelector<String, String>() {
                @Override
                public String getKey(String value) throws Exception {
                    JSONObject inputJson = JSONObject.parseObject(value);
                    return inputJson.getString(INPUT_KEY_BY);
                }
            }).flatMap(new AdmCrossUserLabelRichFlatMapFunc());

            String sinkkfkTopic = paramReader.getKfkTopic();
            Properties sinkProp = new Properties();
            sinkProp.setProperty("bootstrap.servers", paramReader.getKfkBootStrapServer());
            sinkProp.setProperty("acks", "all");
            int sinkkfkPartitions = paramReader.getKfkPartitions();
            FlinkKafkaProducer<String> userLabelAllFlinkKafkaProducer =
                    new FlinkKafkaProducer<>(sinkkfkTopic, new WeKafkaKeyedSerializationSchema(),
                            sinkProp, Optional.of(new WeKafkaCustomPartitioner()));
            out.addSink(userLabelAllFlinkKafkaProducer).setParallelism(sinkkfkPartitions);
            env.execute(AdmCrossUserinfo.class.toString());

        } catch (Exception e) {
            // Log the throwable itself so the full stack trace goes to the log
            // (previously only e.getMessage() was logged plus printStackTrace()).
            LOG.error("Exception: ", e);
        }
    }

    /**
     * Keyed join operator. Maintains two MapStates per user:
     * <ul>
     *   <li>{@code loanMapState}: loan events keyed by loan_id</li>
     *   <li>{@code rpyMapState}: repayment events keyed by loan_key + "_" + periods</li>
     * </ul>
     * Each incoming event of one kind is joined (JSON field union) against all
     * retained events of the other kind; entries older than 122 days are evicted
     * first. If the opposite side is empty the event passes through unjoined.
     */
    private static class AdmCrossUserLabelRichFlatMapFunc extends RichFlatMapFunction<String, String> {
        // Safe as an instance field: each parallel operator instance invokes
        // flatMap single-threaded, so this SimpleDateFormat is never shared.
        SimpleDateFormat sdf;

        MapState<String, JSONObject> rpyMapState;

        MapState<String, JSONObject> loanMapState;

        @Override
        public void open(Configuration parameters) throws Exception {
            this.sdf = new SimpleDateFormat("yyyy-MM-dd");
            MapStateDescriptor<String, JSONObject> repayMapStateDesc =
                    new MapStateDescriptor<>("repayMapState", String.class, JSONObject.class);
            MapStateDescriptor<String, JSONObject> loanMapStateDesc =
                    new MapStateDescriptor<>("loanMapState", String.class, JSONObject.class);
            this.rpyMapState = getRuntimeContext().getMapState(repayMapStateDesc);
            this.loanMapState = getRuntimeContext().getMapState(loanMapStateDesc);
        }

        @Override
        public void flatMap(String value, Collector<String> out) throws Exception {
            try {
                JSONObject inputJson = JSONObject.parseObject(value);
                if (inputJson.containsKey("loan_key")) {
                    // Repayment event: join against retained loans, then retain it.
                    String loan_key = inputJson.getString("loan_key");
                    String periods = inputJson.getString("periods");
                    cleanState(this.loanMapState, APPLYITME, this.sdf);
                    if (!this.loanMapState.isEmpty()) {
                        for (JSONObject resJson : this.loanMapState.values()) {
                            // Repayment fields overwrite loan fields on key collision.
                            resJson.putAll(inputJson);
                            out.collect(resJson.toString());
                        }
                    } else {
                        out.collect(inputJson.toString());
                    }
                    this.rpyMapState.put(loan_key + "_" + periods, inputJson);
                } else if (inputJson.containsKey(LOANID)) {
                    // Loan event: join against retained repayments, then retain it.
                    String loan_id = inputJson.getString(LOANID);
                    cleanState(this.rpyMapState, "payoff_time", this.sdf);
                    if (!this.rpyMapState.isEmpty()) {
                        for (JSONObject resJson : this.rpyMapState.values()) {
                            resJson.putAll(inputJson);
                            out.collect(resJson.toString());
                        }
                    } else {
                        out.collect(inputJson.toString());
                    }
                    this.loanMapState.put(loan_id, inputJson);
                }
                // Events with neither loan_key nor loan_id are silently dropped.
            } catch (Exception e) {
                // Deliberate best-effort: a malformed record must not fail the job.
                LOG.error("AdmCrossUserLabelRichFlatMap Excep: ", e);
                LOG.error("AdmCrossUserLabelRichFlatMap excep input: " + value);
            }
        }

        /**
         * Evicts from {@code state} every entry whose {@code timeKey} field
         * (epoch seconds) is more than 122 days older than now.
         *
         * @param state   keyed map state to prune
         * @param timeKey JSON field holding the event time in epoch seconds
         * @param sdf     day-granularity formatter used by CommonTimeTools.dateDiff
         */
        private void cleanState(MapState<String, JSONObject> state, String timeKey, SimpleDateFormat sdf) throws Exception {
            if (state.isEmpty()) {
                return;
            }
            Long currentTime = Long.valueOf(System.currentTimeMillis());
            // Collect expired keys first: MapState must not be mutated while iterating.
            ArrayList<String> expiredKeys = new ArrayList<>();
            // entries() avoids the per-key state.get() lookup the old keys() loop did.
            for (Map.Entry<String, JSONObject> entry : state.entries()) {
                long cmpTime = entry.getValue().getLongValue(timeKey) * 1000L;
                if (CommonTimeTools.dateDiff(currentTime, Long.valueOf(cmpTime), sdf).intValue() > 122) {
                    expiredKeys.add(entry.getKey());
                }
            }
            for (String key : expiredKeys) {
                state.remove(key);
            }
        }
    }

    /**
     * Serializes output records: the Kafka key is the {@code user_key} field of
     * the JSON payload (UTF-8), the value is the payload itself (UTF-8).
     */
    private static class WeKafkaKeyedSerializationSchema implements KeyedSerializationSchema<String> {
        private WeKafkaKeyedSerializationSchema() {}

        @Override
        public byte[] serializeKey(String element) {
            JSONObject jsonObject = JSONObject.parseObject(element);
            // NOTE(review): assumes every record carries user_key; a missing field
            // would NPE here (same as the original behavior) — confirm upstream.
            String key = jsonObject.getString(OUTPUT_KEY_BY);
            return key.getBytes(StandardCharsets.UTF_8);
        }

        @Override
        public byte[] serializeValue(String element) {
            return element.getBytes(StandardCharsets.UTF_8);
        }

        @Override
        public String getTargetTopic(String element) {
            // null = use the topic passed to the FlinkKafkaProducer constructor.
            return null;
        }
    }

    /**
     * Partitions output records by the hash of the Kafka key so that all records
     * of one user land in the same partition.
     */
    private static class WeKafkaCustomPartitioner extends FlinkKafkaPartitioner<String> {
        private WeKafkaCustomPartitioner() {}

        @Override
        public int partition(String record, byte[] key, byte[] value, String targetTopic, int[] partitions) {
            // |hashCode % length| < length, so Math.abs cannot overflow here.
            int partition = Math.abs((new String(key, StandardCharsets.UTF_8)).hashCode() % partitions.length);
            // Parameterized debug logging (old code guarded with isDebugEnabled but
            // then logged at INFO level).
            LOG.debug(" partitions: {} partition: {} key: {}",
                    partitions.length, partition, new String(key, StandardCharsets.UTF_8));
            return partition;
        }
    }
}
