package com.we.risk.phoneInfo.KfkDataBak;

import com.alibaba.fastjson.JSONObject;
import com.we.flink.utils.WeKafkaPropertyReader;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.functions.RichFlatMapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
import org.apache.flink.contrib.streaming.state.RocksDBStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import org.apache.flink.streaming.connectors.kafka.partitioner.FlinkKafkaPartitioner;
import org.apache.flink.streaming.util.serialization.KeyedSerializationSchema;
import org.apache.flink.util.Collector;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.nio.charset.StandardCharsets;
import java.text.SimpleDateFormat;
import java.time.Instant;
import java.time.ZoneId;
import java.util.Calendar;
import java.util.Date;
import java.util.Optional;
import java.util.Properties;

public class PhoneInfoDataBakKfkTest {
    public static final String RELEASEPROP =
            "risk/phoneinfo/kfkdatabak/databak_self_test.properties";
    public static final String USERKEY = "userKey";
    public static final String TIMESTAMP = "timestamp";
    public static final String JOBID = "jobID";
    public static final String KEYBY = "userKey";
    public static final String MSGTYPE = "HFQ_UPLOAD_PHONE_INFO_RECORD";
    public static final Long KFKMSGMAXSIZE = 200 * 1024 * 1024L;
    // 23-07-18 --- 23-07-19
    public static final Long TIMESTART = 1689609600000L;
    public static final Long TIMEEND = 1689696000000L;
    public static Logger LOG = LoggerFactory.getLogger(PhoneInfoDataBakKfkTest.class);

    public static final String DATEFORMAT = "yyyy-MM-dd";

    /**
     * Entry point. Builds and runs the Flink pipeline:
     * Kafka source -> substring pre-filter -> parse/validate flatMap -> keyBy(userKey)
     * -> per-user "keep only newer capture time" dedup (keyed ValueState in RocksDB)
     * -> Kafka sink keyed/partitioned by userKey.
     */
    public static void main(String[] args) {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        try {
            WeKafkaPropertyReader paramReader = WeKafkaPropertyReader.init(RELEASEPROP);
            /** RocksDB state backend: the keyed dedup state must survive restarts. */
            env.setStateBackend(new RocksDBStateBackend(paramReader.getRocksDBBackendUrl()));
            /** Checkpoint configuration. */
            CheckpointConfig ckConf = env.getCheckpointConfig();
            ckConf.setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);
            ckConf.setCheckpointInterval(10 * 60 * 1000); // every 10 minutes (ms)
            ckConf.setCheckpointTimeout(60 * 60 * 1000); // abort a checkpoint after 1 hour (ms)
            ckConf.setMaxConcurrentCheckpoints(1);
            ckConf.setMinPauseBetweenCheckpoints(500);
            // Retain the externalized checkpoint so the job can be resumed after a cancel.
            ckConf.enableExternalizedCheckpoints(
                    CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);

            ckConf.enableUnalignedCheckpoints();

            /** Kafka consumer (source). */
            String srcKafkaTopic = paramReader.getTmpKfkTopic();
            String srcKafkaBootStrapServer = paramReader.getTmpKfkBootStrapServer();
            String srcKafkaGroupId = paramReader.getTmpKfkGroupId();

            Properties consumProp = new Properties();
            consumProp.setProperty("bootstrap.servers", srcKafkaBootStrapServer);
            consumProp.setProperty("group.id", srcKafkaGroupId);
            consumProp.setProperty(
                    "key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
            consumProp.setProperty(
                    "value.deserializer",
                    "org.apache.kafka.common.serialization.StringDeserializer");

            KafkaSource<String> kfkSource =
                    KafkaSource.<String>builder()
                            .setTopics(srcKafkaTopic)
                            .setStartingOffsets(OffsetsInitializer.earliest())
                            .setProperties(consumProp)
                            .setValueOnlyDeserializer(new SimpleStringSchema())
                            .build();

            DataStreamSource<String> input =
                    env.fromSource(
                            kfkSource, WatermarkStrategy.noWatermarks(), "phonerecord_kfk_data");

            SingleOutputStreamOperator<String> out =
                    input.uid("phonerecord_kfk_data-source")
                            .filter(new PhoneRecordDataFilterFunc())
                            .flatMap(new PhoneRecordDataRichFlatMapFunc())
                            .keyBy(
                                    new KeySelector<String, String>() {
                                        @Override
                                        public String getKey(String value) throws Exception {
                                            // Records reaching here were validated upstream,
                                            // so dataJson and userKey are present.
                                            JSONObject inputJson = JSONObject.parseObject(value);
                                            JSONObject dataJson =
                                                    inputJson.getJSONObject("dataJson");
                                            return dataJson.getString(USERKEY);
                                        }
                                    })
                            .flatMap(
                                    new RichFlatMapFunction<String, String>() {
                                        // Latest capture time already emitted for the current key.
                                        ValueState<UserPhoneInfoTime> userprTimeVState;

                                        @Override
                                        public void open(Configuration parameters)
                                                throws Exception {
                                            userprTimeVState =
                                                    getRuntimeContext()
                                                            .getState(
                                                                    new ValueStateDescriptor<
                                                                            UserPhoneInfoTime>(
                                                                            "userprTimeVState",
                                                                            UserPhoneInfoTime
                                                                                    .class));
                                        }

                                        @Override
                                        public void flatMap(String value, Collector<String> out)
                                                throws Exception {
                                            JSONObject inputJson = JSONObject.parseObject(value);
                                            JSONObject dataJson =
                                                    inputJson.getJSONObject("dataJson");
                                            Long captureTime = dataJson.getLong(TIMESTAMP);
                                            String user_key = dataJson.getString(USERKEY);

                                            UserPhoneInfoTime tmpState = userprTimeVState.value();
                                            UserPhoneInfoTime curState =
                                                    (tmpState == null
                                                            ? UserPhoneInfoTime.init(user_key)
                                                            : tmpState);

                                            // Emit only records strictly newer than the last one
                                            // seen for this user; older/duplicate ones are dropped.
                                            if (curState.cap_time < captureTime) {
                                                curState.cap_time = captureTime;
                                                out.collect(value);
                                                userprTimeVState.update(curState);
                                            }
                                        }
                                    });

            /** Kafka producer (sink), keyed and partitioned by userKey. */
            String sinkkfkTopic = paramReader.getKfkTopic();
            Properties sinkProp = new Properties();
            sinkProp.setProperty("max.request.size", String.valueOf(KFKMSGMAXSIZE));
            sinkProp.setProperty("buffer.memory", String.valueOf(KFKMSGMAXSIZE));
            sinkProp.setProperty("bootstrap.servers", paramReader.getKfkBootStrapServer());
            int sinkkfkPartitions = paramReader.getKfkPartitions();

            FlinkKafkaProducer<String> kafkaProducer =
                    new FlinkKafkaProducer<String>(
                            sinkkfkTopic,
                            new WeKafkaKeyedSerializationSchema(),
                            sinkProp,
                            Optional.of(new WeKafkaCustomPartitioner()));

            out.addSink(kafkaProducer).setParallelism(sinkkfkPartitions);

            // getSimpleName() gives a readable job name ("PhoneInfoDataBakKfkTest")
            // instead of toString()'s "class com.we....PhoneInfoDataBakKfkTest".
            env.execute(PhoneInfoDataBakKfkTest.class.getSimpleName());

        } catch (Exception e) {
            // Log the full stack trace: e.getMessage() alone may be null and always
            // loses the cause chain; stdout/printStackTrace bypass the log pipeline.
            LOG.error("PhoneInfoDataBakKfkTest job failed", e);
        }
    }

    /**
     * Parses the raw Kafka message and forwards it unchanged only when it is a
     * {@code HFQ_UPLOAD_PHONE_INFO_RECORD} whose capture timestamp lies inside the
     * [TIMESTART, TIMEEND] backfill window and which carries both a jobID and a
     * userKey. Malformed or out-of-window records are silently dropped.
     */
    private static class PhoneRecordDataRichFlatMapFunc
            extends RichFlatMapFunction<String, String> {

        @Override
        public void flatMap(String value, Collector<String> out) throws Exception {
            JSONObject inputJson = JSONObject.parseObject(value);
            // Constant-first equals: the original getString("type").equals(MSGTYPE)
            // threw an NPE whenever the "type" field was absent.
            if (inputJson == null || !MSGTYPE.equals(inputJson.getString("type"))) {
                return;
            }
            JSONObject dataJson = inputJson.getJSONObject("dataJson");
            if (dataJson == null) {
                return; // malformed record: no payload
            }
            Long captureTime = dataJson.getLong(TIMESTAMP);
            // Explicit null check: the original unboxed a possibly-null Long.
            if (captureTime == null || captureTime < TIMESTART || captureTime > TIMEEND) {
                return;
            }
            JSONObject job = dataJson.getJSONObject("job");
            if (job == null) {
                return; // original code NPE'd here on records without a "job" object
            }
            String jobID = job.getString(JOBID);
            String userKey = dataJson.getString(USERKEY);
            if (jobID != null && userKey != null) {
                out.collect(value);
            }
        }
    }

    /**
     * Cheap pre-filter: keeps only messages whose raw text contains the target
     * message type, so the JSON parser downstream never sees obvious non-matches.
     */
    private static class PhoneRecordDataFilterFunc implements FilterFunction<String> {
        @Override
        public boolean filter(String value) throws Exception {
            final boolean mayBeTargetType = value.contains(MSGTYPE);
            return mayBeTargetType;
        }
    }

    /**
     * Serializes outgoing records: the Kafka message key is the userKey extracted
     * from the payload (so the custom partitioner keeps each user on one partition);
     * the value is the raw JSON string. Encodes with explicit UTF-8 rather than the
     * platform default charset, which varies by JVM/OS before Java 18.
     */
    private static class WeKafkaKeyedSerializationSchema
            implements KeyedSerializationSchema<String> {
        @Override
        public byte[] serializeKey(String element) {
            // Upstream validation guarantees dataJson/userKey are present here.
            JSONObject inputJson = JSONObject.parseObject(element);
            JSONObject dataJson = inputJson.getJSONObject("dataJson");
            String user_key = dataJson.getString(USERKEY);
            return user_key.getBytes(StandardCharsets.UTF_8);
        }

        @Override
        public byte[] serializeValue(String element) {
            return element.getBytes(StandardCharsets.UTF_8);
        }

        @Override
        public String getTargetTopic(String element) {
            // null -> the producer falls back to its configured default topic.
            return null;
        }
    }

    /**
     * Routes each record to a partition derived from the hash of its Kafka key
     * (the userKey), so all records of one user land on the same partition.
     */
    private static class WeKafkaCustomPartitioner extends FlinkKafkaPartitioner<String> {

        @Override
        public int partition(
                String record, byte[] key, byte[] value, String targetTopic, int[] partitions) {
            // Decode with the same charset the serializer used.
            String keyStr = new String(key, StandardCharsets.UTF_8);
            // |hashCode % n| < n, so the remainder can never be Integer.MIN_VALUE
            // and Math.abs is safe here.
            int partition = Math.abs(keyStr.hashCode() % partitions.length);
            // The original guarded an INFO call with isDebugEnabled(); log at debug
            // so guard and statement agree, and use parameterized logging so the
            // message is only built when debug is enabled.
            LOG.debug(
                    " partitions: {} partition: {} key: {}",
                    partitions.length,
                    partition,
                    keyStr);
            return partition;
        }
    }

    /**
     * Returns the epoch millisecond lying {@code days} calendar days before {@code now},
     * computed in the system default time zone. Uses java.time instead of the legacy
     * mutable Calendar API; ZonedDateTime.minusDays preserves the wall-clock time across
     * the shift, matching Calendar.add(Calendar.DATE, -days) semantics (including
     * DST-shortened/lengthened days).
     *
     * @param now reference instant as epoch millis
     * @param days number of calendar days to go back
     * @return epoch millis of the shifted instant
     */
    private static long timeBeforeDays(long now, int days) {
        return Instant.ofEpochMilli(now)
                .atZone(ZoneId.systemDefault())
                .minusDays(days)
                .toInstant()
                .toEpochMilli();
    }

    /**
     * Truncates {@code now} to the start of its calendar day (00:00) in the system
     * default time zone, returned as epoch millis. Replaces the old SimpleDateFormat
     * format-then-parse round trip, which allocated a formatter per call and silently
     * returned 0 when the parse failed; this java.time version has no failure path.
     *
     * @param now epoch millis
     * @return epoch millis at 00:00 of the same local day
     */
    private static long getStartTimeOfDay(long now) {
        ZoneId zone = ZoneId.systemDefault();
        return Instant.ofEpochMilli(now)
                .atZone(zone)
                .toLocalDate()
                .atStartOfDay(zone)
                .toInstant()
                .toEpochMilli();
    }
}
