package com.we.risk.attribution.correct;

import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.RichFlatMapFunction;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
import org.apache.flink.contrib.streaming.state.RocksDBStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.AsyncDataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import org.apache.flink.streaming.connectors.kafka.partitioner.FlinkKafkaPartitioner;
import org.apache.flink.streaming.util.serialization.KeyedSerializationSchema;
import org.apache.flink.util.Collector;

import com.alibaba.fastjson.JSONObject;
import com.we.flink.utils.WeKafkaPropertyReader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.Optional;
import java.util.Properties;
import java.util.concurrent.TimeUnit;

import static com.we.risk.attribution.correct.Common.*;

public class CorrectAttribution {
    public static final String RELEASEPROP =
            "risk/attribution/correct/kfk_correct_audit_attribution_prod.properties";
    public static Logger LOG = LoggerFactory.getLogger(CorrectAttribution.class);

    /**
     * Job entry point: reads raw attribution events from Kafka, validates and enriches them
     * (per-user counter in RocksDB keyed state, async Doris lookup), and writes the resulting
     * JSON records back to Kafka, partitioned by userKey.
     *
     * @param args unused; all configuration is read from {@link #RELEASEPROP}
     * @throws IOException declared for property loading, though failures are caught and logged
     */
    public static void main(String[] args) throws IOException {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        try {
            WeKafkaPropertyReader paramReader = WeKafkaPropertyReader.init(RELEASEPROP);
            /** RocksDB state backend — keyed counter state can outgrow the heap. */
            env.setStateBackend(new RocksDBStateBackend(paramReader.getRocksDBBackendUrl()));
            /** Checkpointing: exactly-once, every 10 minutes, retained after cancellation. */
            CheckpointConfig ckConf = env.getCheckpointConfig();
            ckConf.setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);
            ckConf.setCheckpointInterval(10 * 60 * 1000); // ms
            ckConf.setCheckpointTimeout(60 * 60 * 1000);
            ckConf.setMaxConcurrentCheckpoints(1);
            ckConf.setMinPauseBetweenCheckpoints(500);
            ckConf.enableExternalizedCheckpoints(
                    CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);

            ckConf.enableUnalignedCheckpoints();

            /** Source Kafka connection parameters. */
            String srcKafkaTopic = paramReader.getTmpKfkTopic();
            String srcKafkaBootStrapServer = paramReader.getTmpKfkBootStrapServer();
            String srckfkGrupId = paramReader.getTmpKfkGroupId();
            int sinkKfkPartitions = paramReader.getKfkPartitions();

            Properties consumProp = new Properties();
            consumProp.setProperty("group.id", srckfkGrupId);
            consumProp.setProperty(
                    "key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
            consumProp.setProperty(
                    "value.deserializer",
                    "org.apache.kafka.common.serialization.StringDeserializer");

            /**
             * Start from the earliest offsets. NOTE(review): the original comment here said
             * "latest" while the code uses earliest() — confirm which is intended.
             */
            KafkaSource<String> kfkSource =
                    KafkaSource.<String>builder()
                            .setBootstrapServers(srcKafkaBootStrapServer)
                            .setTopics(srcKafkaTopic)
                            .setStartingOffsets(OffsetsInitializer.earliest())
                            .setProperties(consumProp)
                            .setValueOnlyDeserializer(new SimpleStringSchema())
                            .build();

            SingleOutputStreamOperator<String> input =
                    env.fromSource(
                                    kfkSource,
                                    WatermarkStrategy.noWatermarks(),
                                    "correctAttribution")
                            .uid("kfk-correctAttribution-source");

            // keyBy(constant 0) routes every record through one key, so the counter state in
            // formatInputStreamFunc is effectively global; the second keyBy(userKey) plus the
            // identity map hash-partitions records per user ahead of the async Doris lookup.
            SingleOutputStreamOperator<UserInfo> formatInput = input.setParallelism(1)
                    .keyBy(new KeySelector<String, Integer>() {
                        @Override
                        public Integer getKey(String value) throws Exception {
                            return 0;
                        }
                    }).flatMap(new formatInputStreamFunc())
//                    .setParallelism(sinkKfkPartitions)
                    .keyBy(new KeySelector<UserInfo, String>() {
                        @Override
                        public String getKey(UserInfo value) throws Exception {
                            return value.getUserKey();
                        }
                    }).map(new RichMapFunction<UserInfo, UserInfo>() {
                        @Override
                        public UserInfo map(UserInfo value) throws Exception {
                            return value;
                        }
                    });

            // Async Doris enrichment: 15 s timeout, at most 10 in-flight requests.
            SingleOutputStreamOperator<JSONObject> resStream =
                    AsyncDataStream.unorderedWait(
                            formatInput, new DorisAsyncRichMapFunction(), 15, TimeUnit.SECONDS, 10);

            // Forward only records that carry a non-null userKey (required by the sink's
            // key serializer/partitioner); everything else is silently dropped.
            SingleOutputStreamOperator<String> out =
                    resStream.flatMap(
                            new RichFlatMapFunction<JSONObject, String>() {
                                @Override
                                public void flatMap(JSONObject value, Collector<String> out)
                                        throws Exception {
                                    if (value != null) {
                                        if (value.containsKey(USERKEY)
                                                && value.getString(USERKEY) != null) {
                                            out.collect(value.toString());
                                        }
                                    }
                                }
                            });

//                        out.print();

            /** Sink to Kafka, keyed and partitioned by userKey. */
            String sinkkfkTopic = paramReader.getKfkTopic();
            Properties sinkProp = new Properties();
            sinkProp.setProperty("bootstrap.servers", paramReader.getKfkBootStrapServer());
            //            sinkProp.setProperty("max.request.size", String.valueOf(KFKMSGMAXSIZE));

            FlinkKafkaProducer<String> kafkaProducer =
                    new FlinkKafkaProducer<String>(
                            sinkkfkTopic,
                            new WeKafkaKeyedSerializationSchema(),
                            sinkProp,
                            Optional.of(new WeKafkaCustomPartitioner()));

            out.addSink(kafkaProducer).setParallelism(sinkKfkPartitions);

            env.execute(CorrectAttribution.class.toString());
        } catch (Exception e) {
            // Pass the throwable to the logger so the full stack trace is preserved;
            // "Exception: " + e.getMessage() alone (plus printStackTrace to stderr)
            // loses the cause chain from the job's log aggregation.
            LOG.error("CorrectAttribution job failed", e);
        }
    }

    /**
     * Parses one raw JSON record into a {@link UserInfo}, validating required fields and
     * maintaining a running counter in keyed state. Invalid records are logged and dropped
     * (the local catch keeps the pipeline alive); the counter wraps once it exceeds
     * {@code RESETCNT}.
     *
     * <p>NOTE(review): the lowercase class name violates Java naming convention but is kept
     * because the driver references it by this name.
     */
    private static class formatInputStreamFunc extends RichFlatMapFunction<String, UserInfo> {
        /** Running count of successfully emitted records for the current key. */
        ValueState<Integer> countState;

        @Override
        public void open(Configuration parameters) throws Exception {
            countState = getRuntimeContext()
                    .getState(new ValueStateDescriptor<>("CountState", Integer.class));
        }

        @Override
        public void flatMap(String value, Collector<UserInfo> out) throws Exception {
            try {
                if (value == null) {
                    return;
                }
                JSONObject inputJson = JSONObject.parseObject(value);
                if (inputJson == null) {
                    return;
                }

                // Guard clauses replace the original nested if/else pyramid; each throw is
                // caught locally below, which logs and drops the offending record.
                String userKey = inputJson.getString(USERKEY);
                if (userKey == null) {
                    throw new Exception("userKey Excep!!");
                }
                String deviceId = inputJson.getString(DEVICEID);
                if (deviceId == null || deviceId.length() != DEVICEIDLEN) {
                    throw new Exception("deviceId Excep!!");
                }
                long uid = inputJson.getLongValue(UID);
                if (uid == 0) {
                    throw new Exception("uid excep!!");
                }
                String originSource = inputJson.getString(ORIGINSOURCE);
                String type = inputJson.getString(TYPE);
                long actionTime = inputJson.getLongValue(ACTIONTIME);
                if (actionTime == 0) {
                    throw new Exception("actionTime excep!!");
                }
                String action = inputJson.getString(ACTION);
                if (action == null || (!"login".equals(action) && !"audit".equals(action))) {
                    throw new Exception("action excep!!");
                }
                String bizId = inputJson.getString(BIZID);

                // First access for a key yields null state; treat it as 0. Autoboxing
                // replaces the deprecated new Integer(...) constructor of the original.
                Integer prevCnt = countState.value();
                int curCnt = (prevCnt == null ? 0 : prevCnt) + 1;
                // Wrap the counter so it never grows unbounded.
                curCnt = (curCnt > RESETCNT ? (curCnt - RESETCNT) : curCnt);

                out.collect(new UserInfo(userKey, uid, deviceId, originSource, type, action,
                        actionTime, curCnt, bizId));
                countState.update(curCnt);
            } catch (Exception e) {
                // Best-effort: log and drop the bad record rather than failing the job.
                LOG.error("Excep :" + e.toString());
                LOG.error("Excep input: " + value);
            }
        }
    }
    /**
     * Serializes outgoing records for the Kafka sink: key = the record's USERKEY field,
     * value = the raw JSON string. UTF-8 is specified explicitly so the produced bytes do
     * not depend on the JVM's platform default charset.
     *
     * <p>NOTE(review): serializeKey does not null-check USERKEY — the upstream flatMap only
     * forwards records with a non-null USERKEY. Confirm before reusing this schema elsewhere.
     */
    private static class WeKafkaKeyedSerializationSchema
            implements KeyedSerializationSchema<String> {
        @Override
        public byte[] serializeKey(String element) {
            JSONObject inputJson = JSONObject.parseObject(element);
            // Bare getBytes() would use the platform default charset — make UTF-8 explicit.
            return inputJson.getString(USERKEY).getBytes(StandardCharsets.UTF_8);
        }

        @Override
        public byte[] serializeValue(String element) {
            return element.getBytes(StandardCharsets.UTF_8);
        }

        @Override
        public String getTargetTopic(String element) {
            // null = use the topic configured on the FlinkKafkaProducer.
            return null;
        }
    }

    /**
     * Deterministically maps each record's serialized key to a Kafka partition so that all
     * records sharing a userKey land in the same partition.
     */
    private static class WeKafkaCustomPartitioner extends FlinkKafkaPartitioner<String> {

        @Override
        public int partition(
                String record, byte[] key, byte[] value, String targetTopic, int[] partitions) {
            // Explicit charset so the hash — and thus the chosen partition — does not vary
            // with the JVM's platform default encoding.
            String keyStr = new String(key, StandardCharsets.UTF_8);
            // abs() is applied AFTER the modulo, so its operand lies in
            // (-partitions.length, partitions.length) and can never be Integer.MIN_VALUE;
            // the abs-of-MIN_VALUE pitfall does not apply here.
            int partition = Math.abs(keyStr.hashCode() % partitions.length);
            if (LOG.isDebugEnabled()) {
                // The original logged at INFO inside an isDebugEnabled() guard, so the guard
                // never matched the emitted level — log at DEBUG, parameterized.
                LOG.debug(
                        " partitions: {} partition: {} key: {}",
                        partitions.length,
                        partition,
                        keyStr);
            }
            return partition;
        }
    }
}
