package com.we.risk.phoneRecord.stage2.dimphone;

import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.we.flink.utils.WeKafkaPropertyReader;
import com.we.risk.phoneRecord.common.PhoneRecordDataUtils;
import com.we.risk.phoneRecord.common.Stage2GlobalSetting;
import com.we.utils.ParamCheck;
import org.apache.commons.io.FileUtils;
import org.apache.doris.flink.cfg.DorisExecutionOptions;
import org.apache.doris.flink.cfg.DorisOptions;
import org.apache.doris.flink.cfg.DorisReadOptions;
import org.apache.doris.flink.sink.DorisSink;
import org.apache.doris.flink.sink.writer.SimpleStringSerializer;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.RichFlatMapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
import org.apache.flink.contrib.streaming.state.RocksDBStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import org.apache.flink.streaming.connectors.kafka.partitioner.FlinkKafkaPartitioner;
import org.apache.flink.streaming.util.serialization.KeyedSerializationSchema;
import org.apache.flink.util.Collector;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.text.SimpleDateFormat;
import java.util.HashMap;
import java.util.List;
import java.util.Optional;
import java.util.Properties;

public class DimPhoneRecordLabelVarTest {
    public static final String RELEASEPROP =
            "risk/phonerecord/stage2/dim_phone_kfk_self_test.properties";
    public static Logger LOG = LoggerFactory.getLogger(DimPhoneRecordLabelVarTest.class);

    public static final String SPLITWORD = "#";
    public static final String DATEFORMAT = "yyyy-MM-dd";
    public static final String MSGTYPE = "HFQ_UPLOAD_PHONE_CALL_RECORD";
    public static final String INPUTKEYBY = "userKey";
    public static final String OUTPUTKEY = "user_key";
    public static final String TIMESTAMP = "timestamp";
    public static final String JOBID = "jobID";
    public static final String PHONECALLLIST = "phoneCallList";
    public static final String REMOVEJSONKEY = "numberType";
    public static final Long KFKMSGMAXSIZE = 50 * 1024 * 1024L;

    /**
     * Job entry point.
     *
     * <p>Pipeline: Kafka source (phone-call-record upload messages) → normalize/filter
     * ({@link PhoneRecordDataFilter}) → keyBy {@code user_key} → per-user row generation
     * ({@link PhoneRecordDataRichFlatMapFunc}) → Doris sink (JSON stream load).
     *
     * @param args unused
     * @throws IOException declared for property loading; runtime failures are caught and logged
     */
    public static void main(String[] args) throws IOException {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        try {
            WeKafkaPropertyReader paramReader = WeKafkaPropertyReader.init(RELEASEPROP);
            /** RocksDB state backend — required for the keyed ValueState used downstream */
            env.setStateBackend(new RocksDBStateBackend(paramReader.getRocksDBBackendUrl()));
            /** checkpoint configure */
            CheckpointConfig ckConf = env.getCheckpointConfig();
            ckConf.setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);
            ckConf.setCheckpointInterval(10 * 1000); // ms
            ckConf.setCheckpointTimeout(5 * 60 * 1000); // ms
            ckConf.setMaxConcurrentCheckpoints(1);
            ckConf.setMinPauseBetweenCheckpoints(500);
            // Keep checkpoints on cancel so the job can be restored manually afterwards.
            ckConf.enableExternalizedCheckpoints(
                    CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);

            ckConf.enableUnalignedCheckpoints();

            /** Consumer Kafka */
            String srcKafkaTopic = paramReader.getTmpKfkTopic();
            String srcKafkaBootStrapServer = paramReader.getTmpKfkBootStrapServer();
            String srckfkGrupId = paramReader.getTmpKfkGroupId();

            Properties consumProp = new Properties();
            consumProp.setProperty("group.id", srckfkGrupId);
            consumProp.setProperty(
                    "key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
            consumProp.setProperty(
                    "value.deserializer",
                    "org.apache.kafka.common.serialization.StringDeserializer");

            /** start from the earliest offset — this test job replays the whole topic */
            KafkaSource<String> kfkSource =
                    KafkaSource.<String>builder()
                            .setBootstrapServers(srcKafkaBootStrapServer)
                            .setTopics(srcKafkaTopic)
                            .setStartingOffsets(OffsetsInitializer.earliest())
                            .setProperties(consumProp)
                            .setValueOnlyDeserializer(new SimpleStringSchema())
                            .build();

            /** distributed cache for Sougou+Cuishou Tag + mob_area_clean Data */
//            env.registerCachedFile(
//                    Stage2GlobalSetting.V1CLEANDATA, Stage2GlobalSetting.V1CLEANCATCHFILENAME);
//            env.registerCachedFile(
//                    Stage2GlobalSetting.SGCSTAGDATA, Stage2GlobalSetting.SGCSCATCHFILENAME);
//            env.registerCachedFile(
//                    Stage2GlobalSetting.MOBAREADATA, Stage2GlobalSetting.MOBAREAFILENAME);

            SingleOutputStreamOperator<String> input =
                    env.fromSource(kfkSource, WatermarkStrategy.noWatermarks(), "PhoneRecord")
                            .uid("kfk-PhoneRecordLabel-source");

            SingleOutputStreamOperator<String> out =
                    input.flatMap(new PhoneRecordDataFilter())
                            .keyBy(
                                    new KeySelector<String, String>() {
                                        @Override
                                        public String getKey(String value) throws Exception {
                                            JSONObject jsonObject = JSONObject.parseObject(value);
                                            return jsonObject.getString(OUTPUTKEY);
                                        }
                                    })
                            .flatMap(new PhoneRecordDataRichFlatMapFunc());

            // NOTE(review): FE nodes and credentials are hard-coded for this test job;
            // move them into the properties file (like the Kafka settings) before any
            // non-test use, and avoid committing passwords to source control.
            DorisSink<String> stringDorisSink = buidDorisSink("10.10.148.68:8030,10.10.148.69:8030,10.10.148.74:8030",
                    "call_record.var_20230621",
                    "doris",
                    "wetc@#dw23",
                    "recv_time");

            out.sinkTo(stringDorisSink).setParallelism(4);
//            /** sink to Kafka */
//            String sinkkfkTopic = paramReader.getKfkTopic();
//            Properties sinkProp = new Properties();
//            sinkProp.setProperty("bootstrap.servers", paramReader.getKfkBootStrapServer());
//            sinkProp.setProperty("max.request.size", String.valueOf(KFKMSGMAXSIZE));
//            int sinkkfkPartitions = paramReader.getKfkPartitions();
//
//            FlinkKafkaProducer<String> kafkaProducer =
//                    new FlinkKafkaProducer<String>(
//                            sinkkfkTopic,
//                            new WeKafkaKeyedSerializationSchema(),
//                            sinkProp,
//                            Optional.of(new WeKafkaCustomPartitioner()));
//
//            out.addSink(kafkaProducer).setParallelism(sinkkfkPartitions);

            env.execute(DimPhoneRecordLabelVarTest.class.toString());
        } catch (Exception e) {
            // Log the full stack trace; the previous getMessage()-only log dropped it.
            LOG.error("DimPhoneRecordLabelVarTest job failed", e);
        }
    }

    private static class PhoneRecordDataFilter extends RichFlatMapFunction<String, String> {
        @Override
        public void flatMap(String input, Collector<String> out) throws Exception {
            try {
                if (input.contains(MSGTYPE)) {
                    JSONObject inputJson = JSONObject.parseObject(input);
                    if (inputJson != null && inputJson.getString("type").equals(MSGTYPE)) {
                        JSONObject dataJson = inputJson.getJSONObject("dataJson");
                        Long captureTime = dataJson.getLong(TIMESTAMP);
                        JSONObject job = dataJson.getJSONObject("job");
                        String jobID = job.getString(JOBID);

                        String userKey = dataJson.getString(INPUTKEYBY);
                        JSONObject data = dataJson.getJSONObject("data");
                        JSONArray calls = data.getJSONArray(PHONECALLLIST);

                        if (userKey != null) {
                            JSONObject outJson = new JSONObject();
                            outJson.put(OUTPUTKEY, userKey);

                            outJson.put(TIMESTAMP, captureTime);
                            outJson.put(JOBID, jobID);
                            outJson.put(PHONECALLLIST, calls);
                            out.collect(outJson.toString());
                        }
                    }
                }
            } catch (Exception e) {
                LOG.error(e.getMessage());
                LOG.error("flatMap1 error input: " + input);
                e.printStackTrace();
            }
        }
    }

    private static class PhoneRecordDataRichFlatMapFunc
            extends RichFlatMapFunction<String, String> {
        HashMap<String, String> mobAreaMap;
        SimpleDateFormat dateFormat;
        HashMap<String, String> mapV1Clean = new HashMap<>();
        HashMap<String, String> sgcsTagMap = new HashMap<>();
        ValueState<Integer> callRecordNumVState;

        @Override
        public void open(Configuration parameters) throws Exception {
            super.open(parameters);
            dateFormat = new SimpleDateFormat(DATEFORMAT);
            callRecordNumVState = getRuntimeContext()
                    .getState(new ValueStateDescriptor<Integer>("callRecordNumVState", Integer.class));
        }

        @Override
        public void flatMap(String value, Collector<String> out) throws Exception {
            fakeProcess(value, out);
//            ProcessData(value, mobAreaMap, out);
        }


        private void fakeProcess(String value, Collector<String> out) {
            try {
                Integer stateCallNum = callRecordNumVState.value();
                Integer lastCallNum = (stateCallNum == null ? 0 : stateCallNum);
                JSONObject inputJson = JSONObject.parseObject(value);
                ParamCheck.isNull(inputJson, " inputJson");
                JSONArray phoneCallList = inputJson.getJSONArray("phoneCallList");
                ParamCheck.isNull(phoneCallList, "phoneCallList");
                int callSize = phoneCallList.size();

                String userKey = inputJson.getString(OUTPUTKEY);
                long recvTime = inputJson.getLongValue(TIMESTAMP);
                String jobId = inputJson.getString(JOBID);
                JSONArray outJsonArray = new JSONArray();
                for (int i = 0; i < callSize; i++) {
                    JSONObject callDetail = (JSONObject) phoneCallList.get(i);
                    JSONObject callRecordVar = new JSONObject();
                    callRecordVar.put(OUTPUTKEY, userKey);
                    callRecordVar.put("sequence_num", i);
                    callRecordVar.put("is_active", 1);
                    callRecordVar.put("recv_time", recvTime);
                    callRecordVar.put("job_id", jobId);

                    String call_number = callDetail.getString("number");
                    callRecordVar.put("phone", call_number);
                    callRecordVar.put("f_phone", call_number);
                    callRecordVar.put("mobile", call_number);
                    callRecordVar.put("phone_tags", "011110");
                    callRecordVar.put("duration", callDetail.getIntValue("duration"));
                    callRecordVar.put("mobile_prov","北京");
                    callRecordVar.put("mobile_city","北京test");
                    callRecordVar.put("mobile_opt","1");
                    callRecordVar.put("same_positions","0101011");
                    callRecordVar.put("phone_cstag", "testcs");
                    callRecordVar.put("phone_sgtag", "testsg");
                    callRecordVar.put("name", callDetail.getString("name"));
                    callRecordVar.put("name_tags", "010111");
                    callRecordVar.put("call_type", callDetail.getString("type"));
                    callRecordVar.put("call_time", callDetail.getString("date"));

                    outJsonArray.add(callRecordVar);
                }
                if(callSize < lastCallNum)
                {
                    for (int i = callSize; i < lastCallNum; i++) {
                        JSONObject expiredCall = new JSONObject();
                        expiredCall.put(OUTPUTKEY, userKey);
                        expiredCall.put("is_active", 0);
                        expiredCall.put("sequence_num", i);
                        expiredCall.put("recv_time", recvTime);
                        expiredCall.put("job_id", jobId);

                        outJsonArray.add(expiredCall);
                    }
                }
                out.collect(outJsonArray.toString());
                callRecordNumVState.update(callSize);
            }catch (Exception e) {
                LOG.error("fakeProcess exception: " + e.toString());
                LOG.error("fakeProcess exception input: " + value);
                e.printStackTrace();
            }
        }

    }

    /**
     * Builds a {@link DorisSink<String>} from explicitly supplied parameters instead of
     * reading them from a configuration file; thin delegate to {@link #buidDorisSink}.
     *
     * @param fenodes comma-separated Doris FE node addresses
     * @param tableIdentifier Doris table name ({@code db.table})
     * @param username Doris user name
     * @param password Doris password
     * @param sequenceCol sequence column name for stream load
     * @return {@link DorisSink<String>}
     */
    public static DorisSink<String> addDorisSink(
            String fenodes,
            String tableIdentifier,
            String username,
            String password,
            String sequenceCol) {
        return buidDorisSink(fenodes, tableIdentifier, username, password, sequenceCol);
    }

    /**
     * Assembles a {@link DorisSink<String>} that stream-loads line-delimited JSON
     * into the given table, using {@code sequenceCol} as the Doris sequence column.
     *
     * @param fenodes comma-separated Doris FE node addresses
     * @param tableIdentifier Doris table name ({@code db.table})
     * @param username Doris user name
     * @param password Doris password
     * @param sequenceCol sequence column name for stream load
     * @return {@link DorisSink<String>}
     */
    public static DorisSink<String> buidDorisSink(
            String fenodes,
            String tableIdentifier,
            String username,
            String password,
            String sequenceCol) {
        // Stream-load settings: JSON lines with the outer array stripped, so the
        // operator can emit a whole JSON array per record.
        Properties loadProps = new Properties();
        loadProps.setProperty("format", "json");
        loadProps.setProperty("read_json_by_line", "true");
        loadProps.setProperty("strip_outer_array", "true");
        loadProps.setProperty("function_column.sequence_col", sequenceCol);

        DorisOptions connectionOpts =
                DorisOptions.builder()
                        .setFenodes(fenodes)
                        .setTableIdentifier(tableIdentifier)
                        .setUsername(username)
                        .setPassword(password)
                        .build();

        // Timestamped label prefix keeps stream-load labels unique across restarts.
        DorisExecutionOptions executionOpts =
                DorisExecutionOptions.builder()
                        .setLabelPrefix("label-doris_" + System.currentTimeMillis())
                        .setStreamLoadProp(loadProps)
                        .build();

        return DorisSink.<String>builder()
                .setDorisReadOptions(DorisReadOptions.builder().build())
                .setDorisExecutionOptions(executionOpts)
                // records are already serialized JSON strings
                .setSerializer(new SimpleStringSerializer())
                .setDorisOptions(connectionOpts)
                .build();
    }

    private static class WeKafkaKeyedSerializationSchema
            implements KeyedSerializationSchema<String> {
        @Override
        public byte[] serializeKey(String element) {
            JSONArray objects = JSONObject.parseArray(element);
            if(objects != null) {
                JSONObject jsonObject = objects.getJSONObject(0);
                if(jsonObject != null) {
                    return jsonObject.getString(OUTPUTKEY).getBytes();
                }
            }
            LOG.error("fail to get partition key!!");
            return "".getBytes();
//            JSONObject jsonObject = JSONObject.parseObject(element);
//            String keyby = jsonObject.getString(OUTPUTKEY);
//            return keyby.getBytes();
        }

        @Override
        public byte[] serializeValue(String element) {
            return element.getBytes();
        }

        @Override
        public String getTargetTopic(String element) {
            return null;
        }
    }

    /**
     * Hash-partitions records across Kafka partitions by the serialized key, so all
     * messages for one user land in the same partition.
     */
    private static class WeKafkaCustomPartitioner extends FlinkKafkaPartitioner<String> {

        @Override
        public int partition(
                String record, byte[] key, byte[] value, String targetTopic, int[] partitions) {
            // Defensive: a null key maps to partition 0 instead of throwing NPE.
            String keyStr = (key == null) ? "" : new String(key, StandardCharsets.UTF_8);
            int partition = Math.abs(keyStr.hashCode() % partitions.length);
            // Bug fix: the guard checked isDebugEnabled() but logged at INFO level;
            // use parameterized DEBUG logging, which needs no guard at all.
            LOG.debug(
                    " partitions: {} partition: {} key: {}",
                    partitions.length,
                    partition,
                    keyStr);
            return partition;
        }
    }
}
