package com.we.risk.smsInfo.atoV0;

import com.alibaba.fastjson.JSONObject;
import com.we.doris.SinkDoris;
import com.we.flink.utils.WeKafkaPropertyReader;
import org.apache.doris.flink.sink.DorisSink;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
import org.apache.flink.contrib.streaming.state.RocksDBStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Properties;

/**
 * Flink streaming job: consumes SMS upload-report records from Kafka, filters to
 * {@code HFQ_UPLOAD_SMS_REPORT_RECORD} events carrying a user key and an in-range
 * timestamp, keys the stream by user, derives variables via
 * {@link AtoSmsRichFlatMapFunc}, and sinks the result to Doris.
 */
public class AtoSmsV0 {
    /** Classpath location of the production Kafka/job property file. */
    public static final String RELEASEPROP =
            "risk/smsInfo/var/adm_user_label_kfk_prod.properties";
    public static final Logger LOG = LoggerFactory.getLogger(AtoSmsV0.class);
    /** Envelope "type" value identifying SMS upload-report records. */
    public static final String SmsdataTYPE = "HFQ_UPLOAD_SMS_REPORT_RECORD";
    public static final String INPUTKEYBY = "userKey";
    public static final String OUTKEYBY = "user_key";
    public static final String INPUTJOBID = "jobID";
    public static final String OUTJOBID = "job_id";
    public static final String INPUTTIME = "timestamp";
    public static final String RECVTIME = "recv_time";
    public static final String SMSLIST = "smsInfoList";
    public static final String DORISFENODES = "10.10.16.212:8030,10.10.16.217:8030,10.10.16.218:8030,10.10.16.220:8030,10.10.16.221:8030";
    /**
     * Epoch-millis cutoff: only records with timestamp strictly BEFORE this value pass.
     * NOTE(review): a commented-out {@code >=} variant of this condition existed previously;
     * confirm the intended direction of the cutoff (looks like a backfill boundary).
     */
    public static final long TIMESTAMP_CUTOFF_MS = 1692028800000L;

    public static void main(String[] args) {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        try {
            WeKafkaPropertyReader paramReader = WeKafkaPropertyReader.init(RELEASEPROP);
            /** RocksDB state backend, location read from the property file. */
            env.setStateBackend(new RocksDBStateBackend(paramReader.getRocksDBBackendUrl()));
            /** Checkpoint configuration. */
            CheckpointConfig ckConf = env.getCheckpointConfig();
            ckConf.setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);
            ckConf.setCheckpointInterval(5 * 1000); // ms
            ckConf.setCheckpointTimeout(5 * 60 * 1000);
            ckConf.setMaxConcurrentCheckpoints(1);
            ckConf.setMinPauseBetweenCheckpoints(500);
            // Keep checkpoints on cancellation so the job can be restored manually.
            ckConf.enableExternalizedCheckpoints(
                    CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);

            ckConf.enableUnalignedCheckpoints();

            /** Kafka consumer setup. */
            String srcKafkaTopic = paramReader.getTmpKfkTopic();
            String srcKafkaBootStrapServer = paramReader.getTmpKfkBootStrapServer();
            int tmpKfkPartitions = paramReader.getTmpKfkPartitions();
            String srckfkGrupId = paramReader.getTmpKfkGroupId();

            Properties consumProp = new Properties();
            consumProp.setProperty("group.id", srckfkGrupId);
            consumProp.setProperty(
                    "key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
            consumProp.setProperty(
                    "value.deserializer",
                    "org.apache.kafka.common.serialization.StringDeserializer");

            KafkaSource<String> kfkSource =
                    KafkaSource.<String>builder()
                            .setBootstrapServers(srcKafkaBootStrapServer)
                            .setTopics(srcKafkaTopic)
                            .setStartingOffsets(OffsetsInitializer.earliest())
                            .setProperties(consumProp)
                            .setValueOnlyDeserializer(new SimpleStringSchema())
                            .build();

            /** distributed cache for Sougou+Cuishou Tag + mob_area_clean Data */

            // Source parallelism matches partition count so every partition gets a reader.
            SingleOutputStreamOperator<String> input =
                    env.fromSource(kfkSource, WatermarkStrategy.noWatermarks(), "Sms")
                            .uid("Sms-AtoV0-source")
                            .setParallelism(tmpKfkPartitions);

            SingleOutputStreamOperator<String> out = input
                    .filter(
                            new FilterFunction<String>() {
                                /**
                                 * Keeps only well-formed SMS report records with a non-null
                                 * user key and a timestamp before the cutoff. Malformed
                                 * records are dropped instead of failing the job.
                                 */
                                @Override
                                public boolean filter(String value) throws Exception {
                                    JSONObject envelope = JSONObject.parseObject(value);
                                    if (envelope == null
                                            || !SmsdataTYPE.equals(envelope.getString("type"))) {
                                        return false;
                                    }
                                    JSONObject data = parseDataJson(envelope);
                                    if (data == null || data.getString(INPUTKEYBY) == null) {
                                        return false;
                                    }
                                    String ts = data.getString(INPUTTIME);
                                    if (ts == null) {
                                        return false;
                                    }
                                    try {
                                        return Long.parseLong(ts) < TIMESTAMP_CUTOFF_MS;
                                    } catch (NumberFormatException nfe) {
                                        // Unparseable timestamp: skip the record.
                                        return false;
                                    }
                                }
                            })
                    .keyBy(
                            new KeySelector<String, String>() {
                                /** Keys the stream by the record's user key (filter guarantees non-null). */
                                @Override
                                public String getKey(String value) throws Exception {
                                    return parseDataJson(JSONObject.parseObject(value))
                                            .getString(INPUTKEYBY);
                                }
                            })
                    .flatMap(new AtoSmsRichFlatMapFunc());

            // Waiting for the Doris table to be created.
            // NOTE(review): credentials are hardcoded here — move to the property file / secret store.
            DorisSink<String> dorisSink = SinkDoris.buidJsonArrayDorisSink(DORISFENODES,
                    "ato_var_sms.var_230817",
                    "sms_var_230817",
                    "doris",
                    "wetc@#dw23",
                    "recv_time");

            out.sinkTo(dorisSink).setParallelism(2);
            out.print();

            env.execute(AtoSmsV0.class.getSimpleName());

        } catch (Exception e) {
            LOG.error("Job failed: " + e.getMessage(), e);
        }

    }

    /**
     * Extracts and parses the nested "dataJson" payload from an already-parsed envelope.
     *
     * @param envelope the parsed outer Kafka message, may be null
     * @return the parsed payload object, or null when the envelope or payload is absent/invalid
     */
    private static JSONObject parseDataJson(JSONObject envelope) {
        if (envelope == null) {
            return null;
        }
        String dataJson = envelope.getString("dataJson");
        return dataJson == null ? null : JSONObject.parseObject(dataJson);
    }


}
