package liveData.migration.toPhoenix.task;

import com.alibaba.fastjson.JSONObject;
import com.twitter.chill.protobuf.ProtobufSerializer;
import liveData.migration.toPhoenix.entity.FlatMessage;
import liveData.migration.toPhoenix.entity.JobSetting;
import liveData.migration.toPhoenix.io.KafkaDeserialFlaMess;
import liveData.migration.toPhoenix.sink.CommonPhoenixSink;
import liveData.migration.toPhoenix.source.KafkaSource;
import liveData.migration.toPhoenix.util.KafkaProperties;
import liveData.migration.toPhoenix.util.StringUtils;
import liveData.migration.toPhoenix.util.partitionUtils.UidKeySelecter;
import liveData.migration.toPhoenix.util.partitionUtils.UidPartitioner;
import lombok.extern.slf4j.Slf4j;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.streaming.api.functions.sink.PrintSinkFunction;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.streaming.connectors.kafka.internals.KafkaTopicPartition;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;

import static liveData.migration.toPhoenix.entity.ConsumerOffset.EARLIEST;
import static liveData.migration.toPhoenix.entity.ConsumerOffset.GROUPOFFSETS;
import static liveData.migration.toPhoenix.entity.ConsumerOffset.LATEST;
import static liveData.migration.toPhoenix.util.DmlType.DELETE;
import static liveData.migration.toPhoenix.util.DmlType.INSERT;
import static liveData.migration.toPhoenix.util.DmlType.UPDATE;
import static liveData.migration.toPhoenix.util.FlinkSettings.assignTwFlat;
import static liveData.migration.toPhoenix.util.FlinkSettings.checkpointSettings;

/**
 * @program: userPortrait
 * @description:
 * @author: sunteng
 * @create: 2020-07-20 11:24
 **/
@Slf4j
public class CommonTaskTest {

    /**
     * Builds and runs the Flink streaming job described by {@code jobSetting}:
     * reads canal-style {@link FlatMessage} records from Kafka, keeps only rows of the
     * configured MySQL table, routes them by type (DML / DDL / other) via side outputs,
     * and sinks DML and DDL records into Phoenix.
     *
     * @param jobSetting parsed job configuration (source topic/offsets, parallelism, sink)
     * @throws Exception if the Flink job fails to build or execute
     */
    public static void dealDataStream(JobSetting jobSetting) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        env.getConfig().registerTypeWithKryoSerializer(JobSetting.class, ProtobufSerializer.class);

        env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);

        int parallelism = jobSetting.getParallelism();
        env.setParallelism(parallelism);

        checkpointSettings(env);

        String topic = jobSetting.getSource().getTopic();

        Properties kafkaProperties = KafkaProperties.getInstance().getProp(jobSetting.getSource().getServer());
        KafkaSource kafkaSource = KafkaSource.getInstance();

        FlinkKafkaConsumer<FlatMessage> dbConsumer =
                kafkaSource.getKafkaSource(topic, new KafkaDeserialFlaMess(), kafkaProperties);

        configureStartPosition(dbConsumer, jobSetting, topic);

        assignTwFlat(dbConsumer);

        DataStream<FlatMessage> dbStream = env
                .addSource(dbConsumer)
                .setParallelism(jobSetting.getSource().getPartitionParallel())
                .name("source")
                .uid("source");

        String table = jobSetting.getSource().getMysqlTable();

        String mysqlPk = jobSetting.getSource().getMysqlPk();
        if (StringUtils.isEmpty(mysqlPk)) {
            // The primary key drives custom partitioning below; without it ordering
            // per key is not guaranteed. Logged, not thrown, to preserve behavior.
            log.error("mysqlPk is null,please check setting");
        }

        // Side-output tags: DML (insert/update/delete), DDL, and everything else.
        final OutputTag<FlatMessage> outputTagDML = new OutputTag<FlatMessage>("dml") {
        };
        final OutputTag<FlatMessage> outputTagOther = new OutputTag<FlatMessage>("other") {
        };
        final OutputTag<FlatMessage> outputTagDDL = new OutputTag<FlatMessage>("ddl") {
        };

        SingleOutputStreamOperator<FlatMessage> outStream = dbStream
                // Partition by primary key so all changes of one row land on one subtask.
                .partitionCustom(new UidPartitioner(), new UidKeySelecter(mysqlPk))
                .filter(new FilterFunction<FlatMessage>() {
                    @Override
                    public boolean filter(FlatMessage value) throws Exception {
                        // Null check must come before any dereference (the original
                        // logged value.getTable() first, making the guard dead code).
                        if (value == null) {
                            return false;
                        }
                        log.debug("##filter操作中将要被处理的数据的表名:[{}],库名:[{}],数据操作类型是否DDL:[{}]", value.getTable(), value.getDatabase(), value.getIsDdl());

                        if (table.equals(value.getTable())) {
                            log.debug("##匹配到的符合条件的表名：[{}],数据:[{}]", value.getTable(), value);
                            return true;
                        }
                        log.debug("##被过滤的数据DML表名不匹配：[{}]", value);
                        return false;
                    }
                })
                .name("filter data")
                .uid("filter data")
                .forward()
                .process(new ProcessFunction<FlatMessage, FlatMessage>() {
                    @Override
                    public void processElement(FlatMessage value, Context ctx, Collector<FlatMessage> collector) throws Exception {
                        // Route each record to exactly one side output by message type.
                        if (value.getIsDdl()) {
                            ctx.output(outputTagDDL, value);
                        } else {
                            if (value.getType().equals(INSERT.toString()) || value.getType().equals(UPDATE.toString()) || value.getType().equals(DELETE.toString())) {
                                ctx.output(outputTagDML, value);
                            } else {
                                ctx.output(outputTagOther, value);
                            }
                        }
                    }
                })
                .name("output data by type")
                .uid("output data by type");

        DataStream<FlatMessage> otherDataStream = outStream.getSideOutput(outputTagOther);
        DataStream<FlatMessage> dmlDataStream = outStream.getSideOutput(outputTagDML);
        DataStream<FlatMessage> ddlDataStream = outStream.getSideOutput(outputTagDDL);

        // Unrecognized message types are only printed for inspection.
        if (otherDataStream != null) {
            otherDataStream.rescale()
                    .addSink(new PrintSinkFunction<FlatMessage>("other data", false))
                    .name("other data print")
                    .uid("other data print");
        }

        if (ddlDataStream != null) {
            ddlDataStream.rescale()
                    .addSink(new CommonPhoenixSink(jobSetting))
                    .name("ddl data sink")
                    .uid("ddl data sink");
        }

        // DML is re-partitioned by primary key so the Phoenix sink applies
        // changes for a given row in order.
        if (dmlDataStream != null) {
            dmlDataStream
                    .partitionCustom(new UidPartitioner(), new UidKeySelecter(mysqlPk))
                    .addSink(new CommonPhoenixSink(jobSetting))
                    .name("dml data sink")
                    .uid("dml data sink");
        }

        System.out.println(env.getExecutionPlan());
        env.execute(jobSetting.getTaskName());

    }

    /**
     * Configures where the Kafka consumer starts reading.
     *
     * <p>Explicit per-partition offsets (index in the list == partition number) win over
     * the textual {@code offsetFlag}; an empty or unrecognized flag falls back to latest.
     *
     * <p>Fixes the original comparison {@code offsetFlag.equals(LATEST.getClass())},
     * which compared a String against a Class object and was therefore always false.
     */
    private static void configureStartPosition(FlinkKafkaConsumer<FlatMessage> dbConsumer,
                                               JobSetting jobSetting,
                                               String topic) {
        List<Long> offsets = jobSetting.getSource().getPartitionOffsets();
        String offsetFlag = jobSetting.getSource().getOffsetFlag();

        if (offsets != null && !offsets.isEmpty()) {
            Map<KafkaTopicPartition, Long> offsetMap = new HashMap<>(offsets.size());
            for (int partition = 0; partition < offsets.size(); partition++) {
                offsetMap.put(new KafkaTopicPartition(topic, partition), offsets.get(partition));
            }
            dbConsumer.setStartFromSpecificOffsets(offsetMap);
        } else if (StringUtils.isEmpty(offsetFlag)) {
            dbConsumer.setStartFromLatest();
        } else if (offsetFlag.equals(EARLIEST.getOffsetFlag())) {
            dbConsumer.setStartFromEarliest();
        } else if (offsetFlag.equals(GROUPOFFSETS.getOffsetFlag())) {
            dbConsumer.setStartFromGroupOffsets();
        } else {
            // Covers LATEST.getOffsetFlag() and any unknown value.
            dbConsumer.setStartFromLatest();
        }
    }

    /**
     * Test entry point: builds a {@link JobSetting} from an inline JSON string
     * and runs the job. Production code should load the configuration from a file
     * or program arguments instead.
     */
    public static void main(String[] args) throws Exception {
        System.out.println("##解析json配置文件");

        JobSetting jobSetting = JSONObject.parseObject("{\n" +
                        "  \"parallelism\": 6,\n" +
                        "  \"sink\": {\n" +
                        "    \"mode\": \"phoenix\",\n" +
                        "    \"phoenixDriver\": \"org.apache.phoenix.jdbc.PhoenixDriver\",\n" +
                        "    \"phoenixTable\": \"verify_result_phoenix\",\n" +
                        "    \"phoenixUrl\": \"jdbc:phoenix:10.10.12.215,10.10.12.216,10.10.12.129:2181:/hbase\"\n" +
                        "  },\n" +
                        "  \"source\": {\n" +
                        "    \"mode\": \"kafka\",\n" +
                        "    \"mysqlPk\": \"id\",\n" +
                        "    \"mysqlTable\": \"verify_result\",\n" +
                        "    \"offsetFlag\": \"earliest\",\n" +
                        "    \"partitionParallel\": 3,\n" +
                        "    \"server\": \"10.10.15.82:9092\",\n" +
                        "    \"topic\": \"risk.management.mysql.paydayloan.verify.result\"\n" +
                        "  },\n" +
                        "  \"taskName\": \"VERIFY_RESULT_PHOENIX\"\n" +
                        "}\n"
                , JobSetting.class);
        System.out.println("##jobSetting ##table集合is::" + jobSetting.getSource().getMysqlTable());
        dealDataStream(jobSetting);
    }
}
