package com.leiyuee.flink.batch.kafka;

import com.alibaba.fastjson.JSONObject;
import com.leiyuee.flink.tools.SinkBuilder;
import com.leiyuee.flink.tools.StatementUtil;
import com.leiyuee.flink.tools.constants.MySQLConstant;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.state.storage.FileSystemCheckpointStorage;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.streaming.connectors.kafka.KafkaDeserializationSchema;
import org.apache.flink.table.data.GenericRowData;
import org.apache.flink.table.data.RowData;
import org.apache.flink.table.data.StringData;
import org.apache.flink.table.data.TimestampData;
import org.apache.flink.types.RowKind;
import org.apache.hudi.common.model.HoodieTableType;
import org.apache.hudi.configuration.FlinkOptions;
import org.apache.kafka.clients.consumer.ConsumerRecord;

import java.nio.charset.StandardCharsets;
import java.sql.Timestamp;
import java.text.SimpleDateFormat;
import java.time.Instant;
import java.time.LocalDate;
import java.time.ZoneId;
import java.util.*;
import java.util.concurrent.TimeUnit;

/**
 * @author Yu_Lei
 * @date 2024/7/9
 */
public class KafkaJob {

    /**
     * 检查点路径前缀
     */
    static String DIR_PREFIX = "trace" + "_";

    /**
     * 对应类名称
     */
    static String SIMPLE_NAME = KafkaJob.class.getSimpleName();

    /**
     * Job 名称
     */
    static String JOB_NAME = SIMPLE_NAME + " Stream Job";
    public static void main(String[] args) throws Exception {
        Configuration envConf = new Configuration();
        envConf.setString("parallelism.default", "1");
        envConf.setString("table.local-time-zone", "Asia/Shanghai");

        envConf.setString("execution.checkpointing.interval", "60000");
        envConf.setString("execution.checkpointing.externalized-checkpoint-retention", "RETAIN_ON_CANCELLATION");
        envConf.setString("execution.checkpointing.max-concurrent-checkpoints", "1");
        envConf.setString("execution.checkpointing.min-pause", "1000");
        envConf.setString("execution.checkpointing.mode", "EXACTLY_ONCE");
        envConf.setString("execution.checkpointing.timeout", "300000");
        envConf.setString("execution.checkpointing.tolerable-failed-checkpoints", "10");

        envConf.setString("state.checkpoints.num-retained", "3");
        envConf.setString("state.backend.incremental", "true");

        envConf.setString("restart-strategy", "fixed-delay");
        envConf.setString("restart-strategy.fixed-delay.attempts", "3");
        envConf.setString("restart-strategy.fixed-delay.delay", "10 s");

        envConf.setString("execution.runtime-mode", "streaming");
        envConf.setString("sql-client.execution.result-mode","tableau");

        envConf.setString("pipeline.time-characteristic", "EventTime");
        envConf.setString("pipeline.auto-watermark-interval", "1000");
        envConf.setString("execution.buffer-timeout.enabled","true");
        envConf.setString("execution.buffer-timeout.interval","2000");

        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(envConf);
        env.setParallelism(1);
        env.setRestartStrategy(RestartStrategies.failureRateRestart(10,
                Time.of(10L, TimeUnit.MINUTES),
                Time.of(10L, TimeUnit.SECONDS)));
        env.getCheckpointConfig().setExternalizedCheckpointCleanup(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(60000L);
        env.getCheckpointConfig().setMaxConcurrentCheckpoints(1);
        env.getCheckpointConfig().setCheckpointStorage(new FileSystemCheckpointStorage("hdfs://hdp-cluster/flink/flink-checkpoints/"+ DIR_PREFIX + SIMPLE_NAME));
        System.setProperty("HADOOP_USER_NAME", "jabil");
        env.enableCheckpointing(60000L);

        String kafka_topicName = "cnhuam0ldsqlv1b.dbo.CR_Calibration";// topic 名称
        String kafka_bootStrpServers = "cnhuam0itpoc85:9092";
        String kafka_consumer_group = "connect-cluster";

        // Kafka 配置
        Properties properties = new Properties();
        properties.setProperty("bootstrap.servers", kafka_bootStrpServers);
        properties.setProperty("group.id", kafka_consumer_group);
        properties.setProperty("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");  //key反序列化

        properties.setProperty("scan.startup.mode", "latest-offset");
        properties.setProperty("format", "debezium-json");

        FlinkKafkaConsumer<RowData> kafkaSource = new FlinkKafkaConsumer<>(kafka_topicName, new KafkaDeserializationSchema<RowData>() {
            @Override
            public boolean isEndOfStream(RowData nextElement) {
                return false;
            }

            @Override
            public RowData deserialize(ConsumerRecord<byte[], byte[]> record) throws Exception {
                // 创建 RowData 对象并填充数据
                GenericRowData row = null;
                if(record != null && record.value() != null) {
                    String valueStr = new String(record.value(), StandardCharsets.UTF_8);
                    // 将 value 的数据获取出来，源数据
                    final JSONObject valueJson = JSONObject.parseObject(valueStr);
                    RowKind rowKind = RowKind.INSERT;
                    // 操作数据
                    JSONObject operateJson = valueJson;

                    if(valueJson.containsKey("op")) {
                        String type = valueJson.get("op").toString().toLowerCase();
                        if (type.equals("d") || type.equals("delete")) {
                            rowKind = RowKind.DELETE;
                            operateJson = valueJson.getJSONObject("before");
                        } else if (type.equals("u") || type.equals("update")) {
                            rowKind = RowKind.UPDATE_AFTER;
                            operateJson = valueJson.getJSONObject("after");
                        } else {
                            operateJson = valueJson.getJSONObject("after");
                        }
                    }

                    row = new GenericRowData(rowKind, 20);
                    row.setField(0, operateJson.getInteger("Calibration_ID"));// Calibration_ID
                    row.setField(1, StringData.fromString(operateJson.getString("CalibrationCode")));// CalibrationCode
                    row.setField(2, operateJson.getInteger("Equipment_ID"));// Equipment_ID
                    row.setField(3, StringData.fromString(operateJson.getString("Laboratory_ID")));// Laboratory_ID
                    row.setField(4, StringData.fromString(operateJson.getString("CalType")));// CalType
                    row.setField(5, StringData.fromString(operateJson.getString("Descr")));// Descr
                    row.setField(6, TimestampData.fromEpochMillis(operateJson.getLong("StartDate")));// StartDate
                    row.setField(7, TimestampData.fromEpochMillis(operateJson.getLong("EndDate")));// EndDate
                    row.setField(8, operateJson.getInteger("Length"));// Length
                    row.setField(9, StringData.fromString(operateJson.getString("AttachFilePath")));// AttachFilePath
                    row.setField(10, StringData.fromString(operateJson.getString("ConfirmFlag")));// ConfirmFlag
                    row.setField(11, StringData.fromString(operateJson.getString("IsActive")));// IsActive
                    row.setField(12, StringData.fromString(operateJson.getString("Processor")));// Processor
                    row.setField(13, operateJson.getInteger("UserID_ID"));// UserID_ID
                    row.setField(14, TimestampData.fromEpochMillis(operateJson.getLong("LastUpdated")));// LastUpdated
                    row.setField(15, TimestampData.fromEpochMillis(operateJson.getLong("plandate")));// plandate

                    long timestamp = System.currentTimeMillis();
                    String timeString = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(timestamp);
                    String year = timeString.substring(0, 4);
                    String mon = timeString.substring(5, 7);
                    String day = timeString.substring(8, 10);

                    row.setField(16, TimestampData.fromEpochMillis(timestamp));// ts
                    row.setField(17, StringData.fromString(year));// partition_year
                    row.setField(18, StringData.fromString(mon));// partition_mon
                    row.setField(19, StringData.fromString(day));// partition_day

                }
                return row;
            }

            @Override
            public TypeInformation<RowData> getProducedType() {
                return TypeInformation.of(RowData.class);
            }
        }, properties);
        DataStream<RowData> dataStream = env.addSource(kafkaSource);

        // hudi 字段&类型配置
        List<String> targetFieldList = Arrays.asList(
                "`Calibration_ID` INT",
                "`CalibrationCode` STRING",
                "`Equipment_ID` INT",
                "`Laboratory_ID` STRING",
                "`CalType` STRING",
                "`Descr` STRING",
                "`StartDate` TIMESTAMP",
                "`EndDate` TIMESTAMP",
                "`Length` INT",
                "`AttachFilePath` STRING",
                "`ConfirmFlag` STRING",
                "`IsActive` STRING",
                "`Processor` STRING",
                "`UserID_ID` INT",
                "`LastUpdated` TIMESTAMP",
                "`plandate` TIMESTAMP",
                "`ts` TIMESTAMP",
                "`partition_year` STRING",
                "`partition_mon` STRING",
                "`partition_day` STRING"
        );
        String pk = "Calibration_ID";// 主键
        String targetTable = "CR_Calibration";// 目标表
        String targetHiveTable = "Calibration_CR_Calibration_hudi_hive";// hive 目标表
        String basePath = "hdfs://hdp-cluster/hua-datalake/ods/traceability/Calibration/CR_Calibration";// HDFS 路径
        String partitions = "partition_year,partition_mon,partition_day";// 分区字段可为空字符串
        Map<String, String> options = new HashMap<>();
        options.put(FlinkOptions.TABLE_NAME.key(), targetTable);// table.name
        options.put(FlinkOptions.TABLE_TYPE.key(), HoodieTableType.MERGE_ON_READ.name());// table.type
        options.put(FlinkOptions.RECORD_KEY_FIELD.key(), pk);// write.recordkey.field
        options.put(FlinkOptions.PRECOMBINE_FIELD.key(), "ts");// write.precombine.field
        options.put(FlinkOptions.PARTITION_PATH_FIELD.key(), partitions);// write.partitionpath.field
        options.put(FlinkOptions.HIVE_SYNC_DB.key(), "ods_traceability");// hive_sync.db
        options.put(FlinkOptions.HIVE_SYNC_METASTORE_URIS.key(), "thrift://cnhuam0itpoc85:9083");// hive_sync.metastore.uris
        options.put(FlinkOptions.HIVE_SYNC_JDBC_URL.key(), "jdbc:hive2://cnhuam0itpoc87:10000");// hive_sync.jdbc_url
        options.put(FlinkOptions.HIVE_SYNC_PASSWORD.key(), "jabil");// hive_sync.password
        options.put(FlinkOptions.HIVE_SYNC_PASSWORD.key(), "jabil");// hive_sync.password
        //options.put(FlinkOptions.HIVE_SYNC_PARTITION_EXTRACTOR_CLASS_NAME.key(), "org.apache.hudi.hive.MultiPartKeysValueExtractor");// hive_sync.partition_extractor_class
        //options.put(FlinkOptions.HIVE_SYNC_PARTITION_FIELDS.key(), partitions);// hive_sync.partition_fields

        SinkBuilder.sinkHoodie(targetFieldList, pk, partitions.split(","), targetTable, targetHiveTable, basePath, options).sink(dataStream, false);

        // MySQL 配置 jabilrt/jabilrt
        String mysqlDB = "RT_Trace_Calibration";// 数据库名
        String mysqlTB = "ecc_calibration";// 表名
        String mysqlURL = MySQLConstant.STG86_URL + mysqlDB;// 数据库连接
        String mysqlStr= String.format("REPLACE INTO %s.%s ", mysqlDB, mysqlTB) +
                // MySQL SQL 字段
                "(calibration_id, calibration_code, equipment_id, laboratory_id, calibration_type, descriptioniption, start_date, end_date, `length`, attachfile_path, confirm_flag, is_active, processor, userid_id, last_updated, plan_date, sync_update_time)\n" +
                "VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, CURRENT_TIMESTAMP);";
        String indexField = String.format("[%s]%s.%s %s", "MySQL", mysqlDB, mysqlTB, "id");

        dataStream.addSink(SinkBuilder.JdbcSink(mysqlStr,
                (statement, event) -> {
                    String index = String.format("%s[%s]", indexField, event.getInt(0));
                    statement.setInt(1, event.getInt(0));// Calibration_ID
                    statement.setString(2, StatementUtil.subValue(index, event, 1, 50));// CalibrationCode
                    statement.setInt(3, event.getInt(2));// Equipment_ID
                    statement.setString(4, StatementUtil.subValue(index, event, 3, 50));// Laboratory_ID
                    statement.setString(5, StatementUtil.subValue(index, event, 4, 50));// CalType
                    statement.setString(6, StatementUtil.subValue(index, event, 5, 300));// Descr
                    statement.setTimestamp(7, event.getTimestamp(6, 9).toTimestamp());// StartDate
                    statement.setTimestamp(8, event.getTimestamp(7, 9).toTimestamp());// EndDate
                    statement.setInt(9, event.getInt(8));// Length
                    statement.setString(10, StatementUtil.subValue(index, event, 9, 1000));// AttachFilePath
                    statement.setString(11, StatementUtil.subValue(index, event, 10, 50));// ConfirmFlag
                    statement.setString(12, StatementUtil.subValue(index, event, 11, 50));// IsActive
                    statement.setString(13, StatementUtil.subValue(index, event, 12, 50));// Processor
                    statement.setInt(14, event.getInt(13));// UserID_ID
                    statement.setTimestamp(15, event.getTimestamp(14, 9).toTimestamp());// LastUpdated
                    statement.setTimestamp(16, event.getTimestamp(15, 9).toTimestamp());// plandate
                }
                , mysqlURL, MySQLConstant.STG86_USER_JABIL, MySQLConstant.STG86_USER_JABIL_PWD));

        env.execute(JOB_NAME);
    }

}
