package com.sh.data.engine.kafka;

import cn.hutool.core.collection.ListUtil;
import cn.hutool.core.convert.Convert;
import cn.hutool.core.date.DateUtil;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.influxdb.client.InfluxDBClient;
import com.influxdb.client.domain.WritePrecision;
import com.influxdb.client.write.Point;
import com.jayway.jsonpath.JsonPath;
import com.sh.data.engine.influxdb.client.InfluxDBClientUtil;
import com.sh.data.engine.iotdb.client.IoTDBSessionUtil;
import com.sh.data.engine.mbean.Context;
import com.sh.data.engine.mqtt.EngineMqttClient;
import com.sh.data.engine.opentsdb.client.OpentsdbClient;
import com.sh.data.engine.util.*;
import com.sh.data.engine.vo.OnlineTaskParam;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hive.hcatalog.data.DefaultHCatRecord;
import org.apache.hive.hcatalog.data.HCatRecord;
import org.apache.hive.hcatalog.data.transfer.DataTransferFactory;
import org.apache.hive.hcatalog.data.transfer.HCatWriter;
import org.apache.hive.hcatalog.data.transfer.WriteEntity;
import org.apache.hive.hcatalog.data.transfer.WriterContext;
import org.apache.iotdb.session.Session;
import org.apache.iotdb.tsfile.file.metadata.enums.TSDataType;
import org.apache.kafka.clients.consumer.*;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.PartitionInfo;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.FileInputStream;
import java.io.IOException;
import java.math.BigDecimal;
import java.security.PrivilegedExceptionAction;
import java.sql.*;
import java.time.Duration;
import java.util.Date;
import java.util.*;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;

import static com.sh.data.engine.util.BatchSaveUtil.executeBatchSave;

/**
 * @author: futang.tyf
 * @date: 2018/11/14 15:03
 * @description: Kafka consumer engine that forwards messages to a configured target datasource.
 */
public class EngineKafkaConsumer {

    private static final Logger log = LoggerFactory.getLogger(EngineKafkaConsumer.class);

    private String hosts;

    private List<String> allColumn;

    private List<String> allColumnType;

    private List<String> partitionValue;

    private List<String> toColumn;

    private List<String> fromColumn;

    private List<String> tagColumn;

    private String topic;

    private String hiveMetastoreUris;

    private String hdfsUrl;

    private String nameServices;

    private String nameNodesAddrs;

    private String nameNodes;

    private String db;

    private String table;

    private Long taskId;

    private String taskIdMD5;

    private Long recordId;

    private String taskName;

    private Date offsetTime;

    private Properties properties;

    private Map<String, Integer> columnIndex;

    private KafkaConsumer<String, String> consumer;

    private WriterContext cntxt;

    private HCatWriter writer;

    private Context context;

    // 目标数据源类型:kafka/mqtt  mysql/pg  hive
    private String targetType;

    // 目标为mysql/pg
    private String jdbcUrl;

    // js脚本解析json
    private String jsContent;

    // 是否使用js解析 0-否 1-是
    private Integer useJsFlag;

    // jsonpath 表达式
    private List<String> jsonPathExp;

    private String schema;

    private String username;

    private String password;

    private String targetTopic;

    private String targetHosts;

    private String hiveSitePath;

    private Connection connection;

    private PreparedStatement psmt;

    private Session iotdbSession;

    private String targetServer;

    private EngineMqttClient mqTargetClient;

    private Integer targetPort;

    private Map<String, String> csvData;

    public EngineKafkaConsumer(OnlineTaskParam param, Context context, Map<String, String> csvData) {
        this.hosts = param.getHosts();
        this.allColumn = param.getAllColumn();
        this.allColumnType = param.getAllColumnType();
        this.partitionValue = param.getPartitionValue();
        this.toColumn = param.getToColumn();
        this.fromColumn = param.getFromColumn();
        this.tagColumn = param.getTagColumn();
        this.topic = param.getTopic();
        this.hiveMetastoreUris = param.getHiveMetastoreUris();
        this.hdfsUrl = param.getHdfsUrl();
        this.nameServices = param.getNameServices();
        this.nameNodesAddrs = param.getNameNodesAddrs();
        this.nameNodes = param.getNameNodes();
        this.db = param.getDb();
        this.table = param.getTable();
        this.taskId = param.getTaskId();
        this.recordId = param.getRecordId();
        this.taskName = param.getTaskName();
        this.offsetTime = param.getOffsetTime();
        this.properties = param.getProperties();
        this.context = context;
        this.targetType = param.getTargetType();
        this.jdbcUrl = param.getJdbcUrl();
        this.jsContent = param.getJsContent();
        this.useJsFlag = param.getUseJsFlag();
        this.jsonPathExp = param.getJsonPathExp();
        this.schema = param.getSchema();
        this.username = param.getUsername();
        this.password = param.getPassword();
        this.targetTopic = param.getTargetTopic();
        this.targetHosts = param.getTargetHosts();
        this.taskIdMD5 = param.getTaskIdMD5();
        this.hiveSitePath = param.getHiveSitePath();
        this.targetServer = param.getTargetServer();
        this.targetPort = param.getTargetPort();
        this.csvData = csvData;
    }

    public void init() throws Exception {
        if (partitionValue == null) {
            partitionValue = Lists.newArrayList();
        }
        Map<String, Integer> columnIndex = Maps.newHashMap();
        if (CollectionUtils.isNotEmpty(allColumnType)) {
            allColumnType =
                allColumnType.stream()
                    .map(
                        i -> {
                            int flag = i.indexOf("varchar");
                            if (flag != -1) {
                                i = "varchar";
                            }
                            return i;
                        })
                    .collect(Collectors.toList());
        }

        if ("Hive".equalsIgnoreCase(targetType)) {
            for (String ac : this.allColumn) {
                for (int i = 0; i < toColumn.size(); i++) {
                    String c = toColumn.get(i);
                    if (ac.equals(c)) {
                        columnIndex.put(ac, i);
                        break;
                    }
                }
            }
            this.columnIndex = columnIndex;
        }

        // 初始化消费者
        Properties props = new Properties();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, hosts);
        props.put(ConsumerConfig.GROUP_ID_CONFIG, String.valueOf(taskIdMD5));
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        props.put("enable.auto.commit", "true");
        props.put("auto.commit.interval.ms", "1000");
        props.put("max.poll.records", "1");
        props.put("session.timeout.ms", "20000");
        props.put("auto.offset.reset", "earliest");
        if (properties != null) {
            for (Object key : this.properties.keySet()) {
                props.put(key, this.properties.get(key));
            }
        }

        this.consumer = new KafkaConsumer<>(props);

        if (offsetTime == null) {
            consumer.subscribe(Arrays.asList(topic));
        } else {
            // 获取topic的partition信息
            List<PartitionInfo> partitionInfos = consumer.partitionsFor(topic);
            List<TopicPartition> topicPartitions = new ArrayList<>();
            Map<TopicPartition, Long> timestampsToSearch = new HashMap<>();
            for (PartitionInfo partitionInfo : partitionInfos) {
                topicPartitions.add(new TopicPartition(partitionInfo.topic(), partitionInfo.partition()));
                timestampsToSearch.put(
                    new TopicPartition(partitionInfo.topic(), partitionInfo.partition()),
                    offsetTime.getTime());
            }
            consumer.assign(topicPartitions);

            // 获取每个partition一个小时之前的偏移量
            Map<TopicPartition, OffsetAndTimestamp> map = consumer.offsetsForTimes(timestampsToSearch);

            OffsetAndTimestamp offsetTimestamp = null;
            log.info("开始设置各分区初始偏移量...");
            for (Map.Entry<TopicPartition, OffsetAndTimestamp> entry : map.entrySet()) {
                // 如果设置的查询偏移量的时间点大于最大的索引记录时间，那么value就为空
                offsetTimestamp = entry.getValue();
                if (offsetTimestamp != null) {
                    long offset = offsetTimestamp.offset();
                    // 设置读取消息的偏移量
                    consumer.seek(entry.getKey(), offset);
                }
            }
            log.info("设置各分区初始偏移量结束...");
        }
        if ("HIVE".equalsIgnoreCase(targetType)) {
            // 初始化hcatalog
            HiveConf hiveConf = new HiveConf(this.getClass());
            FileInputStream inputStream = new FileInputStream(hiveSitePath);
            hiveConf.addResource(inputStream);
            Iterator<Map.Entry<String, String>> itr = hiveConf.iterator();
            Map<String, String> config = new HashMap<>();
            while (itr.hasNext()) {
                Map.Entry<String, String> kv = itr.next();
                config.put(kv.getKey(), kv.getValue());
            }

            WriteEntity.Builder builder = new WriteEntity.Builder();
            WriteEntity entity = builder.withDatabase(db).withTable(table).build();
            writer = DataTransferFactory.getHCatWriter(entity, config);
            UserGroupInformation userGroupInformation = null;
            try {
                userGroupInformation = UserGroupInformation.getBestUGI(null, "hive");
                Object object =
                    userGroupInformation.doAs(
                        new PrivilegedExceptionAction<Object>() {
                            @Override
                            public Object run() throws Exception {
                                cntxt = writer.prepareWrite();
                                return null;
                            }
                        });
            } catch (Exception e) {
                log.error(e.getMessage(), e);
                throw e;
            } finally {
                if (userGroupInformation != null) {
                    try {
                        FileSystem.closeAllForUGI(userGroupInformation);
                    } catch (IOException e) {
                        log.error(e.getMessage(), e);
                    }
                }
            }
        }

        log.info("task :[{}] 任务已启动......", taskName);
    }

    int nums = 0;
    long delay = 0;

    long time = System.currentTimeMillis();

    /**
     * 开始消费
     */
    public void start() throws Exception {
        if ("HIVE".equalsIgnoreCase(targetType)) {
            // 消费者循环获取数据
            UserGroupInformation userGroupInformation = null;
            try {
                userGroupInformation = UserGroupInformation.getBestUGI(null, "hive");
                userGroupInformation.doAs(
                    (PrivilegedExceptionAction<Object>)
                        () -> {
                            while (context.getShutdown() != 1) {
                                if (nums != 0 && delay >= 0) {
                                    context.setDelay((int) (delay / nums));
                                }
                                nums = 0;
                                delay = 0;
                                time = System.currentTimeMillis();
                                writer = DataTransferFactory.getHCatWriter(cntxt);
                                writer.write(new HCatRecordItr());
                                if (nums == 0) {
                                    writer.abort(cntxt);
                                } else {
                                    log.info("fetched nums:{}", nums);
                                    writer.commit(cntxt);
                                }
                            }

                            return null;
                        });
            } catch (Exception e) {
                log.error(e.getMessage());
            } finally {
                if (userGroupInformation != null) {
                    try {
                        FileSystem.closeAllForUGI(userGroupInformation);
                    } catch (IOException e) {
                        log.error(e.getMessage(), e);
                    }
                }
            }
        }
        if ("POSTGRESQL".equalsIgnoreCase(targetType)
            || "MYSQL".equalsIgnoreCase(targetType)
            || "TiDB".equalsIgnoreCase(targetType)) {

            // 获得连接
            String targetTableName;
            switch (targetType.toUpperCase()) {
                case "KINGBASE8":
                    targetTableName = db + "." + table;
                    connection =
                        null == connection
                            ? KingBase8Util.createConnection(jdbcUrl, username, password)
                            : connection;
                    log.info("目标 POSTGRESQL：{} 连接成功", targetHosts);
                    break;
                case "POSTGRESQL":
                    targetTableName = db + "." + table;
                    connection =
                        null == connection
                            ? PostgreSQLUtil.createConnection(jdbcUrl, username, password)
                            : connection;
                    log.info("目标 POSTGRESQL：{} 连接成功", targetHosts);
                    break;
                case "MYSQL":
                case "TIDB":
                    targetTableName = table;
                    connection =
                        null == connection
                            ? MysqlUtil.createConnection(jdbcUrl, username, password)
                            : connection;
                    log.info("目标 Mysql or Tidb：{} 连接成功", targetHosts);
                    // 针对tidb和myqsl 获得完连接后起一个定时任务，1小时后 每4小时select一下
                    ScheduledExecutorService scheduledExecutorService = Executors.newScheduledThreadPool(3);
                    scheduledExecutorService.scheduleAtFixedRate(
                        () -> {
                            try {
                                log.info("开始执行 select 1...");
                                Statement statement = connection.createStatement();
                                String sql1 = "select 1";
                                statement.executeQuery(sql1);
                                log.info("刷新连接成功");
                            } catch (Exception e) {
                                log.error("刷新连接执行出错", e);
                            }
                        },
                        1,
                        4,
                        TimeUnit.HOURS);
                    break;
                default:
                    targetTableName = table;
                    break;
            }
            while (context.getShutdown() != 1) {
                // 从服务端获取数据  阻塞12秒
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofSeconds(12));
                if (records.isEmpty()) {
                    continue;
                } else {
                    logMsg(records);
                }
                List<Object> insertValues = Lists.newArrayList();
                try {
                    insertValues = getInsertValuesSame(records);
                } catch (Exception e) {
                    log.error("解析消息失败，异常:", e);
                    continue;
                }
                StringBuilder sql = new StringBuilder();
                StringBuilder placeholdersBuilder = new StringBuilder();
                for (int i = 0; i < toColumn.size(); i++) {
                    placeholdersBuilder.append("?").append(",");
                }
                String placeholders = placeholdersBuilder.toString();
                placeholders = placeholders.substring(0, placeholders.length() - 1);
                sql.append("insert into ")
                    .append(targetTableName)
                    .append("(")
                    .append(StringUtils.join(toColumn, ","))
                    .append(")")
                    .append(" values")
                    .append("(")
                    .append(placeholders)
                    .append(")")
                    .append(";");
                try {
                    log.info("execsql: " + sql);
                    log.info("insertValues: " + insertValues.toString());
                    executeBatchSave(connection, sql.toString(), toColumn.size(), insertValues, targetType);
                    log.info("保存数据到目标数据库成功");
                } catch (Exception e) {
                    log.error(e.getMessage(), e);
                }
            }
        }

        if ("KAFKA".equalsIgnoreCase(targetType)) {
            Properties props = new Properties();
            props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, targetHosts);
            props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
            props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
            props.put("enable.auto.commit", "true");
            props.put("auto.commit.interval.ms", "1000");
            props.put("max.poll.records", "1");
            props.put("session.timeout.ms", "120000");

            KafkaProducer<String, String> kafkaProducer = new KafkaProducer<>(props);
            while (context.getShutdown() != 1) {
                // 从服务端获取数据  阻塞12秒
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofSeconds(12));
                if (records.isEmpty()) {
                    continue;
                } else {
                    logMsg(records);
                }
                records.forEach(
                    record -> {
                        ProducerRecord<String, String> producerRecord =
                            new ProducerRecord<>(targetTopic, record.value());
                        kafkaProducer.send(producerRecord);
                        kafkaProducer.flush();
                    });
            }
        }
        if ("MQTT".equalsIgnoreCase(targetType)) {

            mqTargetClient = new EngineMqttClient();
            mqTargetClient.init(targetHosts);
            mqTargetClient.connect(null);
            while (context.getShutdown() != 1) {
                try {
                    ConsumerRecords<String, String> records = consumer.poll(Duration.ofSeconds(12));
                    if (records.isEmpty()) {
                        continue;
                    } else {
                        logMsg(records);
                    }
                    records.forEach(
                        record -> {
                            try {
                                mqTargetClient.publishMessage(targetTopic, record.value(), 1, false);
                            } catch (Exception e) {

                            }
                        });
                } catch (Exception e) {
                    // ignore
                }
            }
        }
        if ("OpenTSDB".equalsIgnoreCase(targetType)) {

            // 初始化opentsdbClinet
            OpentsdbClient opentsdbClient = new OpentsdbClient(targetHosts);

            while (context.getShutdown() != 1) {

                // 从服务端获取数据  阻塞12秒
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofSeconds(12));
                if (records.isEmpty()) {
                    continue;
                } else {
                    logMsg(records);
                }
                List<Object> insertValues = Lists.newArrayList();
                try {
                    insertValues = getInsertValues(records);
                } catch (Exception e) {
                    continue;
                }

                String metric = "";
                Long timestamp = 0L;
                Object value;
                Map<String, Object> tags;

                // 按顺序取得要存入opentsdb的数据。这边对顺序和数据类型应该有约定。不然报错
                try {
                    metric = insertValues.get(toColumn.indexOf("metric")).toString();

                    if (toColumn.contains("timestamp")) {
                        timestamp = (Long) insertValues.get(toColumn.indexOf("timestamp"));
                    } else {
                        timestamp = System.currentTimeMillis() / 1000;
                    }

                    value = insertValues.get(toColumn.indexOf("value"));
                    tags = (Map<String, Object>) insertValues.get(toColumn.indexOf("tags"));

                    if (tags.keySet().size() > 8) {
                        log.error("OpenTSDB最多只能打8个tag");
                        continue;
                    }

                } catch (Exception e) {
                    log.error("存入opentsdb的数据不符合规范");
                    continue;
                }

                // 存入opentsdb
                try {
                    opentsdbClient.putData(metric, timestamp, value, tags);
                } catch (Exception e) {
                    log.error(e.getMessage());
                }
            }
        }
        if ("IoTDB".equalsIgnoreCase(targetType)) {

            if (!fromColumn.contains("name")) {
                throw new RuntimeException("数据源找不到name字段，无法获取device名称，无法插入");
            }
            if (!toColumn.contains("value")) {
                throw new RuntimeException("找不到value字段，无法获取测点值，无法插入");
            }

            iotdbSession = IoTDBSessionUtil.getIoTDBSession(targetServer, username, password);
            iotdbSession.open(false, 10000);
            log.info("目标 iotdb：{} 连接成功", targetServer);

            // root.xuelangyun.hls.s1
            String deviceId = table;
            //      Map<String, TSDataType> columnMap = Maps.newHashMap();
            //      for (int i = 0; i < allColumn.size(); i++) {
            //        columnMap.put(
            //            allColumn.get(i), EnumUtil.fromString(TSDataType.class, allColumnType.get(i)));
            //      }
            // 过滤出字段
            List<String> schemaList =
                toColumn.stream()
                    .filter(co -> !IoTDBSessionUtil.TIMESTAMP_MEASUREMENT_2.equals(co))
                    .filter(co -> !"name".equals(co))
                    .collect(Collectors.toList());

            while (context.getShutdown() != 1) {
                // 从服务端获取数据  阻塞12秒
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofSeconds(12));
                if (records.isEmpty()) {
                    continue;
                } else {
                    logMsg(records);
                }
                List<Object> insertValues = Lists.newArrayList();
                try {
                    insertValues = getInsertValues(records);
                } catch (Exception e) {
                    log.error("解析消息失败，异常:", e);
                    continue;
                }
                try {
                    //  不使用系统时间的话，传入的kafka消息中的时间
                    Long convertTime;
                    if (toColumn.contains(IoTDBSessionUtil.TIMESTAMP_MEASUREMENT_2)) {
                        // 如果字段信息中有Time(IOTDB)，那么对传进来的时间值进行转化，变成时间戳。
                        // 这里有个注意点，iotdb中的时间戳是不能相同的
                        Object timeList =
                            insertValues.get(toColumn.indexOf(IoTDBSessionUtil.TIMESTAMP_MEASUREMENT_2));
                        convertTime = convertTime(timeList);
                        // insertValues.remove(toColumn.indexOf(IoTDBSessionUtil.TIMESTAMP_MEASUREMENT));
                    } else {
                        convertTime = System.currentTimeMillis();
                    }
                    // --------------------------------------------------------------------

                    // 1.针对于葫芦素需求，从name字段中拿到所有的测点信息，拼接成最终deviceId
                    // example：传进来的已经是某个测点，例如
                    // root.xuelangyun.hls.s1（敏捷版要选目标表，这个表是deviceId，所以要预先按照层级创建device模版）
                    // 我们最后要得到的测点是 root.xuelangyun.hls.${name} 列表

                    String prefixPath = getPrefixPath(deviceId);
                    List<String> deviceIdList = new ArrayList<>();
                    Object deviceList = insertValues.get(toColumn.indexOf("name"));

                    List insertDeviceList = Convert.convert(List.class, deviceList);
                    for (Object insertDevice : insertDeviceList) {
                        if (isContainChinese(insertDevice.toString())) {
                            // 葫芦素配电室组件的name是中文，这里要进行映射成英文
                            String insertDeviceEn = pointNameMapping(insertDevice.toString());
                            deviceIdList.add(prefixPath + "." + insertDeviceEn);
                        } else {
                            deviceIdList.add(prefixPath + "." + insertDevice);
                        }
                    }

                    // 构造insertRecords()参数
                    List<String> deviceIds = deviceIdList;
                    List<Long> times = Lists.newArrayList();
                    List<List<String>> measurementsList = Lists.newArrayList();
                    List<List<TSDataType>> typesList = Lists.newArrayList();
                    List<List<Object>> valuesList = Lists.newArrayList();

                    for (int i = 0; i < deviceIdList.size(); i++) {
                        times.add(convertTime);
                        measurementsList.add(schemaList);
                        typesList.add(Lists.newArrayList(TSDataType.FLOAT));
                        Object value = insertValues.get(toColumn.indexOf("value"));
                        List<Object> valList = Convert.convert(List.class, value);
                        // 默认value值存为float类型。
                        try {
                            List<Object> values =
                                Convert.convert(List.class, Float.parseFloat(valList.get(i).toString()));
                            valuesList.add(values);
                        } catch (Exception e) {
                            // log.error("转化float失败,默认存入-1" + e);
                            float defaultValue = -1;
                            valuesList.add(Lists.newArrayList(defaultValue));
                        }
                    }
                    iotdbSession.insertRecords(deviceIds, times, measurementsList, typesList, valuesList);
                    // log.info("写入iotdb成功");
                } catch (Exception e) {
                    log.error("保存数据到iotdb异常", e);
                }
            }
        }
        if ("InfluxDB".equalsIgnoreCase(targetType)) {
            InfluxDBClient influxDBClient =
                InfluxDBClientUtil.getInfluxDBClient(jdbcUrl, password, schema, db);
            log.info("目标 influxdb：{} 连接成功", jdbcUrl);

            try {
                while (context.getShutdown() != 1) {
                    // 从服务端获取数据  阻塞12秒
                    ConsumerRecords<String, String> records = consumer.poll(Duration.ofSeconds(12));
                    if (records.isEmpty()) {
                        continue;
                    } else {
                        logMsg(records);
                    }
                    List<Object> insertValues = Lists.newArrayList();
                    try {
                        insertValues = getInsertValues(records);
                    } catch (Exception e) {
                        log.error("解析消息失败，异常:", e);
                        continue;
                    }

                    List<List<Object>> values = ListUtil.split(insertValues, toColumn.size());
                    List<Point> points = new ArrayList<>();
                    for (List<Object> value : values) {
                        Date time = null;
                        Map<String, String> tags = new HashMap<>();
                        Map<String, Object> fields = new HashMap<>();
                        for (int i = 0; i < toColumn.size(); i++) {
                            String columnName = toColumn.get(i);
                            Object columnValue = value.get(i);
                            if (columnName.equals("time")) {
                                time = InfluxDBClientUtil.convert(columnValue);
                            } else if (tagColumn.contains(columnName)) {
                                tags.put(columnName, columnValue.toString());
                            } else {
                                fields.put(columnName, columnValue);
                            }
                        }
                        if (time == null) {
                            log.warn("消息中不包含timestamp列，使用当前时间作为记录的时间戳");
                            time = new Date();
                        }
                        if (fields.isEmpty()) {
                            log.warn("消息中不包含field列，忽略本次数据插入");
                            return;
                        }

                        // 生成InfluxDB Point对象
                        Point point =
                            Point.measurement(table)
                                .time(time.getTime(), WritePrecision.MS)
                                .addTags(tags)
                                .addFields(fields);
                        points.add(point);
                    }

                    if (CollectionUtils.isEmpty(points)) {
                        log.warn("消息体中没有数据需要保存到influxdb");
                    }

                    try {
                        InfluxDBClientUtil.insert(influxDBClient, points);
                        log.info("保存数据到 influxdb 成功");
                    } catch (Exception e) {
                        log.error("保存数据到 influxdb 失败 : {}", e.getMessage());
                    }
                }
            } finally {
                influxDBClient.close();
                log.info("关闭 influxdb 连接");
            }
        }

        // 关闭消费者
        consumer.close();
        if (connection != null) {
            connection.close();
        }
        log.info("taskName:{},关闭", taskName);
    }

    private String getPrefixPath(String deviceId) {
        int index = deviceId.lastIndexOf(".");
        return deviceId.substring(0, index);
    }

    /**
     * Converts a list of date-time strings (e.g. "2022-12-14 13:38:18.518") into epoch
     * millisecond timestamps and returns the first one, which is used as the IoTDB row
     * timestamp.
     *
     * @param timeList an object convertible to a {@link List} of date-time strings in any
     *                 format supported by hutool's {@code DateUtil.parse}
     * @return the epoch-millisecond timestamp of the first entry
     * @throws RuntimeException when the list is empty or an entry cannot be parsed
     * @author tinglan.ys
     * @date 2022/12/15
     */
    private Long convertTime(Object timeList) {
        // Real time values supplied by the upstream message (system time is not used).
        List<Long> convertTimeList = new ArrayList<>();
        List insertValList = Convert.convert(List.class, timeList);

        // Guard: an empty time list previously crashed with IndexOutOfBoundsException
        // at get(0) below; fail with the same message callers already expect.
        if (insertValList == null || insertValList.isEmpty()) {
            throw new RuntimeException("时间戳解析异常");
        }

        for (Object insertTime : insertValList) {
            try {
                long time = DateUtil.parse((String) insertTime).getTime();
                convertTimeList.add(time);
            } catch (Exception e) {
                log.error(
                    "时间值[{}]解析异常，目前支持的日期时间格式包括\n"
                        + "yyyy-MM-dd HH:mm:ss\n"
                        + "yyyy/MM/dd HH:mm:ss\n"
                        + "yyyy.MM.dd HH:mm:ss\n"
                        + "yyyy年MM月dd日 HH时mm分ss秒\n"
                        + "yyyy-MM-dd\n"
                        + "yyyy/MM/dd\n"
                        + "yyyy.MM.dd\n"
                        + "HH:mm:ss\n"
                        + "HH时mm分ss秒\n"
                        + "yyyy-MM-dd HH:mm\n"
                        + "yyyy-MM-dd HH:mm:ss.SSS\n"
                        + "yyyy-MM-dd HH:mm:ss.SSSSSS\n"
                        + "yyyyMMddHHmmss\n"
                        + "yyyyMMddHHmmssSSS\n"
                        + "yyyyMMdd\n"
                        + "EEE, dd MMM yyyy HH:mm:ss z\n"
                        + "EEE MMM dd HH:mm:ss zzz yyyy\n"
                        + "yyyy-MM-dd'T'HH:mm:ss'Z'\n"
                        + "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'\n"
                        + "yyyy-MM-dd'T'HH:mm:ssZ\n"
                        + "yyyy-MM-dd'T'HH:mm:ss.SSSZ，请选择其中任意一种格式后重试",
                    insertTime);
                // Fix: keep the original parse failure as the cause instead of dropping it.
                throw new RuntimeException("时间戳解析异常", e);
            }
        }

        // When time is a list, the first entry is used as the IoTDB timestamp.
        return convertTimeList.get(0);
    }

    /**
     * Iterator handed to the HCatalog writer: lazily pulls batches from Kafka via the
     * enclosing class's getRecord() and serves them one by one. Iteration stops when the
     * per-batch record limit is hit, the batch timeout elapses, or an external shutdown
     * is signalled.
     *
     * <p>NOTE(review): depends on outer-class state — "time" appears to be the batch
     * start timestamp, "context" the task control bean, "nums" a processed-record
     * counter; confirm against the field declarations earlier in the file.
     */
    private class HCatRecordItr implements Iterator<HCatRecord> {

        // Number of records fetched so far in this batch.
        int size = 0;
        // Maximum records per batch before the writer flushes.
        private static final int limit = 2000;

        // Maximum batch duration in ms. NOTE(review): an original comment said "100
        // seconds" but the constant is 120s — the constant is what the code enforces.
        private static final int timeout = 120000;

        // Records fetched from Kafka but not yet handed to the writer.
        List<HCatRecord> datas = Lists.newArrayList();

        @Override
        public boolean hasNext() {

            // Keep fetching while under the limit, within the timeout, and not shut down.
            if (size < limit
                && (System.currentTimeMillis() - time) < timeout
                && context.getShutdown() == 0) {
                List<HCatRecord> data = getRecord();
                datas.addAll(data);
                size += data.size();
            }

            if ((datas.size() == 0)) {

                if (context.getShutdown() == 1) {
                    return false;
                }

                // Flush once the single-batch limit has been reached.
                if (size >= limit) {
                    return false;
                }
                // Give up once the batch timeout has elapsed.
                if ((System.currentTimeMillis() - time) > timeout) {
                    return false;
                }

                // Nothing buffered but no stop condition yet: poll again. Recursion depth
                // is bounded because each getRecord() blocks up to 12s and the timeout
                // check above eventually returns false.
                return hasNext();
            }

            return true;
        }

        @Override
        public HCatRecord next() {

            // "nums" counts records handed to the writer (outer-class field).
            nums++;
            return datas.remove(0);
        }

        @Override
        public void remove() {
            // Removal is not supported for this streaming iterator.
            throw new RuntimeException();
        }
    }

    // for HIVE
    /**
     * Polls one batch from Kafka and converts each record into HCatRecords for the
     * HCatalog writer. Row layout: partition values first (in partitionValue order),
     * then one slot per allColumn entry (null when the target column has no mapped
     * source column).
     *
     * <p>Parse mode depends on useJsFlag: 0 = jsonPath expressions (one row per record);
     * otherwise a JavaScript function that may yield several rows per record, the first
     * returned row being the header of field names.
     *
     * <p>Per-record parse failures are logged and skipped; they never abort the batch.
     *
     * @return converted records; empty list when the poll returned nothing
     */
    private List<HCatRecord> getRecord() {

        List<HCatRecord> datas = Lists.newArrayList();

        // Fetch data from the broker, blocking for up to 12 seconds.
        ConsumerRecords<String, String> records = consumer.poll(Duration.ofSeconds(12));
        //    StringBuffer sb = new StringBuffer();
        //    records.forEach(r -> sb.append(r.toString() + "\n"));
        //    log.info("收到消息：{}", sb.toString());
        if (records == null || records.isEmpty()) {
            // Nothing received this poll; hand back the empty list.
            return datas;
        }

        Map<String, Object> value = null;
        // Wrap each record's payload according to the configured source columns.
        for (ConsumerRecord<String, String> record : records) {
            // Broker-side timestamp, used below to accumulate end-to-end delay.
            long time1 = record.timestamp();

            context.setLastSyncTime(new Date());

            value = Maps.newHashMap();
            try {
                if (useJsFlag == 0) {

                    // jsonPath mode: evaluate one expression per source column.
                    for (int i = 0; i < fromColumn.size(); i++) {
                        String col = fromColumn.get(i);
                        String exp = jsonPathExp.get(i);
                        if (StringUtils.isNotBlank(col) && "__value__".equals(col)) {
                            // Special column name: take the whole raw message payload.
                            value.put(col, record.value());
                        } else {
                            Object obj = JsonPath.read(record.value(), exp);
                            // NOTE(review): info-level logging of every record/field is
                            // very verbose for a hot path — consider debug level.
                            log.info("KAFKA收到的数据：" + record.value());
                            log.info("col:" + col + "," + "exp:" + exp + "," + "value:" + obj);
                            value.put(col, obj);
                        }
                    }

                    if (value.isEmpty()) {
                        context.addFailNum();
                    }

                    List<Object> data = Lists.newArrayList();
                    if (!value.isEmpty()) {
                        // Partition values come first.
                        for (String value1 : partitionValue) {
                            data.add(VarUtil.getOfflineTaskValue(value1));
                        }
                        // Then one slot per target column, converted to the Hive type.
                        for (int i = 0; i < allColumn.size(); i++) {
                            Integer index = columnIndex.get(allColumn.get(i));
                            if (index != null) {
                                data.add(
                                    value.get(fromColumn.get(index)) == null
                                        ? null
                                        : getvalue(
                                        allColumnType.get(i),
                                        String.valueOf(value.get(fromColumn.get(index)))));
                            } else {
                                data.add(null);
                            }
                        }
                        delay = delay + System.currentTimeMillis() - time1;
                        datas.add(new DefaultHCatRecord(data));
                    }
                } else {
                    // JavaScript mode: the script returns a header row plus data rows,
                    // e.g. id name | 1 aa | 2 bb | 3 cc
                    List<List> lists = AnalysisUtil.callJSFunction(jsContent, record.value());
                    List fieldNames = lists.get(0);
                    lists.remove(0);
                    for (int j = 0; j < lists.size(); j++) {
                        // NOTE(review): "value" is reused across rows; if a later row has
                        // fewer fields than a previous one, stale entries leak through —
                        // confirm rows always carry the full field set.
                        List rowData = lists.get(j);
                        for (int i1 = 0; i1 < rowData.size(); i1++) {
                            // Pair each cell with its header field name.
                            Object o = rowData.get(i1);
                            value.put(String.valueOf(fieldNames.get(i1)), o);
                        }
                        if (value.isEmpty()) {
                            context.addFailNum();
                        }

                        List<Object> data = Lists.newArrayList();
                        if (!value.isEmpty()) {

                            // Partition values come first.
                            for (String value1 : partitionValue) {
                                data.add(VarUtil.getOfflineTaskValue(value1));
                            }
                            for (int i = 0; i < allColumn.size(); i++) {
                                Integer index = columnIndex.get(allColumn.get(i));
                                if (index != null) {
                                    data.add(
                                        value.get(fromColumn.get(index)) == null
                                            ? null
                                            : getvalue(
                                            allColumnType.get(i),
                                            String.valueOf(value.get(fromColumn.get(index)))));
                                } else {
                                    data.add(null);
                                }
                            }
                            delay = delay + System.currentTimeMillis() - time1;
                            datas.add(new DefaultHCatRecord(data));
                        }
                    }
                }

            } catch (Exception e) {
                // Parse/convert failure for this record only; keep processing the batch.
                log.error(e.getMessage(), e);
            }
        }

        return datas;
    }

    /**
     * Converts a raw string value into the Java object matching a Hive column type, for
     * use inside an HCatRecord. Unknown types yield null; conversion failures are logged
     * and yield null as well, so one bad field does not abort the whole batch.
     *
     * @param columnType Hive column type name (e.g. "int", "string", "decimal")
     * @param value      raw string value; may be null
     * @return the converted value, or null when value is null, the type is unknown, or
     *         conversion fails
     */
    public Object getvalue(String columnType, String value) {

        Object val = null;
        try {
            if (value == null) {
                return val;
            }

            switch (columnType) {
                case "tinyint":
                    val = Byte.valueOf(value);
                    break;
                case "smallint":
                    val = Short.valueOf(value);
                    break;
                case "int":
                    val = Integer.valueOf(value);
                    break;
                case "bigint":
                    val = Long.valueOf(value);
                    break;
                case "float":
                    val = Float.valueOf(value);
                    break;
                case "double":
                    val = Double.valueOf(value);
                    break;
                case "decimal":
                    // BigDecimal(String) constructor: exact, unlike BigDecimal(double).
                    val = HiveDecimal.create(new BigDecimal(value));
                    break;
                case "timestamp":
                    // Expects JDBC escape format: yyyy-[m]m-[d]d hh:mm:ss[.f...]
                    val = Timestamp.valueOf(value);
                    break;
                case "date":
                    // Expects yyyy-[m]m-[d]d
                    val = java.sql.Date.valueOf(value);
                    break;
                case "string":
                case "varchar":
                case "char":
                    val = value;
                    break;
                case "boolean":
                    val = Boolean.valueOf(value);
                    break;
                case "binary":
                    // Fix: Hive BINARY maps to byte[]. The old Byte.valueOf(value) threw
                    // NumberFormatException for non-numeric input (so the field became
                    // null) and produced a single Byte — the wrong type for HCatalog —
                    // otherwise.
                    val = value.getBytes(java.nio.charset.StandardCharsets.UTF_8);
                    break;
                default:
                    // Unrecognized column type: leave val as null.
                    break;
            }
        } catch (Exception e) {
            log.error(e.getMessage(), e);
        }

        return val;
    }

    /**
     * Intended to splice INSERT SQL statements from the target columns and values.
     * Currently an unimplemented stub that always returns null — callers must handle
     * the null result. (The name "Inert" appears to be a typo of "Insert"; kept as-is
     * for compatibility with existing callers.)
     */
    private List<String> splicingInertSql(List<String> toColumn, List<Object> value) {
        return null;
    }

    /**
     * Parses a single Kafka record into one or more row maps (column name -> value).
     *
     * <p>Parse mode depends on useJsFlag: 0 = jsonPath expressions, producing exactly one
     * row per record; otherwise a JavaScript function that may yield several rows, the
     * first returned row being the header of field names.
     *
     * <p>Rows that parse to an empty map are counted as failures on the task context.
     *
     * @param record the raw Kafka record whose value() holds the message payload
     * @return one map per parsed row; never null
     */
    private List<Map<String, Object>> analysisRecord(ConsumerRecord<String, String> record) {
        List<Map<String, Object>> data = Lists.newArrayList();
        if (useJsFlag == 0) {
            // jsonPath mode: evaluate one expression per source column, one row total.
            HashMap<String, Object> value = Maps.newHashMap();
            for (int i = 0; i < fromColumn.size(); i++) {
                String col = fromColumn.get(i);
                String exp = jsonPathExp.get(i);
                if (StringUtils.isNotBlank(col) && "__value__".equals(col)) {
                    // Special column name: take the whole raw message payload.
                    value.put(col, record.value());
                } else {
                    Object obj = JsonPath.read(record.value(), exp);
                    value.put(col, obj);
                }
            }

            if (value.isEmpty()) {
                context.addFailNum();
            }
            data.add(value);
        } else {
            // JavaScript mode: header row plus data rows, e.g. id name | 1 aa | 2 bb
            List<List> lists = AnalysisUtil.callJSFunction(jsContent, record.value());
            List fieldNames = lists.get(0);
            lists.remove(0);
            for (int j = 0; j < lists.size(); j++) {
                // Fix: allocate a fresh map per row. The previous code reused ONE map for
                // every row and added the same reference N times, so all returned rows
                // aliased each other and collapsed into the last row's values.
                Map<String, Object> value = Maps.newHashMap();
                List rowData = lists.get(j);
                for (int i1 = 0; i1 < rowData.size(); i1++) {
                    // Pair each cell with its header field name.
                    Object o = rowData.get(i1);
                    value.put(String.valueOf(fieldNames.get(i1)), o);
                }
                if (value.isEmpty()) {
                    context.addFailNum();
                }
                data.add(value);
            }
        }
        return data;
    }

    /**
     * Flattens a batch of Kafka records into a single list of insert values, emitted in
     * fromColumn order within each parsed row.
     *
     * <p>Important: the same JSON yields different layouts depending on the parse mode.
     * jsonPath assembles column-major data, e.g. [["cpulyv25","nclyv25","nclyv26"],
     * ["0.000","24.000","24.000"]], while the script mode assembles row-major data, e.g.
     * ["cpulyv25", "0.000", "nclyv25", "24.000", "nclyv26", "24.000"]. Callers should
     * post-process the returned list according to the sync task's parse mode before
     * writing to storage; getInsertValuesSame() converts the jsonPath layout into the
     * script layout.
     *
     * @author tinglan.ys
     * @date 14:54 2023/4/19
     */
    public List<Object> getInsertValues(ConsumerRecords<String, String> records) {
        List<Object> insertValues = Lists.newArrayList();
        for (ConsumerRecord<String, String> record : records) {
            // Parse the raw record into one or more column-name -> value maps.
            List<Map<String, Object>> rows = analysisRecord(record);
            for (Map<String, Object> row : rows) {
                // Emit this row's values in the configured source-column order.
                for (String column : fromColumn) {
                    insertValues.add(row.get(column));
                }
            }
        }
        return insertValues;
    }

    /**
     * Normalizes the output of getInsertValues(): in jsonPath mode (useJsFlag == 0) that
     * method returns column-major lists, which this method transposes into the row-major
     * flat layout that the script mode produces, so downstream insert logic can treat
     * both modes identically. In script mode the list is returned unchanged.
     *
     * @param records the polled Kafka batch
     * @return row-major flat list of insert values; empty when the batch parsed to nothing
     */
    public List<Object> getInsertValuesSame(ConsumerRecords<String, String> records) {
        List<Object> insertValues = getInsertValues(records);
        if (useJsFlag == 0) {
            // Guard: with no parsed columns at all, get(0) below previously threw
            // IndexOutOfBoundsException. Nothing to transpose — return the empty list.
            if (insertValues.isEmpty()) {
                return insertValues;
            }
            // jsonPath mode: each element is one column's list of values; the first
            // column determines the row count.
            int insertNum = Convert.convert(List.class, insertValues.get(0)).size();
            List<Object> insertValuesFinal = new ArrayList<>();
            for (int i = 0; i < insertNum; i++) {
                for (Object insertValue : insertValues) {
                    List columnVal = Convert.convert(List.class, insertValue);
                    if (CollectionUtils.isEmpty(columnVal)) {
                        // Keep the row shape even when a column produced no values.
                        insertValuesFinal.add("");
                    } else {
                        // NOTE(review): assumes every non-empty column has at least
                        // insertNum values — ragged columns would throw here.
                        insertValuesFinal.add(columnVal.get(i));
                    }
                }
            }
            return insertValuesFinal;
        }
        return insertValues;
    }

    /**
     * Prepares {@code sql} on {@code conn} and binds {@code params} positionally (JDBC
     * parameter indexes are 1-based).
     *
     * @param conn   open JDBC connection; the caller closes it and the returned statement
     * @param sql    SQL text with '?' placeholders
     * @param params values to bind in placeholder order; may be null or empty
     * @return the prepared statement with all parameters bound
     * @throws SQLException if preparing the statement or binding a parameter fails
     */
    public PreparedStatement createPreparedStatement(Connection conn, String sql, Object... params)
        throws SQLException {
        PreparedStatement statement = conn.prepareStatement(sql);
        if (params == null || params.length == 0) {
            return statement;
        }
        for (int index = 0; index < params.length; index++) {
            Object param = params[index];
            if (param == null) {
                // Fix: a null param previously fell through to param.toString() and
                // threw NullPointerException; bind SQL NULL instead.
                statement.setObject(index + 1, null);
            } else if (param instanceof String) {
                statement.setString(index + 1, (String) param);
            } else if (param instanceof Long) {
                statement.setLong(index + 1, (Long) param);
            } else if (param instanceof Integer) {
                statement.setInt(index + 1, (Integer) param);
            } else {
                // Fallback: bind any other type as its string form.
                statement.setString(index + 1, param.toString());
            }
        }
        return statement;
    }

    /** Matches any CJK unified ideograph in the basic block (U+4E00–U+9FA5). */
    private static final Pattern CHINESE_PATTERN = Pattern.compile("[\u4e00-\u9fa5]");

    /**
     * Returns true when {@code str} contains at least one Chinese character.
     *
     * <p>Fix: the pattern is now compiled once and cached instead of on every call, and
     * null input is treated as "no Chinese" rather than throwing NullPointerException.
     * Widened from private to package-private so tests in the same package can reach it.
     *
     * @param str the text to check; may be null
     */
    static boolean isContainChinese(String str) {
        return str != null && CHINESE_PATTERN.matcher(str).find();
    }

    /**
     * Maps a Chinese device/point name to its English equivalent using the loaded CSV
     * mapping table; falls back to "unknowdevice" when no mapping exists.
     */
    private String pointNameMapping(String deviceNameCh) {
        String mapped = csvData.get(deviceNameCh);
        return (mapped != null) ? mapped : "unknowdevice";
    }

    /**
     * Logs the payloads of a polled Kafka batch, truncating the combined text to 256
     * characters so large batches do not flood the log.
     */
    private void logMsg(ConsumerRecords<String, String> records) {
        // Fix: StringBuilder instead of StringBuffer — the buffer is method-local, so
        // StringBuffer's per-call synchronization was pure overhead.
        StringBuilder sb = new StringBuilder();
        records.forEach(record -> sb.append(record.value()).append("\n"));
        String logMessage =
            sb.length() > 256
                ? StringUtils.substring(sb.toString(), 0, 256) + " . . ."
                : sb.toString();
        log.info("收到来自kafka的消息:\n{}", logMessage);
    }
}
