package com.sh.data.engine.mqtt;

import cn.hutool.core.collection.ListUtil;
import cn.hutool.core.convert.Convert;
import cn.hutool.core.date.DateUtil;
import cn.hutool.core.util.EnumUtil;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.influxdb.client.InfluxDBClient;
import com.influxdb.client.domain.WritePrecision;
import com.influxdb.client.write.Point;
import com.jayway.jsonpath.JsonPath;
import com.sh.data.engine.influxdb.client.InfluxDBClientUtil;
import com.sh.data.engine.iotdb.client.IoTDBSessionUtil;
import com.sh.data.engine.iotdb.domain.TabletDomain;
import com.sh.data.engine.iotdb.domain.TabletDomain.TabletDataSet;
import com.sh.data.engine.mbean.Context;
import com.sh.data.engine.util.AnalysisUtil;
import com.sh.data.engine.util.MysqlUtil;
import com.sh.data.engine.util.PostgreSQLUtil;
import com.sh.data.engine.velocity.VelocityScriptEngine;
import com.sh.data.engine.velocity.model.iotdb.IotDataModel;
import com.sh.data.engine.vo.OnlineTaskParam;
import lombok.Data;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hive.hcatalog.data.HCatRecord;
import org.apache.hive.hcatalog.data.transfer.DataTransferFactory;
import org.apache.hive.hcatalog.data.transfer.HCatWriter;
import org.apache.hive.hcatalog.data.transfer.WriteEntity;
import org.apache.hive.hcatalog.data.transfer.WriterContext;
import org.apache.iotdb.session.Session;
import org.apache.iotdb.tsfile.file.metadata.enums.TSDataType;
import org.apache.iotdb.tsfile.write.schema.MeasurementSchema;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.FileInputStream;
import java.io.IOException;
import java.math.BigDecimal;
import java.security.PrivilegedExceptionAction;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.util.*;
import java.util.stream.Collectors;

import static com.sh.data.engine.util.BatchSaveUtil.executeBatchSave;

/**
 * @auther: futang.tyf
 * @date: 2018/11/14 15:03
 * @description:
 */
@Data
public class EngineMqttConsumer {

    private static final Logger log = LoggerFactory.getLogger(EngineMqttConsumer.class);

    // Address(es) of the source MQTT broker.
    private String hosts;

    // Names of all target-table columns.
    private List<String> allColumn;

    // Types of the target-table columns (parallel to allColumn).
    private List<String> allColumnType;

    // Hive partition values (only relevant when the target is Hive).
    private List<String> partitionValue = Lists.newArrayList();

    // Target columns actually written.
    private List<String> toColumn;

    // Source fields extracted from each message (parallel to toColumn).
    private List<String> fromColumn;

    // Columns treated as tags (used by the InfluxDB sink).
    private List<String> tagColumn;

    // Source MQTT topic subscribed to.
    private String topic;

    private String hiveMetastoreUris;

    private String hdfsUrl;

    private String nameServices;

    private String nameNodesAddrs;

    private String nameNodes;

    // Target database / Hive database name.
    private String db;

    // Target table name (also used as IoTDB device id / InfluxDB measurement).
    private String table;

    // private String taskId;
    private Long taskId;

    private String taskIdMD5;

    // private String recordId;
    private Long recordId;

    private String taskName;

    // Client connected to the source MQTT broker.
    private EngineMqttClient engineMqttClient;

    // Maps a column name to its index in toColumn (built for the Hive sink).
    private Map<String, Integer> columnIndex;

    // HCatalog writer context shared between the prepare and write phases.
    private WriterContext cntxt;

    private HCatWriter writer;

    // Runtime context used for shutdown signalling and failure counters.
    private Context context;

    // Target data source type: kafka/mqtt, mysql/pg, hive, iotdb, influxdb
    private String targetType;

    // JDBC URL when the target is MySQL/PostgreSQL (also reused for InfluxDB URL)
    private String jdbcUrl;

    // JavaScript / Velocity script used to parse the JSON payload
    private String jsContent;

    // Parsing mode: 0 - default, 1 - jsonpath expressions, 2 - velocity script
    private Integer useJsFlag;

    // JsonPath expressions (parallel to fromColumn)
    private List<String> jsonPathExp;

    private String schema;

    private String username;

    private String password;

    // Topic to publish to when the target is Kafka/MQTT.
    private String targetTopic;

    // Broker address(es) of the target Kafka/MQTT cluster.
    private String targetHosts;

    // Path to hive-site.xml used to configure the Hive connection.
    private String hiveSitePath;

    // Shared JDBC connection when the target is MySQL/PostgreSQL.
    private Connection jdbcConnection;

    // Producer configuration when the target is Kafka.
    private Properties kafkaProducerProps;

    // Client connected to the target MQTT broker.
    private EngineMqttClient engineMqttTargetClient;

    // Session when the target is IoTDB.
    private Session iotdbSession;

    // Client when the target is InfluxDB.
    private InfluxDBClient influxDBClient;

    private String targetServer;

    private Integer targetPort;

    /**
     * Builds a consumer from the online task parameters.
     *
     * @param param   task definition: source/target connection info, column mappings, scripts
     * @param context runtime context used for shutdown signalling and failure counters
     */
    public EngineMqttConsumer(OnlineTaskParam param, Context context) {
        this.context = context;

        // Task identity
        this.taskId = param.getTaskId();
        this.taskIdMD5 = param.getTaskIdMD5();
        this.recordId = param.getRecordId();
        this.taskName = param.getTaskName();

        // Source MQTT
        this.hosts = param.getHosts();
        this.topic = param.getTopic();

        // Column mappings
        this.allColumn = param.getAllColumn();
        this.allColumnType = param.getAllColumnType();
        this.partitionValue = param.getPartitionValue();
        this.toColumn = param.getToColumn();
        this.fromColumn = param.getFromColumn();
        this.tagColumn = param.getTagColumn();

        // Hive / HDFS
        this.hiveMetastoreUris = param.getHiveMetastoreUris();
        this.hdfsUrl = param.getHdfsUrl();
        this.nameServices = param.getNameServices();
        this.nameNodesAddrs = param.getNameNodesAddrs();
        this.nameNodes = param.getNameNodes();
        this.hiveSitePath = param.getHiveSitePath();

        // Target data source
        this.targetType = param.getTargetType();
        this.db = param.getDb();
        this.table = param.getTable();
        this.jdbcUrl = param.getJdbcUrl();
        this.schema = param.getSchema();
        this.username = param.getUsername();
        this.password = param.getPassword();
        this.targetTopic = param.getTargetTopic();
        this.targetHosts = param.getTargetHosts();
        this.targetServer = param.getTargetServer();
        this.targetPort = param.getTargetPort();

        // Payload parsing
        this.jsContent = param.getJsContent();
        this.useJsFlag = param.getUseJsFlag();
        this.jsonPathExp = param.getJsonPathExp();
    }

    /**
     * Connects to the source MQTT broker, initializes the target-side connection,
     * then blocks the current thread forever so the MQTT callback keeps consuming.
     *
     * @throws Exception if either the source or the target connection cannot be established
     */
    public void init() throws Exception {
        // 1. Connect to the source MQTT broker; the callback delegates messages to consume().
        engineMqttClient = new EngineMqttClient();
        engineMqttClient.init(hosts);
        engineMqttClient.connect(new EngineMqttCallback(engineMqttClient.getClientId(), this));
        // 2. Open the connection to the configured target data source.
        this.initTargetConnection();

        log.info("任务 :[{}] 启动成功，准备运行......", taskName);
        // Block forever; messages are handled on the MQTT client's callback thread.
        Thread.currentThread().join();
    }

    // Number of records handed to the Hive writer in the current batch (reset per batch).
    int nums = 0;

    // Start time of the current Hive batch; used by HCatRecordItr for its flush timeout.
    long time = System.currentTimeMillis();

    /**
     * Dispatches one raw MQTT message to the handler matching the configured target type.
     *
     * @param message raw message payload received from the source broker
     */
    public void consume(String message) {
        String targetTypeUpperCase = targetType.toUpperCase();
        try {
            switch (targetTypeUpperCase) {
                case "HIVE":
                    processMq2Hive(message);
                    break;
                case "POSTGRESQL":
                    // processMq2PG(message);
                    // break;
                    // intentional fall-through: PostgreSQL shares the generic RDB path
                case "MYSQL":
                    processMq2RDB(targetTypeUpperCase, message);
                    break;
                case "KAFKA":
                    processMq2Kafka(message);
                    break;
                case "MQTT":
                    handleMq2Mq(message);
                    break;
                case "IOTDB":
                    processMq2Iotdb(message);
                    break;
                case "INFLUXDB":
                    processMq2Influxdb(message);
                    // fix: break was missing; it fell through to default (no-op, but fragile)
                    break;
                default:
                    break;
            }
        } catch (Exception e) {
            // Log the full stack trace, not only the message, to ease troubleshooting.
            log.error("transfer data failed : {}", e.getMessage(), e);
        }
    }

    /**
     * Opens the connection/session for the configured target data source.
     * Exactly one branch applies, selected by {@link #targetType}.
     *
     * @throws Exception if the target connection cannot be established
     */
    private void initTargetConnection() throws Exception {
        // Hive target: build the column index and prepare the HCatalog writer.
        if ("hive".equalsIgnoreCase(targetType)) {
            // Fix: columnIndex was dereferenced before ever being initialized (NPE);
            // the original also contained a no-op self-assignment afterwards.
            columnIndex = Maps.newHashMap();
            for (String ac : this.allColumn) {
                for (int i = 0; i < toColumn.size(); i++) {
                    if (ac.equals(toColumn.get(i))) {
                        columnIndex.put(ac, i);
                        break;
                    }
                }
            }

            HiveConf hiveConf = new HiveConf(this.getClass());
            Map<String, String> config = new HashMap<>();
            // try-with-resources so the hive-site stream is released (it previously leaked).
            try (FileInputStream inputStream = new FileInputStream(hiveSitePath)) {
                hiveConf.addResource(inputStream);
                // Iterating forces the configuration to load the resource stream.
                for (Map.Entry<String, String> kv : hiveConf) {
                    config.put(kv.getKey(), kv.getValue());
                }
            }

            WriteEntity.Builder builder = new WriteEntity.Builder();
            WriteEntity entity = builder.withDatabase(db).withTable(table).build();
            writer = DataTransferFactory.getHCatWriter(entity, config);
            UserGroupInformation userGroupInformation = null;
            try {
                // Prepare the write phase as the "hive" proxy user.
                userGroupInformation = UserGroupInformation.getBestUGI(null, "hive");
                userGroupInformation.doAs(
                    new PrivilegedExceptionAction<Object>() {
                        @Override
                        public Object run() throws Exception {
                            cntxt = writer.prepareWrite();
                            return null;
                        }
                    });
            } catch (Exception e) {
                log.error(e.getMessage(), e);
                throw e;
            } finally {
                // Always release the filesystem handles tied to this UGI.
                if (userGroupInformation != null) {
                    try {
                        FileSystem.closeAllForUGI(userGroupInformation);
                    } catch (IOException e) {
                        log.error(e.getMessage(), e);
                    }
                }
            }
        }

        if ("kafka".equalsIgnoreCase(targetType)) {
            kafkaProducerProps = new Properties();
            kafkaProducerProps.put(
                ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, targetHosts); // kafka cluster broker-list
            kafkaProducerProps.put(ProducerConfig.ACKS_CONFIG, "0");
            kafkaProducerProps.put(ProducerConfig.RETRIES_CONFIG, 0); // retry count
            kafkaProducerProps.put(ProducerConfig.BATCH_SIZE_CONFIG, 16384); // batch size
            kafkaProducerProps.put(ProducerConfig.MAX_REQUEST_SIZE_CONFIG, 10485760); // max message size 10M
            kafkaProducerProps.put(ProducerConfig.LINGER_MS_CONFIG, 1); // linger time
            kafkaProducerProps.put(
                ProducerConfig.BUFFER_MEMORY_CONFIG, 33554432); // RecordAccumulator buffer size
            // kafkaProducerProps.put(ProducerConfig.METADATA_MAX_AGE_CONFIG, 60000);
            kafkaProducerProps.put(ProducerConfig.MAX_BLOCK_MS_CONFIG, 2000); // max blocking time
            kafkaProducerProps.put(
                ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.common.serialization.StringSerializer");
            kafkaProducerProps.put(
                ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.common.serialization.StringSerializer");
        }

        if ("mqtt".equalsIgnoreCase(targetType)) {
            engineMqttTargetClient = new EngineMqttClient();
            engineMqttTargetClient.init(targetHosts);
            engineMqttTargetClient.connect(null);
        }

        if ("iotdb".equalsIgnoreCase(targetType)) {
            iotdbSession = IoTDBSessionUtil.getIoTDBSession(targetServer, username, password);
            iotdbSession.open(false, 10000);
            log.info("目标 iotdb：{} 连接成功", targetServer);
        }
        if ("influxdb".equalsIgnoreCase(targetType)) {
            influxDBClient = InfluxDBClientUtil.getInfluxDBClient(jdbcUrl, password, schema, db);
            log.info("目标 influxdb：{} 连接成功", jdbcUrl);
        }
        if ("postgresql".equalsIgnoreCase(targetType)) {
            jdbcConnection = PostgreSQLUtil.createConnection(jdbcUrl, username, password);
            log.info("目标 postgres：{} 连接成功", jdbcUrl);
        }
        if ("mysql".equalsIgnoreCase(targetType)) {
            jdbcConnection = MysqlUtil.createConnection(jdbcUrl, username, password);
            log.info("目标 mysql：{} 连接成功", jdbcUrl);
        }
    }

    /**
     * Batch-writes buffered records to Hive via HCatalog, running as the "hive" proxy user.
     * Loops until the context signals shutdown; each iteration writes one batch and then
     * commits it, or aborts when the batch turned out to be empty.
     *
     * @param message raw message payload (not read here; records are pulled by HCatRecordItr)
     */
    private void processMq2Hive(String message) {
        // Consumer loop: pull data in batches and flush each batch to Hive.
        UserGroupInformation userGroupInformation = null;
        try {
            userGroupInformation = UserGroupInformation.getBestUGI(null, "hive");
            userGroupInformation.doAs(
                new PrivilegedExceptionAction<Object>() {
                    @Override
                    public Object run() throws Exception {
                        while (true) {
                            // Stop once the task has been asked to shut down.
                            if (context.getShutdown() == 1) {
                                break;
                            }
                            // Reset per-batch counters shared with HCatRecordItr.
                            nums = 0;
                            time = System.currentTimeMillis();
                            writer = DataTransferFactory.getHCatWriter(cntxt);
                            try {
                                writer.write(new HCatRecordItr());
                            } catch (Exception e) {
                                log.error(e.getMessage(), e);
                                throw e;
                            } finally {
                                // Abort empty batches; commit anything that was written.
                                if (nums == 0) {
                                    writer.abort(cntxt);
                                } else {
                                    log.info("fetched nums:{}", nums);
                                    writer.commit(cntxt);
                                }
                            }
                        }
                        return null;
                    }
                });
        } catch (Exception e) {
            log.error(e.getMessage(), e);
        } finally {
            // Always release the filesystem handles tied to this UGI.
            if (userGroupInformation != null) {
                try {
                    FileSystem.closeAllForUGI(userGroupInformation);
                } catch (IOException e) {
                    log.error(e.getMessage(), e);
                }
            }
        }
    }

    /**
     * Parses one message into tablet rows and writes them to the target IoTDB session.
     *
     * <p>For non-velocity modes the columns in {@code toColumn} become measurements; a column
     * matching the timestamp measurement supplies the row time, otherwise the current system
     * time is used. For velocity mode ({@code useJsFlag == 2}) the script produces the tablet
     * models directly.
     *
     * @param record raw message payload
     */
    private void processMq2Iotdb(String record) {
        List<TabletDomain> tabletDomains = Lists.newArrayList();
        try {
            if (2 != useJsFlag) { // non-velocity parsing
                TabletDomain tabletDomain = new TabletDomain();
                String deviceId = table;
                // Column name -> IoTDB data type.
                Map<String, TSDataType> columnMap = Maps.newHashMap();
                for (int i = 0; i < allColumn.size(); i++) {
                    columnMap.put(
                        allColumn.get(i), EnumUtil.fromString(TSDataType.class, allColumnType.get(i)));
                }
                // The timestamp column is not a measurement; it becomes the row time below.
                List<MeasurementSchema> schemaList =
                    toColumn.stream()
                        .filter(co -> !IoTDBSessionUtil.TIMESTAMP_MEASUREMENT.equals(co))
                        .map(co -> new MeasurementSchema(co, columnMap.get(co)))
                        .collect(Collectors.toList());
                tabletDomain.setDeviceId(deviceId);
                tabletDomain.setSchemaList(schemaList);

                // Values come back flattened in column order; split them into rows.
                List<Object> insertValues = getInsertValues(record);
                List<List<Object>> values = ListUtil.split(insertValues, toColumn.size());
                List<TabletDataSet> tabletDataSets =
                    values.stream()
                        .map(
                            (v) -> {
                                TabletDataSet tabletDataSet = new TabletDataSet();
                                int index;
                                if ((index = toColumn.indexOf(IoTDBSessionUtil.TIMESTAMP_MEASUREMENT))
                                    != -1) {
                                    // Pull the timestamp value out of the row before storing values.
                                    String t = (String) v.remove(index);
                                    long time = 0L;
                                    try {
                                        time = DateUtil.parse(t).getTime();
                                    } catch (Exception e) {
                                        log.error(
                                            "时间值[{}]解析异常，目前支持的日期时间格式包括\n"
                                                + "yyyy-MM-dd HH:mm:ss\n"
                                                + "yyyy/MM/dd HH:mm:ss\n"
                                                + "yyyy.MM.dd HH:mm:ss\n"
                                                + "yyyy年MM月dd日 HH时mm分ss秒\n"
                                                + "yyyy-MM-dd\n"
                                                + "yyyy/MM/dd\n"
                                                + "yyyy.MM.dd\n"
                                                + "HH:mm:ss\n"
                                                + "HH时mm分ss秒\n"
                                                + "yyyy-MM-dd HH:mm\n"
                                                + "yyyy-MM-dd HH:mm:ss.SSS\n"
                                                + "yyyy-MM-dd HH:mm:ss.SSSSSS\n"
                                                + "yyyyMMddHHmmss\n"
                                                + "yyyyMMddHHmmssSSS\n"
                                                + "yyyyMMdd\n"
                                                + "EEE, dd MMM yyyy HH:mm:ss z\n"
                                                + "EEE MMM dd HH:mm:ss zzz yyyy\n"
                                                + "yyyy-MM-dd'T'HH:mm:ss'Z'\n"
                                                + "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'\n"
                                                + "yyyy-MM-dd'T'HH:mm:ssZ\n"
                                                + "yyyy-MM-dd'T'HH:mm:ss.SSSZ，请选择其中任意一种格式后重试",
                                            t);
                                        throw new RuntimeException("时间戳解析异常");
                                    }
                                    tabletDataSet.setTimestamp(time);
                                } else {
                                    // No timestamp column configured: use the arrival time.
                                    tabletDataSet.setTimestamp(System.currentTimeMillis());
                                }
                                tabletDataSet.setValues(v);
                                return tabletDataSet;
                            })
                        .collect(Collectors.toList());
                tabletDomain.setTabletDataSets(tabletDataSets);
                tabletDomains.add(tabletDomain);
            } else {
                // Velocity script produces the tablet models directly.
                List<IotDataModel> dataModels =
                    VelocityScriptEngine.evel(record, jsContent, UUID.randomUUID().toString());
                tabletDomains =
                    dataModels.stream()
                        .map(TabletDomain::generateFromIOTModel)
                        .collect(Collectors.toList());
            }
            if (CollectionUtils.isEmpty(tabletDomains)) {
                log.info("the message filter by tag and value is invalid and has been dropped");
                // Fix: return early instead of falling through to an empty write loop.
                return;
            }
            long l1 = System.currentTimeMillis();
            List<String> deviceIds = Lists.newArrayList();
            for (TabletDomain tabletDomain : tabletDomains) {
                if (CollectionUtils.isEmpty(tabletDomain.getSchemaList())) {
                    log.info("未解析到有效测点，请检查后重试！");
                    continue;
                }
                IoTDBSessionUtil.insertTablet(iotdbSession, tabletDomain);
                deviceIds.add(tabletDomain.getDeviceId());
            }

            long l2 = System.currentTimeMillis();
            log.info("保存数据到 iotdb 成功,耗时:{}ms 设备id：{}", l2 - l1, deviceIds);
        } catch (Exception e) {
            // Keep the stack trace for troubleshooting.
            log.error("保存数据到 iotdb 失败 : {}", e.getMessage(), e);
        }
    }

    /**
     * Parses one message into InfluxDB points and writes them in one batch.
     * The column named "time" supplies the timestamp, columns listed in {@link #tagColumn}
     * become tags, everything else becomes a field.
     *
     * @param record raw message payload
     */
    private void processMq2Influxdb(String record) {
        // Values come back flattened in fromColumn order; split into rows of toColumn width.
        List<Object> insertValues = getInsertValues(record);
        List<List<Object>> values = ListUtil.split(insertValues, toColumn.size());
        List<Point> points = new ArrayList<>();
        for (List<Object> value : values) {
            Date time = null;
            Map<String, String> tags = new HashMap<>();
            Map<String, Object> fields = new HashMap<>();
            for (int i = 0; i < toColumn.size(); i++) {
                String columnName = toColumn.get(i);
                Object columnValue = value.get(i);
                if (columnName.equals("time")) {
                    time = InfluxDBClientUtil.convert(columnValue);
                } else if (tagColumn.contains(columnName)) {
                    // Fix: guard against null tag values, which previously threw an NPE.
                    if (columnValue != null) {
                        tags.put(columnName, columnValue.toString());
                    }
                } else {
                    fields.put(columnName, columnValue);
                }
            }
            if (time == null) {
                log.warn("消息中不包含timestamp列，使用当前时间作为记录的时间戳");
                time = new Date();
            }
            if (fields.isEmpty()) {
                log.warn("消息中不包含field列，忽略本次数据插入");
                // Fix: skip only this row; "return" here dropped every other row in the batch.
                continue;
            }

            // Build the InfluxDB Point for this row.
            Point point =
                Point.measurement(table)
                    .time(time.getTime(), WritePrecision.MS)
                    .addTags(tags)
                    .addFields(fields);
            points.add(point);
        }

        if (CollectionUtils.isEmpty(points)) {
            log.warn("消息体中没有数据需要保存到influxdb");
            // Fix: nothing to write, so skip the insert call entirely.
            return;
        }

        try {
            InfluxDBClientUtil.insert(influxDBClient, points);
            log.info("保存数据到 influxdb 成功");
        } catch (Exception e) {
            log.error("保存数据到 influxdb 失败 : {}", e.getMessage(), e);
        }
    }

    /**
     * Forwards one message to the target Kafka topic.
     *
     * <p>NOTE(review): a producer is created and closed per message; for throughput the
     * producer could be created once in initTargetConnection() — confirm before changing.
     *
     * @param record raw message payload to forward
     */
    private void processMq2Kafka(String record) {
        try (org.apache.kafka.clients.producer.KafkaProducer<String, String> kafkaProducer =
                 new org.apache.kafka.clients.producer.KafkaProducer<>(kafkaProducerProps)) {
            log.info("目标 kafka broker：{},准备进行数据转发", targetHosts);
            try {
                ProducerRecord<String, String> producerRecord =
                    new ProducerRecord<String, String>(targetTopic, record);
                kafkaProducer.send(
                    producerRecord,
                    (metadata, exception) -> {
                        if (Objects.isNull(exception)) {
                            log.info("mqtt 消息转发 kafka 成功");
                        } else {
                            // Fix: include the throwable so the stack trace is logged.
                            log.error("mqtt 消息转发 kafka 失败 : {}", exception.getMessage(), exception);
                        }
                    });
                // Force the buffered record out before the producer is closed.
                kafkaProducer.flush();
            } catch (Exception e) {
                log.error("mqtt 消息转发 kafka 失败 : {}", e.getMessage(), e);
            }
        }
    }

    /**
     * Inserts one message's rows into the relational target (MySQL or PostgreSQL)
     * via a batched prepared statement.
     *
     * @param dsType upper-cased data source type ("MYSQL" or "POSTGRESQL")
     * @param record raw message payload
     */
    private void processMq2RDB(String dsType, String record) {
        // PostgreSQL needs a schema-qualified table name; MySQL (and default) use the bare table.
        String targetTableName = "POSTGRESQL".equals(dsType) ? db + "." + table : table;

        List<Object> insertValues = null;
        try {
            insertValues = getInsertValuesSame(record);
        } catch (Exception e) {
            // Fix: pass the throwable as the final argument so SLF4J logs the stack trace
            // (previously "{}" consumed the exception and only printed its toString).
            log.error("extract insert values error", e);
        }
        if (CollectionUtils.isEmpty(insertValues)) {
            return;
        }

        // Build "insert into t(a,b) values(?,?);" with one placeholder per target column.
        String placeholders = String.join(",", Collections.nCopies(toColumn.size(), "?"));
        String sql =
            "insert into "
                + targetTableName
                + "("
                + StringUtils.join(toColumn, ",")
                + ")"
                + " values"
                + "("
                + placeholders
                + ")"
                + ";";
        try {
            log.info("execsql: " + sql);
            log.info("insertValues: " + insertValues);
            executeBatchSave(jdbcConnection, sql, toColumn.size(), insertValues, targetType);
            log.info("保存数据到目标数据库成功");
        } catch (Exception e) {
            log.error("保存数据到目标数据库失败：{}", e.getMessage(), e);
        }
    }

    /**
     * Legacy PostgreSQL insert path kept for a customer workload whose payload carries
     * multiple rows per message: each parsed value is a column-wise list of row values.
     *
     * <p>NOTE(review): currently not referenced from consume(); the POSTGRESQL case
     * falls through to processMq2RDB there.
     *
     * @param message raw message payload
     */
    private void processMq2PG(String message) {
        List<Object> insertValues = null;
        try {
            insertValues = getInsertValues(message);
        } catch (Exception e) {
            log.error("extract insert values error", e);
        }
        if (CollectionUtils.isEmpty(insertValues)) {
            return;
        }

        // One placeholder per parsed column.
        String placeholders = String.join(",", Collections.nCopies(insertValues.size(), "?"));
        StringBuilder sql = new StringBuilder();
        sql.append("insert into ")
            .append(table)
            .append("(")
            .append(StringUtils.join(toColumn, ","))
            .append(")")
            .append(" values")
            .append("(")
            .append(placeholders)
            .append(")")
            .append(";");

        // Each entry of insertValues is a column-wise list; the first column's size
        // determines how many rows must be inserted.
        int insertNum = Convert.convert(List.class, insertValues.get(0)).size();

        try {
            if (jdbcConnection == null) {
                jdbcConnection = PostgreSQLUtil.createConnection(jdbcUrl, username, password);
            }
            for (int i = 0; i < insertNum; i++) {
                // Re-assemble row i from the column-wise lists.
                List<Object> rowData = new ArrayList<>();
                for (Object insertValue : insertValues) {
                    List columnVal = Convert.convert(List.class, insertValue);
                    rowData.add(columnVal.get(i));
                }

                // Fix: close the statement after execution; it previously leaked.
                try (PreparedStatement psmt =
                         createPreparedStatement(jdbcConnection, sql.toString(), rowData.toArray())) {
                    psmt.execute();
                }
            }

        } catch (Exception e) {
            log.error(e.getMessage(), e);
        }
    }

    /**
     * Forwards one message to the target MQTT broker (QoS 1, not retained).
     *
     * @param record raw message payload to forward
     */
    private void handleMq2Mq(String record) {
        try {
            engineMqttTargetClient.publishMessage(targetTopic, record, 1, false);
            log.info("mqtt 消息转发 mqtt 成功");
        } catch (Exception e) {
            // Fix: include the throwable so the stack trace is logged.
            log.error("mqtt 消息转发 mqtt 失败 : {}", e.getMessage(), e);
        }
    }

    /**
     * Pull-style iterator handed to the HCat writer: it accumulates records until a batch
     * limit or timeout is reached, so each writer.write() call flushes one batch.
     *
     * <p>NOTE(review): the fetch call below is commented out ({@code getRecord()}), leaving
     * {@code data} null — {@code datas.addAll(data)} throws an NPE the first time hasNext()
     * takes that branch. Confirm whether this path is still live before relying on it.
     */
    private class HCatRecordItr implements Iterator<HCatRecord> {

        // Records fetched so far in the current batch.
        int size = 0;
        // Max records per batch before forcing a flush.
        private static final int limit = 2000;

        // Max batch duration in milliseconds before forcing a flush.
        private static final int timeout = 120000;

        // Buffered records not yet handed to the writer via next().
        List<HCatRecord> datas = Lists.newArrayList();

        @Override
        public boolean hasNext() {

            if (size < limit
                && (System.currentTimeMillis() - time) < timeout
                && context.getShutdown() == 0) {
                // List<HCatRecord> data = getRecord();
                List<HCatRecord> data = null;
                datas.addAll(data);
                size += data.size();
            }

            if ((datas.size() == 0)) {

                if (context.getShutdown() == 1) {
                    return false;
                }

                // Flush once the per-batch record limit is reached.
                if (size >= limit) {
                    return false;
                }
                // Stop after 120 seconds so the batch gets committed.
                if ((System.currentTimeMillis() - time) > timeout) {
                    return false;
                }

                // Keep polling until a record arrives or a flush condition is met.
                return hasNext();
            }

            return true;
        }

        @Override
        public HCatRecord next() {
            // Count the record for the commit/abort decision in processMq2Hive.
            nums++;
            return datas.remove(0);
        }

        @Override
        public void remove() {
            // Removal is not supported for this one-way feed.
            throw new RuntimeException();
        }
    }

  /* private List<HCatRecord> getRecord() {

    List<HCatRecord> datas = Lists.newArrayList();

    try {

      if (!connection.isConnected()) {
        connection = mqtt.blockingConnection();
        connection.connect();
        Topic[] topics = {new Topic(topic, QoS.EXACTLY_ONCE)};
        connection.subscribe(topics);
      }

    } catch (Exception e) {
      throw new RuntimeException("创建mqtt连接失败");
    }

    // 从服务端获取数据
    Message message;

    try {
      message = connection.receive();
    } catch (Exception e) {
      log.error(e.getMessage(), e);
      return datas;
    }
    if (message == null) {
      return datas;
    }
    message.ack();

    // 将拿到的数据根据数据来源的列进行封装
    context.setLastSyncTime(new Date());

    Map<String, Object> value = Maps.newHashMap();
    try {
      for (String col : fromColumn) {

        if (StringUtils.isNotBlank(col) && "__value__".equals(col)) {
          value.put(col, new String(message.getPayload(), "UTF-8"));
        } else {
          Object obj = JsonPath.read(new String(message.getPayload(), "UTF-8"), "$." + col);
          value.put(col, obj);
        }
      }

    } catch (Exception e) {
      log.error(e.getMessage(), e);
    }

    if (value == null || value.isEmpty()) {
      context.addFailNum();
    }

    List<Object> data = Lists.newArrayList();
    if (value != null && !value.isEmpty()) {
      // 先设置分区值
      for (String value1 : partitionValue) {
        data.add(VarUtil.getOfflineTaskValue(value1));
      }
      // fixme
      for (int i = 0; i < allColumn.size(); i++) {
        Integer index = columnIndex.get(allColumn.get(i));
        if (index != null) {
          data.add(
              value.get(fromColumn.get(index)) == null
                  ? null
                  : getvalue(
                      allColumnType.get(i), String.valueOf(value.get(fromColumn.get(index)))));
        } else {
          data.add(null);
        }
      }
      datas.add(new DefaultHCatRecord(data));
    }
    return datas;
  } */

    /**
     * Converts a string value into the Java object matching the given Hive column type.
     * Returns {@code null} when the value is null, the type is unrecognized, or parsing
     * fails (the failure is logged, never propagated).
     *
     * @param columnType Hive column type name (e.g. "int", "bigint", "string")
     * @param value      string representation of the value, may be null
     * @return the converted value, or {@code null}
     */
    public Object getvalue(String columnType, String value) {
        if (value == null) {
            return null;
        }

        Object converted = null;
        try {
            switch (columnType) {
                case "string":
                case "varchar":
                case "char":
                    converted = value;
                    break;
                case "tinyint":
                case "binary":
                    converted = Byte.valueOf(value);
                    break;
                case "smallint":
                    converted = Short.valueOf(value);
                    break;
                case "int":
                    converted = Integer.valueOf(value);
                    break;
                case "bigint":
                    converted = Long.valueOf(value);
                    break;
                case "float":
                    converted = Float.valueOf(value);
                    break;
                case "double":
                    converted = Double.valueOf(value);
                    break;
                case "decimal":
                    converted = HiveDecimal.create(new BigDecimal(value));
                    break;
                case "timestamp":
                    converted = Timestamp.valueOf(value);
                    break;
                case "date":
                    converted = java.sql.Date.valueOf(value);
                    break;
                case "boolean":
                    converted = Boolean.valueOf(value);
                    break;
                default:
                    break;
            }
        } catch (Exception e) {
            log.error("getvalue 异常 ：{}", e.getMessage());
        }

        return converted;
    }

    /**
     * Parses one raw message into a list of row maps (column name -> value).
     *
     * <p>Mode 0 extracts each fromColumn via its JsonPath expression; the pseudo-column
     * {@code __value__} receives the whole payload. Any other mode runs the configured
     * JavaScript, which returns a header row of field names followed by data rows,
     * e.g. {@code id name | 1 aa | 2 bb | 3 cc}.
     *
     * @param record raw message payload
     * @return one map per parsed row; rows that parse to empty increment the failure counter
     */
    private List<Map<String, Object>> analysisRecord(String record) {
        List<Map<String, Object>> data = Lists.newArrayList();
        if (useJsFlag == 0) {
            // JsonPath-based extraction (no JavaScript).
            HashMap<String, Object> value = Maps.newHashMap();
            for (int i = 0; i < fromColumn.size(); i++) {
                String col = fromColumn.get(i);
                String exp = jsonPathExp.get(i);
                if (StringUtils.isNotBlank(col) && "__value__".equals(col)) {
                    // Special column: pass the raw payload through untouched.
                    value.put(col, record);
                } else {
                    Object obj = null;
                    try {
                        obj = JsonPath.read(record, exp);
                    } catch (Exception e) {
                        // Fix: include the throwable so the stack trace is logged.
                        log.error("jsonpath解析异常：{}", e.getMessage(), e);
                    }
                    value.put(col, obj);
                }
            }

            // (Removed always-false null check; the map is constructed locally.)
            if (value.isEmpty()) {
                context.addFailNum();
            }

            data.add(value);
        } else {
            // JavaScript parsing: first row holds the field names, remaining rows are data.
            List<List> lists = AnalysisUtil.callJSFunction(jsContent, record);
            List fieldNames = lists.get(0);
            // Fix: iterate from index 1 instead of mutating the list returned by the script.
            for (int j = 1; j < lists.size(); j++) {
                List rowData = lists.get(j);
                HashMap<String, Object> row = Maps.newHashMap();
                for (int i = 0; i < rowData.size(); i++) {
                    row.put(String.valueOf(fieldNames.get(i)), rowData.get(i));
                }
                if (row.isEmpty()) {
                    context.addFailNum();
                }
                data.add(row);
            }
        }
        return data;
    }

    /**
     * Parses a raw record and flattens it into a list of insert parameters:
     * for every parsed row, the values of the configured source columns are
     * appended in {@code fromColumn} order (missing columns contribute null).
     *
     * @param record raw record payload to parse
     * @return flat list of column values, row after row
     */
    public List<Object> getInsertValues(String record) {
        List<Map<String, Object>> rows = analysisRecord(record);
        List<Object> insertValues = Lists.newArrayList();
        for (Map<String, Object> row : rows) {
            for (String column : fromColumn) {
                insertValues.add(row.get(column));
            }
        }
        return insertValues;
    }

    /**
     * Like {@link #getInsertValues(String)}, but in JsonPath mode
     * ({@code useJsFlag == 0}) each extracted value is itself a per-column list
     * (a JsonPath match over an array), so the column-major lists are transposed
     * into one flat row-major list. The row count is taken from the first
     * column; shorter columns are padded with null.
     *
     * @param records raw record payload to parse
     * @return flat row-major list of values in JsonPath mode; otherwise the
     *         untouched result of {@link #getInsertValues(String)}
     */
    public List<Object> getInsertValuesSame(String records) {
        List<Object> columnWise = getInsertValues(records);
        if (useJsFlag != 0 || CollectionUtils.isEmpty(columnWise)) {
            // JavaScript mode (or nothing parsed): already row-major, return as-is.
            return columnWise;
        }
        // JsonPath mode: transpose column-major lists into row-major order.
        int rowCount = Convert.convert(List.class, columnWise.get(0)).size();
        List<Object> rowWise = new ArrayList<>();
        for (int row = 0; row < rowCount; row++) {
            for (Object column : columnWise) {
                List values = Convert.convert(List.class, column);
                boolean hasValue = CollectionUtils.isNotEmpty(values) && values.size() > row;
                rowWise.add(hasValue ? values.get(row) : null);
            }
        }
        return rowWise;
    }

    /**
     * Creates a PreparedStatement for the given SQL and binds the positional
     * parameters. String/Long/Integer use their typed setters; any other type is
     * bound through {@code toString()} and left to the driver to interpret.
     *
     * @param conn   open JDBC connection (owned by the caller, not closed here)
     * @param sql    SQL text with '?' placeholders
     * @param params placeholder values in order; may be null or empty
     * @return the prepared statement with all parameters bound
     * @throws SQLException if preparation or binding fails; on a binding failure
     *         the statement is closed before rethrowing, so it cannot leak
     */
    public PreparedStatement createPreparedStatement(Connection conn, String sql, Object... params)
        throws SQLException {
        PreparedStatement statement = conn.prepareStatement(sql);
        if (params == null || params.length == 0) {
            return statement;
        }
        try {
            for (int index = 0; index < params.length; index++) {
                Object param = params[index];
                if (param instanceof String) {
                    statement.setString(index + 1, (String) param);
                } else if (param instanceof Long) {
                    statement.setLong(index + 1, (Long) param);
                } else if (param instanceof Integer) {
                    statement.setInt(index + 1, (Integer) param);
                } else {
                    // Fallback: bind the string form of any other type.
                    statement.setString(index + 1, param.toString());
                }
            }
        } catch (SQLException | RuntimeException e) {
            // Previously the statement leaked when a setter threw; close it first.
            try {
                statement.close();
            } catch (SQLException closeError) {
                e.addSuppressed(closeError);
            }
            throw e;
        }
        return statement;
    }
}
