package com.central.datax.plugin.reader.hivereader;

import com.alibaba.datax.common.element.*;
import com.alibaba.datax.common.exception.DataXException;
import com.alibaba.datax.common.plugin.RecordSender;
import com.alibaba.datax.common.plugin.TaskPluginCollector;
import com.alibaba.datax.common.spi.Reader;
import com.alibaba.datax.common.statistics.PerfRecord;
import com.alibaba.datax.common.statistics.PerfTrace;
import com.alibaba.datax.common.util.Configuration;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.sql.*;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;

/**
 * Hive reader plugin: splits a Hive table read into per-partition query
 * slices and streams rows through JDBC into DataX records.
 *
 * @author Tindy
 * @date 2022/3/7
 */
public class HiveReader  extends Reader {

    public static class Job extends Reader.Job {
        // SLF4J logger for the job (split) phase.
        private static final Logger LOG = LoggerFactory
                .getLogger(Job.class);

        // Raw job configuration from the framework; cloned once per split slice.
        private Configuration originalConfig = null;
        // Wraps the Hive JDBC connection; created at the end of init().
        private HiveHelper hiveHelper;

        /**
         * Splits the job into one slice per partition-condition group; when the
         * table has no partitions, a single slice reads the whole table.
         */
        @Override
        public List<Configuration> split(int adviceNumber) {
            // Query all partitions, grouped into at most adviceNumber buckets.
            Collection<List<String>> partitionGroups = hiveHelper.getPartitionContitions(adviceNumber);
            String table = originalConfig.getString(Key.TABLE);
            List<String> columns = originalConfig.getList(Key.COLUMN, String.class);
            String columnSql = StringUtils.join(columns, ",");
            String where = originalConfig.getString(Key.WHERE, "");
            boolean hasWhere = StringUtils.isNotBlank(where);
            // Base SELECT is identical for every slice; build it once.
            String baseSql = buildQuerySql(columnSql, table, where);

            List<Configuration> slices = new ArrayList<Configuration>();
            if (partitionGroups.isEmpty()) {
                originalConfig.set(Key.QUERY_SQL, baseSql);
                slices.add(originalConfig);
            } else {
                for (List<String> group : partitionGroups) {
                    // Chain the partition predicates onto the base SQL; use
                    // "and" when a user WHERE already exists, otherwise "where".
                    String sliceSql = baseSql
                            + (hasWhere ? " and " : " where ")
                            + "(" + StringUtils.join(group, " OR ") + ")";
                    Configuration slice = originalConfig.clone();
                    slice.set(Key.QUERY_SQL, sliceSql);
                    slices.add(slice);
                }
            }
            return slices;
        }


        /**
         * Assembles the SELECT statement for the given column list and table,
         * appending the WHERE clause only when one is configured.
         *
         * @param column comma-joined column list
         * @param table  fully qualified table name
         * @param where  optional WHERE predicate; may be blank
         * @return the formatted query SQL
         */
        public static String buildQuerySql(String column, String table,
                                           String where) {
            // Blank predicate -> use the template without a WHERE section.
            if (StringUtils.isBlank(where)) {
                return String.format(Constant.QUERY_SQL_TEMPLATE_WITHOUT_WHERE, column, table);
            }
            return String.format(Constant.QUERY_SQL_TEMPLATE, column, table, where);
        }
        /**
         * Loads the job configuration, warns about the unused splitPk setting,
         * validates required keys, then opens the Hive helper connection.
         */
        @Override
        public void init() {
            this.originalConfig = super.getPluginJobConf();
            // splitPk is meaningless here: partitioning drives the split instead.
            if (this.originalConfig.getString(Key.SPLIT_PK) != null) {
                LOG.warn("对 hivereader 不需要配置 splitPy, hivereader 将会使用分区字段替换该配置， 如果您不想再看到此警告,请去除splitPy 配置.");
            }
            validateParameter();
            this.hiveHelper = new HiveHelper(originalConfig);
        }

        /**
         * Releases the Hive connection held by the helper.
         */
        @Override
        public void destroy() {
            // hiveHelper is assigned last in init(), after validateParameter()
            // which can throw — guard against a null helper when init() failed.
            if (this.hiveHelper != null) {
                this.hiveHelper.closeConn();
            }
        }

        /**
         * Verifies that all mandatory configuration keys are present; the
         * Kerberos trio is required only when Kerberos is enabled.
         */
        private void validateParameter() {
            // Kerberos settings are conditionally mandatory.
            if (this.originalConfig.getBool(Key.HAVE_KERBEROS, false)) {
                this.originalConfig.getNecessaryValue(Key.KERBEROS_CONF_FILE_PATH, HiveReaderErrorCode.REQUIRED_VALUE);
                this.originalConfig.getNecessaryValue(Key.KERBEROS_KEYTAB_FILE_PATH, HiveReaderErrorCode.REQUIRED_VALUE);
                this.originalConfig.getNecessaryValue(Key.KERBEROS_PRINCIPAL, HiveReaderErrorCode.REQUIRED_VALUE);
            }
            // Always-required keys.
            this.originalConfig.getNecessaryValue(Key.JDBC_URL, HiveReaderErrorCode.REQUIRED_VALUE);
            this.originalConfig.getNecessaryValue(Key.TABLE, HiveReaderErrorCode.REQUIRED_VALUE);
            this.originalConfig.getNecessaryValue(Key.COLUMN, HiveReaderErrorCode.REQUIRED_VALUE);
        }
    }

    public static class Task extends Reader.Task {
        // SLF4J logger for the task (read) phase.
        private static final Logger LOG = LoggerFactory
                .getLogger(Task.class);
        // Cached debug flag; gates the dirty-record debug log in buildRecord().
        private static final boolean IS_DEBUG = LOG.isDebugEnabled();
        // Optional charset forced onto character columns; blank means use getString().
        private String mandatoryEncoding;
        // This slice's configuration (contains the per-slice QUERY_SQL).
        private Configuration readerSliceConfig;
        // "jdbcUrl:[...]" prefix used in log and perf-trace messages.
        private String basicMsg;
        // Wraps the Hive JDBC connection; created in init().
        private HiveHelper hiveHelper;
        // Fallback for null byte columns when a mandatory encoding is set.
        protected final byte[] EMPTY_CHAR_ARRAY = new byte[0];


        /**
         * Executes this slice's QUERY_SQL against Hive and streams every row to
         * the writer, recording SQL-query and result-next perf phases.
         *
         * @param recordSender channel to the writer
         * @throws DataXException wrapping any query/transport failure
         */
        @Override
        public void startRead(RecordSender recordSender) {
            int fetchSize = this.readerSliceConfig.getInt(Constant.FETCH_SIZE, Constant.DEFAULT_FETCH_SIZE);
            String querySql = readerSliceConfig.getString(Key.QUERY_SQL);
            String table = readerSliceConfig.getString(Key.TABLE);

            PerfTrace.getInstance().addTaskDetails(super.getTaskId(), table + "," + basicMsg);

            LOG.info("Begin to read record by Sql: [{}\n] {}.",
                    querySql, basicMsg);
            PerfRecord queryPerfRecord = new PerfRecord(super.getTaskGroupId(), super.getTaskId(), PerfRecord.PHASE.SQL_QUERY);
            queryPerfRecord.start();

            ResultSet rs = null;
            try {
                rs = hiveHelper.query(querySql, fetchSize);
                queryPerfRecord.end();

                ResultSetMetaData metaData = rs.getMetaData();
                int columnNumber = metaData.getColumnCount();

                // Measures pure ResultSet.next() time, excluding record transport.
                PerfRecord allResultPerfRecord = new PerfRecord(super.getTaskGroupId(), super.getTaskId(), PerfRecord.PHASE.RESULT_NEXT_ALL);
                allResultPerfRecord.start();

                long rsNextUsedTime = 0;
                long lastTime = System.nanoTime();
                while (rs.next()) {
                    rsNextUsedTime += (System.nanoTime() - lastTime);
                    this.transportOneRecord(recordSender, rs,
                            metaData, columnNumber, mandatoryEncoding, super.getTaskPluginCollector());
                    lastTime = System.nanoTime();
                }

                allResultPerfRecord.end(rsNextUsedTime);
                //目前大盘是依赖这个打印，而之前这个Finish read record是包含了sql查询和result next的全部时间
                LOG.info("Finished read record by Sql: [{}\n] {}.",
                        querySql, basicMsg);

            } catch (Exception e) {
                throw DataXException.asDataXException(HiveReaderErrorCode.QUERY_ERROR, querySql, e);
            } finally {
                // Fix: the ResultSet was previously never closed (resource leak).
                if (rs != null) {
                    try {
                        rs.close();
                    } catch (SQLException ignored) {
                        // Best-effort close; the connection itself is released in destroy().
                    }
                }
            }
        }

        /**
         * Reads the slice configuration and opens the Hive helper connection.
         */
        @Override
        public void init() {
            this.readerSliceConfig = super.getPluginJobConf();
            this.mandatoryEncoding = this.readerSliceConfig.getString(Key.MANDATORY_ENCODING, "");
            // Log prefix identifying which jdbcUrl this task is reading from.
            this.basicMsg = String.format("jdbcUrl:[%s]", this.readerSliceConfig.getString(Key.JDBC_URL));
            this.hiveHelper = new HiveHelper(this.readerSliceConfig);
        }

        /**
         * Releases the Hive connection held by the helper.
         */
        @Override
        public void destroy() {
            // Guard against init() having failed before hiveHelper was assigned.
            if (hiveHelper != null) {
                hiveHelper.closeConn();
            }
        }

        /**
         * Builds one record from the current ResultSet row and hands it to the writer.
         *
         * @return the record that was sent
         */
        protected Record transportOneRecord(RecordSender recordSender, ResultSet rs,
                                            ResultSetMetaData metaData, int columnNumber, String mandatoryEncoding,
                                            TaskPluginCollector taskPluginCollector) {
            final Record built = buildRecord(recordSender, rs, metaData, columnNumber,
                    mandatoryEncoding, taskPluginCollector);
            recordSender.sendToWriter(built);
            return built;
        }
        /**
         * Maps the current ResultSet row onto a DataX Record, converting each
         * JDBC column type to the corresponding DataX column type. On any
         * per-row failure the partially built record is reported as dirty data;
         * a DataXException (unsupported column type) is additionally rethrown.
         *
         * @param mandatoryEncoding when non-blank, character columns are decoded
         *                          from raw bytes using this charset instead of getString()
         * @return the (possibly partially filled) record
         */
        protected Record buildRecord(RecordSender recordSender, ResultSet rs, ResultSetMetaData metaData, int columnNumber, String mandatoryEncoding,
                                     TaskPluginCollector taskPluginCollector) {
            Record record = recordSender.createRecord();

            try {
                for (int i = 1; i <= columnNumber; i++) {
                    switch (metaData.getColumnType(i)) {

                        case Types.CHAR:
                        case Types.NCHAR:
                        case Types.VARCHAR:
                        case Types.LONGVARCHAR:
                        case Types.NVARCHAR:
                        case Types.LONGNVARCHAR:
                            String rawData;
                            if (StringUtils.isBlank(mandatoryEncoding)) {
                                rawData = rs.getString(i);
                            } else {
                                // Fix: getBytes(i) was previously called twice per cell;
                                // fetch the value once and reuse it.
                                byte[] bytes = rs.getBytes(i);
                                rawData = new String(bytes == null ? EMPTY_CHAR_ARRAY : bytes,
                                        mandatoryEncoding);
                            }
                            record.addColumn(new StringColumn(rawData));
                            break;

                        case Types.CLOB:
                        case Types.NCLOB:
                            record.addColumn(new StringColumn(rs.getString(i)));
                            break;

                        case Types.SMALLINT:
                        case Types.TINYINT:
                        case Types.INTEGER:
                        case Types.BIGINT:
                            record.addColumn(new LongColumn(rs.getString(i)));
                            break;

                        // NUMERIC/DECIMAL and FLOAT/REAL/DOUBLE had identical,
                        // duplicated bodies — merged into one group.
                        case Types.NUMERIC:
                        case Types.DECIMAL:
                        case Types.FLOAT:
                        case Types.REAL:
                        case Types.DOUBLE:
                            record.addColumn(new DoubleColumn(rs.getString(i)));
                            break;

                        case Types.TIME:
                            record.addColumn(new DateColumn(rs.getTime(i)));
                            break;

                        // for mysql bug, see http://bugs.mysql.com/bug.php?id=35115
                        case Types.DATE:
                            if (metaData.getColumnTypeName(i).equalsIgnoreCase("year")) {
                                record.addColumn(new LongColumn(rs.getInt(i)));
                            } else {
                                record.addColumn(new DateColumn(rs.getDate(i)));
                            }
                            break;

                        case Types.TIMESTAMP:
                            record.addColumn(new DateColumn(rs.getTimestamp(i)));
                            break;

                        case Types.BINARY:
                        case Types.VARBINARY:
                        case Types.BLOB:
                        case Types.LONGVARBINARY:
                            record.addColumn(new BytesColumn(rs.getBytes(i)));
                            break;

                        // warn: bit(1) -> Types.BIT 可使用BoolColumn
                        // warn: bit(>1) -> Types.VARBINARY 可使用BytesColumn
                        case Types.BOOLEAN:
                        case Types.BIT:
                            record.addColumn(new BoolColumn(rs.getBoolean(i)));
                            break;

                        case Types.NULL:
                            // Fix: getObject(i) was previously called twice; fetch once.
                            Object obj = rs.getObject(i);
                            record.addColumn(new StringColumn(obj == null ? null : obj.toString()));
                            break;

                        default:
                            throw DataXException
                                    .asDataXException(
                                            HiveReaderErrorCode.UNSUPPORTED_TYPE,
                                            String.format(
                                                    "您的配置文件中的列配置信息有误. 因为DataX 不支持数据库读取这种字段类型. 字段名:[%s], 字段名称:[%s], 字段Java类型:[%s]. 请尝试使用数据库函数将其转换datax支持的类型 或者不同步该字段 .",
                                                    metaData.getColumnName(i),
                                                    metaData.getColumnType(i),
                                                    metaData.getColumnClassName(i)));
                    }
                }
            } catch (Exception e) {
                if (IS_DEBUG) {
                    LOG.debug("read data " + record.toString()
                            + " occur exception:", e);
                }
                // NOTE(review): treating any per-row failure as dirty data may be
                // too broad (e.g. an UnsupportedEncodingException affects all rows)
                // — kept as-is to preserve behavior.
                taskPluginCollector.collectDirtyRecord(record, e);
                if (e instanceof DataXException) {
                    throw (DataXException) e;
                }
            }
            return record;
        }
    }
}
