package beautiful.butterfly.drds.data_exchange.read_and_write_data.reader;

import beautiful.butterfly.drds.data_exchange.data_exchange.exchanger.Produce;
import beautiful.butterfly.drds.data_exchange.data_exchange.record.column.*;
import beautiful.butterfly.drds.data_exchange.error_code.DBUtilErrorCode;
import beautiful.butterfly.drds.data_exchange.error_code.DataExchangeException;
import beautiful.butterfly.drds.data_exchange.plugin_collector.AbstractTaskPluginCollector;
import beautiful.butterfly.drds.data_exchange.read_and_write_data.reader.util.OriginalConfPretreatmentUtil;
import beautiful.butterfly.drds.data_exchange.read_and_write_data.reader.util.PreCheckTask;
import beautiful.butterfly.drds.data_exchange.read_and_write_data.reader.util.ReaderSplitUtil;
import beautiful.butterfly.drds.data_exchange.statistics.PerfTrace;
import beautiful.butterfly.drds.data_exchange.statistics.Phase;
import beautiful.butterfly.drds.data_exchange.statistics.RunningInfo;
import beautiful.butterfly.drds.data_exchange.util.Configuration;
import beautiful.butterfly.drds.data_exchange.util.Jdbc;
import beautiful.butterfly.drds.data_exchange.util.Jdbcs1;
import beautiful.butterfly.drds.data_exchange.util.RdbmsException;
import com.google.common.collect.Lists;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.sql.*;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;


public class Reader
{

    public static class Job
    {
        private static final Logger LOG = LoggerFactory
                .getLogger(Job.class);

        // Upper bound on the number of threads used to run pre-check tasks.
        private static final int MAX_PRE_CHECK_THREADS = 10;

        public Job()
        {
        }

        /**
         * Pre-processes the original job configuration (connection info,
         * table/querySql normalization) via {@link OriginalConfPretreatmentUtil}.
         *
         * @param originalConfig the job-level configuration, mutated in place
         * @throws SQLException if pretreatment needs the database and fails
         */
        public void init(Configuration originalConfig) throws SQLException
        {
            OriginalConfPretreatmentUtil.doPretreatment(originalConfig);

            LOG.debug("After job init(), job config now is:[\n{}\n]",
                    originalConfig.toJSON());
        }

        /**
         * Checks that every configured table is readable and that the
         * querySql / split key are valid, running one {@link PreCheckTask}
         * per connection on a bounded thread pool.
         *
         * @param originalConfig the job-level configuration
         * @throws DataExchangeException if any pre-check task fails with one
         */
        public void preCheck(Configuration originalConfig)
        {
            Configuration queryConf = ReaderSplitUtil.doPreCheckSplit(originalConfig);
            String splitPK = queryConf.getString(Key.split_key);
            List<Object> connList = queryConf.getList(Constant.CONN_MARK, Object.class);
            String username = queryConf.getString(Key.username);
            String password = queryConf.getString(Key.password);

            if (connList == null || connList.isEmpty())
            {
                // Nothing to check. Also guards Executors.newFixedThreadPool(0),
                // which would throw IllegalArgumentException.
                return;
            }

            int poolSize = Math.min(connList.size(), MAX_PRE_CHECK_THREADS);
            ExecutorService executorService = Executors.newFixedThreadPool(poolSize);
            try
            {
                Collection<PreCheckTask> taskList = new ArrayList<PreCheckTask>(connList.size());
                for (Object conn : connList)
                {
                    Configuration configuration = Configuration.from(conn.toString());
                    taskList.add(new PreCheckTask(username, password, configuration, splitPK));
                }

                List<Future<Boolean>> futureList = Lists.newArrayList();
                try
                {
                    futureList = executorService.invokeAll(taskList);
                } catch (InterruptedException e)
                {
                    Thread.currentThread().interrupt();
                }

                for (Future<Boolean> future : futureList)
                {
                    try
                    {
                        future.get();
                    } catch (ExecutionException e)
                    {
                        Throwable cause = e.getCause();
                        if (cause instanceof DataExchangeException)
                        {
                            throw (DataExchangeException) cause;
                        }
                        // Previously this was a blind cast to DataExchangeException,
                        // which turned any other failure into a ClassCastException.
                        throw new IllegalStateException("preCheck task failed", cause);
                    } catch (InterruptedException e)
                    {
                        Thread.currentThread().interrupt();
                    }
                }
            } finally
            {
                // Always release the pool, even when a pre-check task failed;
                // the original only shut it down on the success path.
                executorService.shutdownNow();
            }
        }

        /**
         * Splits the job configuration into per-task slices.
         *
         * @param originalConfig the job-level configuration
         * @param adviceNumber   the suggested number of slices
         * @return one configuration per reader task
         * @throws SQLException if split-range queries against the database fail
         */
        public List<Configuration> split(Configuration originalConfig,
                                         int adviceNumber) throws SQLException
        {
            return ReaderSplitUtil.doSplit(originalConfig, adviceNumber);
        }

        public void post(Configuration originalConfig)
        {
            // do nothing
        }

        public void destroy(Configuration originalConfig)
        {
            // do nothing
        }

    }

    @Slf4j
    public static class Task
    {


        protected final byte[] EMPTY_CHAR_ARRAY = new byte[0];


        private int taskGroupId = -1;
        private int taskId = -1;

        private String username;
        private String password;
        private String jdbcUrl;
        private String mandatoryEncoding;

        // 作为日志显示信息时，需要附带的通用信息。比如信息所对应的数据库连接等信息，针对哪个表做的操作
        private String basicMsg;

        public Task()
        {
            this(-1, -1);
        }

        public Task(int taskGropuId, int taskId)
        {

            this.taskGroupId = taskGropuId;
            this.taskId = taskId;
        }

        public void init(Configuration readerSliceConfig)
        {

			/* for database connection */

            this.username = readerSliceConfig.getString(Key.username);
            this.password = readerSliceConfig.getString(Key.password);
            this.jdbcUrl = readerSliceConfig.getString(Key.jdbc_url);


            this.mandatoryEncoding = readerSliceConfig.getString(Key.mandatory_encoding, "");

            basicMsg = String.format("jdbcUrl:[%s]", this.jdbcUrl);

        }

        public void startRead(Configuration configuration,
                              Produce produce,
                              AbstractTaskPluginCollector abstractTaskPluginCollector) throws SQLException
        {
            String querySql = configuration.getString(Key.query_sql);
            String tableName = configuration.getString(Key.table_name);

            PerfTrace.getInstance().addTaskDetails(taskId, tableName + "," + basicMsg);

            log.info("Begin to read record by Sql: [{}\n] {}.", querySql, basicMsg);
            RunningInfo queryRunningInfo = new RunningInfo(taskGroupId, taskId, Phase.sql_query);
            queryRunningInfo.start();

            Connection connection = Jdbc.getConnectionWithRetry(jdbcUrl, username, password);

            // session config .etc related
            Jdbcs1.dealWithSessionConfig(connection, configuration,
                    basicMsg);

            int columnNumber = 0;
            ResultSet resultSet = null;
            try
            {
                resultSet = Jdbc.executeQuery(connection, querySql);
                queryRunningInfo.end();

                ResultSetMetaData resultSetMetaData = resultSet.getMetaData();
                columnNumber = resultSetMetaData.getColumnCount();

                //这个统计干净的result_Next时间
                RunningInfo allResultRunningInfo = new RunningInfo(taskGroupId, taskId, Phase.result_next_all);
                allResultRunningInfo.start();

                long rsNextUsedTime = 0;
                long lastTime = System.nanoTime();
                while (resultSet.next())
                {
                    rsNextUsedTime += (System.nanoTime() - lastTime);
                    this.transportOneRecord(produce, resultSet,
                            resultSetMetaData, columnNumber, mandatoryEncoding, abstractTaskPluginCollector);
                    lastTime = System.nanoTime();
                }

                allResultRunningInfo.end(rsNextUsedTime);
                //目前大盘是依赖这个打印，而之前这个Finish read record是包含了sql查询和result next的全部时间
                log.info("Finished read record by Sql: [{}\n] {}.",
                        querySql, basicMsg);

            } catch (Exception e)
            {
                throw RdbmsException.asQueryException(e, querySql, tableName, username);
            } finally
            {
                Jdbc.close(null, connection);
            }
        }

        public void post(Configuration configuration)
        {
            // do nothing
        }

        public void destroy(Configuration configuration)
        {
            // do nothing
        }

        protected Record transportOneRecord(Produce produce, ResultSet resultSet,
                                            ResultSetMetaData resultSetMetaData, int columnNumber, String mandatoryEncoding,
                                            AbstractTaskPluginCollector abstractTaskPluginCollector)
        {
            Record record = buildRecord(produce, resultSet, resultSetMetaData, columnNumber, mandatoryEncoding, abstractTaskPluginCollector);
            produce.addToRecordList(record);
            return record;
        }

        protected Record buildRecord(Produce produce, ResultSet resultSet, ResultSetMetaData resultSetMetaData, int columnNumber, String mandatoryEncoding,
                                     AbstractTaskPluginCollector abstractTaskPluginCollector)
        {
            Record record = produce.createRecord();

            try
            {
                for (int i = 1; i <= columnNumber; i++)
                {
                    switch (resultSetMetaData.getColumnType(i))
                    {

                        case Types.CHAR:
                        case Types.NCHAR:
                        case Types.VARCHAR:
                        case Types.LONGVARCHAR:
                        case Types.NVARCHAR:
                        case Types.LONGNVARCHAR:
                            String rawData;
                            if (StringUtils.isBlank(mandatoryEncoding))
                            {
                                rawData = resultSet.getString(i);
                            } else
                            {
                                rawData = new String((resultSet.getBytes(i) == null ? EMPTY_CHAR_ARRAY :
                                        resultSet.getBytes(i)), mandatoryEncoding);
                            }
                            record.addColumn(new StringColumn(rawData));
                            break;

                        case Types.CLOB:
                        case Types.NCLOB:
                            record.addColumn(new StringColumn(resultSet.getString(i)));
                            break;

                        case Types.SMALLINT:
                        case Types.TINYINT:
                        case Types.INTEGER:
                        case Types.BIGINT:
                            record.addColumn(new LongColumn(resultSet.getString(i)));
                            break;

                        case Types.NUMERIC:
                        case Types.DECIMAL:
                            record.addColumn(new DoubleColumn(resultSet.getString(i)));
                            break;

                        case Types.FLOAT:
                        case Types.REAL:
                        case Types.DOUBLE:
                            record.addColumn(new DoubleColumn(resultSet.getString(i)));
                            break;

                        case Types.TIME:
                            record.addColumn(new DateColumn(resultSet.getTime(i)));
                            break;

                        // for mysql bug, see http://bugs.mysql.com/bug.php?id=35115
                        case Types.DATE:
                            if (resultSetMetaData.getColumnTypeName(i).equalsIgnoreCase("year"))
                            {
                                record.addColumn(new LongColumn(resultSet.getInt(i)));
                            } else
                            {
                                record.addColumn(new DateColumn(resultSet.getDate(i)));
                            }
                            break;

                        case Types.TIMESTAMP:
                            record.addColumn(new DateColumn(resultSet.getTimestamp(i)));
                            break;

                        case Types.BINARY:
                        case Types.VARBINARY:
                        case Types.BLOB:
                        case Types.LONGVARBINARY:
                            record.addColumn(new BytesColumn(resultSet.getBytes(i)));
                            break;

                        case Types.BOOLEAN:
                        case Types.BIT:
                            record.addColumn(new BooleanColumn(resultSet.getBoolean(i)));
                            break;

                        case Types.NULL:
                            String stringData = null;
                            if (resultSet.getObject(i) != null)
                            {
                                stringData = resultSet.getObject(i).toString();
                            }
                            record.addColumn(new StringColumn(stringData));
                            break;

                        default:
                            throw DataExchangeException
                                    .asDataExchangeException(
                                            DBUtilErrorCode.UNSUPPORTED_TYPE,
                                            String.format(
                                                    "您的配置文件中的列配置信息有误. 因为DataX 不支持数据库读取这种字段类型. 字段名:[%s], 字段名称:[%s], 字段Java类型:[%s]. 请尝试使用数据库函数将其转换datax支持的类型 或者不同步该字段 .",
                                                    resultSetMetaData.getColumnName(i),
                                                    resultSetMetaData.getColumnType(i),
                                                    resultSetMetaData.getColumnClassName(i)));
                    }
                }
            } catch (Exception e)
            {

                //TODO 这里识别为脏数据靠谱吗？
                abstractTaskPluginCollector.collectDirtyRecord(record, e);
                if (e instanceof DataExchangeException)
                {
                    throw (DataExchangeException) e;
                }
            }
            return record;
        }
    }

}
