package org.poem.component;

import com.alibaba.fastjson.JSONObject;
import com.alibaba.otter.canal.protocol.CanalEntry;
import com.google.common.collect.Lists;
import org.poem.CanalApplicationContext;
import org.poem.CanalClientHeadEntityVO;
import org.poem.ContextDatabase;
import org.poem.ExecTaskDetailPlanVO;
import org.poem.enums.EnumDataType;
import org.poem.exception.CanalClientException;
import org.poem.kafka.KafkaProducer;
import org.poem.sql.SqlUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.jdbc.core.JdbcTemplate;

import java.sql.SQLException;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicInteger;

import static org.poem.sql.SqlUtils.MAX_SIZE;

/**
 * Exports a MySQL source table into a target database by replaying the work
 * through Kafka: first a drop + create of the table, then the rows as paged
 * batches of INSERT statements produced on a shared thread pool.
 *
 * @author poem
 */
public class MysqlTransformOutComponent {

    private static final Logger logger = LoggerFactory.getLogger(MysqlTransformOutComponent.class);

    /**
     * Shared pool used to page rows out of the source table.
     * NOTE(review): intentionally never shut down (component-lifetime pool);
     * the bounded queue plus CallerRunsPolicy throttles the submitting thread
     * instead of rejecting work.
     */
    private static final ExecutorService threadPoolExecutor;

    /*
     * Initialise the thread pool: core threads = available processors,
     * workers named "mysql-insert-N" so they are identifiable in thread dumps.
     */
    static {
        int core = Runtime.getRuntime().availableProcessors();
        AtomicInteger threadIndex = new AtomicInteger(0);
        threadPoolExecutor = new ThreadPoolExecutor(core, core * 2, 1000, TimeUnit.MILLISECONDS,
                new ArrayBlockingQueue<Runnable>(core),
                r -> new Thread(r, "mysql-insert-" + threadIndex.incrementAndGet()),
                new ThreadPoolExecutor.CallerRunsPolicy());
    }

    /**
     * Entry point of the export: recreates the table on the target side and
     * then streams the data. The source is always MySQL.
     *
     * @param dataTransformVO task description (source schema, target table)
     * @param sourceJdbc      JDBC template for the MySQL source
     * @param targetJdbc      JDBC template for the target database
     *                        (currently unused here: DDL/DML is delivered via Kafka)
     * @throws SQLException         propagated from the export step
     * @throws CanalClientException when no target schema type has been configured
     */
    public void importData(ExecTaskDetailPlanVO dataTransformVO, JdbcTemplate sourceJdbc, JdbcTemplate targetJdbc) throws SQLException, CanalClientException {
        if (ContextDatabase.getAppSchema() == null) {
            throw new CanalClientException("app schema is null, EnumDataType.MYSQL , EnumDataType.POSTGRES ,EnumDataType.ORACLE ");
        }
        // the source is always MySQL
        createTable(dataTransformVO, sourceJdbc);
        exportData(dataTransformVO, sourceJdbc);
    }


    /**
     * Reads the source table definition from information_schema, builds the
     * target CREATE TABLE statement, and publishes a "drop" followed by a
     * "create" event to Kafka.
     *
     * @param dataTransformVO task description
     * @param sourceJdbc      JDBC template for the MySQL source
     */
    private void createTable(ExecTaskDetailPlanVO dataTransformVO, JdbcTemplate sourceJdbc) {
        // schema that owns the source table
        String schema = dataTransformVO.getSourceSchema();
        // bind table/schema as parameters instead of concatenating them into
        // the statement (closes the SQL-injection surface of the original).
        // NOTE(review): the *target* table name is looked up in the *source*
        // schema — assumes source and target tables share the same name; confirm.
        List<Map<String, Object>> columnRows = sourceJdbc.queryForList(
                "SELECT COLUMN_NAME,DATA_TYPE,IS_NULLABLE, COLUMN_TYPE,COLUMN_KEY FROM information_schema.COLUMNS"
                        + " WHERE TABLE_NAME = ? AND TABLE_SCHEMA = ?",
                dataTransformVO.getTargetTable(), schema);

        List<String> columnTypes = Lists.newArrayList();
        List<String> columnNames = Lists.newArrayList();
        List<String> primaryKeys = Lists.newArrayList();
        for (Map<String, Object> row : columnRows) {
            columnNames.add(String.valueOf(row.get("COLUMN_NAME")));
            columnTypes.add(String.valueOf(row.get("DATA_TYPE")));
            // "PRI" marks primary-key members in information_schema.COLUMNS
            if (row.get("COLUMN_KEY") != null && "PRI".equals(String.valueOf(row.get("COLUMN_KEY")))) {
                primaryKeys.add(String.valueOf(row.get("COLUMN_NAME")));
            }
        }
        Map<String, String> zipColumnTypes = SqlUtils.zipColumnTypes(columnNames, columnTypes);
        // NOTE(review): zips ALL column names against only the PK column names,
        // so the two lists can differ in length — verify SqlUtils.zipColumnTypes
        // tolerates unequal sizes.
        Map<String, String> zipColumnKey = SqlUtils.zipColumnTypes(columnNames, primaryKeys);
        String createTableSql = SqlUtils.getCreateTableSql(dataTransformVO.getTargetTable(),
                zipColumnTypes, zipColumnKey, ContextDatabase.getAppSchema());

        if (logger.isDebugEnabled()) {
            // debug level to match the guard (original logged at info inside
            // an isDebugEnabled() check, making the guard pointless)
            logger.debug("[{}] ======================================= ", dataTransformVO.getTargetTable());
            logger.debug("[{}]\n{}", dataTransformVO.getTargetTable(), createTableSql);
            logger.debug("[{}] ======================================= ", dataTransformVO.getTargetTable());
        }

        CanalClientHeadEntityVO createTable = new CanalClientHeadEntityVO();
        createTable.setSchemaName(schema);
        createTable.setSql(Collections.singletonList(createTableSql));
        createTable.setEventType(CanalEntry.EventType.CREATE);
        createTable.setTableName(dataTransformVO.getTargetTable());

        // NOTE(review): the drop event is tagged DELETE (a row-level event
        // type); CanalEntry also defines ERASE for table drops — confirm what
        // downstream consumers expect before changing it.
        CanalClientHeadEntityVO dropTable = new CanalClientHeadEntityVO();
        dropTable.setSchemaName(schema);
        dropTable.setSql(Collections.singletonList("drop table if exists " + dataTransformVO.getTargetTable()));
        dropTable.setEventType(CanalEntry.EventType.DELETE);
        dropTable.setTableName(dataTransformVO.getTargetTable());

        KafkaProducer kafkaProducer = (KafkaProducer) CanalApplicationContext.getBean(KafkaProducer.class);
        // drop first, then create, so the target ends up with a fresh table
        kafkaProducer.sendMessage(JSONObject.toJSONString(dropTable));
        kafkaProducer.sendMessage(JSONObject.toJSONString(createTable));
    }


    /**
     * Streams the table content: counts the rows, then submits one task per
     * page of MAX_SIZE rows; each task reads its page and publishes the
     * generated INSERT statements to Kafka.
     *
     * @param dataTransformVO task description
     * @param sourceJdbc      JDBC template for the MySQL source
     * @throws SQLException declared for API compatibility
     */
    private void exportData(ExecTaskDetailPlanVO dataTransformVO, JdbcTemplate sourceJdbc) throws SQLException {
        String schema = dataTransformVO.getSourceSchema();
        // total row count; identifiers cannot be bound as parameters —
        // schema/table come from the task configuration, not user input
        Long sum = sourceJdbc.queryForObject(
                "SELECT count(*) as su FROM " + schema + "." + dataTransformVO.getTargetTable(), Long.class);
        long dataSize = sum == null ? 0 : sum;

        // column metadata, parameterized for safety; the original also
        // collected IS_NULLABLE into a list it never used — dropped here
        List<Map<String, Object>> columnRows = sourceJdbc.queryForList(
                "select COLUMN_NAME,DATA_TYPE,IS_NULLABLE from information_schema.COLUMNS"
                        + " where table_name = ? and  TABLE_SCHEMA = ?",
                dataTransformVO.getTargetTable(), schema);
        List<String> columnNames = Lists.newArrayList();
        List<String> columnTypes = Lists.newArrayList();
        for (Map<String, Object> row : columnRows) {
            columnNames.add(String.valueOf(row.get("COLUMN_NAME")));
            columnTypes.add(String.valueOf(row.get("DATA_TYPE")));
        }

        if (logger.isDebugEnabled()) {
            logger.debug("[{}]Import Data ：{}", dataTransformVO.getTargetTable(), dataSize);
        }
        Map<String, String> zipColumnTypes = SqlUtils.zipColumnTypes(columnNames, columnTypes);
        EnumDataType targetEnumDataType = ContextDatabase.getAppSchema();
        // ceil(dataSize / MAX_SIZE): the original computed pages as
        // dataSize / MAX_SIZE + 1, which always scheduled one extra empty page
        // (and one useless page for an empty table)
        long pages = (dataSize + MAX_SIZE - 1) / MAX_SIZE;
        for (long i = 0; i < pages; i++) {
            final long page = i;
            threadPoolExecutor.submit(() -> {
                long offset = page * MAX_SIZE;
                // propagate the target schema type into this worker thread
                // (presumably thread-local state in ContextDatabase — confirm)
                ContextDatabase.setAppSchema(targetEnumDataType);
                // Bug fix: the executed query previously omitted the schema
                // prefix that the log line showed, so it ran against the
                // connection's default schema instead of the source schema.
                // NOTE(review): LIMIT paging without ORDER BY is not stable in
                // MySQL; rows may be skipped/duplicated across pages — consider
                // ordering by the primary key.
                String pageSql = "SELECT * FROM " + schema + "." + dataTransformVO.getTargetTable()
                        + " LIMIT " + offset + " , " + MAX_SIZE;
                logger.info("[{}] {}", dataTransformVO.getTargetTable(), pageSql);
                List<Map<String, Object>> rs = sourceJdbc.queryForList(pageSql);
                List<String> insertSql = createInsertSql(dataTransformVO, rs, columnNames, zipColumnTypes);
                CanalClientHeadEntityVO insertEvent = new CanalClientHeadEntityVO();
                insertEvent.setSchemaName(schema);
                insertEvent.setSql(insertSql);
                insertEvent.setEventType(CanalEntry.EventType.INSERT);
                insertEvent.setTableName(dataTransformVO.getTargetTable());
                KafkaProducer kafkaProducer = (KafkaProducer) CanalApplicationContext.getBean(KafkaProducer.class);
                kafkaProducer.sendMessage(JSONObject.toJSONString(insertEvent));
            });
        }
    }

    /**
     * Builds the INSERT statements for one page of rows.
     *
     * @param dataTransformVO task description (supplies the target table name)
     * @param metadata        the page of rows read from the source table
     * @param columnNames     column names in source-table order
     * @param zipColumnTypes  column name -&gt; data type mapping
     * @return one INSERT statement per row (delegated to SqlUtils)
     */
    private List<String> createInsertSql(ExecTaskDetailPlanVO dataTransformVO, List<Map<String, Object>> metadata, List<String> columnNames,
                                         Map<String, String> zipColumnTypes) {
        String table = dataTransformVO.getTargetTable();
        return SqlUtils.createInsertSql(table, metadata, columnNames, zipColumnTypes, ContextDatabase.getAppSchema());
    }

}
