package belf.migrate.engine.job.jdbc;

import belf.migrate.api.model.QualifiersModel;
import belf.migrate.api.sink.jdbc.helper.ColumnRuleHelper;
import belf.migrate.api.table.schema.Catalog;
import belf.migrate.api.table.schema.JdbcExecuteResult;
import belf.migrate.api.table.schema.TablePath;
import belf.migrate.api.taskconf.*;
import belf.migrate.api.type.LogLevelType;
import belf.migrate.api.util.PostLog;
import belf.migrate.api.job.Job;
import com.alibaba.druid.pool.DruidDataSource;
import lombok.extern.slf4j.Slf4j;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import static belf.migrate.api.job.JobCode.SUCCESS;
import static belf.migrate.api.job.JobCode.FAILTURE;

@Slf4j
public class JDBCWriter implements Runnable {

    /**
     * Guards the {@code SET IDENTITY_INSERT ... ON/OFF} window across ALL writer
     * instances. Each writer thread runs its own {@code JDBCWriter} Runnable, so the
     * previous {@code synchronized (this)} locked a per-instance monitor and provided
     * no mutual exclusion between threads — exactly the corruption the original
     * comment warned about. A class-level lock makes the exclusion real.
     * NOTE(review): IDENTITY_INSERT is session-scoped on SQL Server; with pooled
     * connections this serialization also avoids leaving it ON for a reused session.
     */
    private static final Object IDENTITY_INSERT_LOCK = new Object();

    private final Job job;
    private final JobContext jobContext;
    private final TableSyncConf tableSyncConf;
    private final Pipe pipe;
    private final ColumnRuleHelper columnRuleHelper;

    private final Task task;
    private final TaskConf taskConf;
    private final SinkConf sinkConf;
    private final Catalog sinkCatalog;
    private final TablePath tablePath;
    // When false, rows are batch-inserted without the IDENTITY_INSERT toggle.
    private boolean isPrimaryKey = true;

    private final DruidDataSource dataSource;

    /**
     * Builds a writer bound to one sink table.
     *
     * @param job              owning migration job (supplies task/jobId for logging)
     * @param jobContext       shared job context (sink catalog, qualifiers, progress)
     * @param tableSyncConf    per-table sync configuration (columns, sortBy, insert columns)
     * @param pipe             queue the paired reader pushes result batches onto
     * @param columnRuleHelper applies per-column write rules and SQL placeholders
     */
    public JDBCWriter(Job job, JobContext jobContext, TableSyncConf tableSyncConf, Pipe pipe, ColumnRuleHelper columnRuleHelper) {
        this.job = job;
        this.jobContext = jobContext;
        this.tableSyncConf = tableSyncConf;
        this.pipe = pipe;
        this.columnRuleHelper = columnRuleHelper;

        this.task = job.getTask();
        this.taskConf = task.getTaskConf();
        this.sinkConf = task.getTaskConf().getSinkConf();
        this.sinkCatalog = jobContext.getSinkCatalog();

        String sinkDatabaseName = sinkConf.getDatabaseName();
        String sinkSchemaName = sinkConf.getSchemaName();
        String tableName = tableSyncConf.getTableName();
        QualifiersModel sinkQualifiersModel = jobContext.getSinkQualifiersModel();

        this.tablePath = TablePath.of(sinkDatabaseName, sinkSchemaName, tableName, sinkQualifiersModel);

        // TODO for test: hard-coded tables known to have no identity column.
        // Should come from the sink catalog/metadata rather than a name list.
        if ("employees".equals(tableName) || "film_actor".equals(tableName) || "film_category".equals(tableName)) {
            isPrimaryKey = false;
        }

        this.dataSource = JDBCJobRunner.initDataSource(sinkConf.getConnectionConf());
    }

    /**
     * Batch-inserts one result batch into {@code tablePath}.
     * <p>
     * Builds an INSERT statement whose column list comes from the configured
     * insert columns (if any) or from the result metadata, with per-column value
     * placeholders supplied by {@link ColumnRuleHelper}. For identity tables the
     * whole batch is executed inside a globally-serialized
     * {@code SET IDENTITY_INSERT ON/OFF} window.
     *
     * @param tablePath fully-qualified sink table
     * @param result    batch of rows from the reader; {@code null} is a no-op
     * @throws SQLException if the batch execution or commit fails
     */
    private void executeInsert(TablePath tablePath, JdbcExecuteResult result) throws SQLException {
        if (null == result) {
            return;
        }

        // Column write rules of type FUNCTION (database functions).
        // TODO: column-name case sensitivity is unresolved; also this map is only
        // logged — the FUNCTION rule is actually applied via get_placeholder below.
        Map<String, String> changeColumns = new HashMap<>();
        List<ColumnSyncConf> columnSyncConfs = tableSyncConf.getColumns();
        if (null != columnSyncConfs) {
            for (ColumnSyncConf columnSyncConf : columnSyncConfs) {
                ColumnRule columnRule = columnSyncConf.getWriteRule();
                // Guard against columns configured without a write rule (was an NPE risk).
                if (null != columnRule && ColumnRule.RuleType.FUNCTION.equals(columnRule.getRuleType())) {
                    changeColumns.put(columnSyncConf.getColumnName(), columnRule.getRuleContent());
                }
            }
        }
        log.info("changeColumns: {}", changeColumns);

        StringBuilder sb = new StringBuilder();
        sb.append("INSERT INTO ");
        sb.append(tablePath.getFullNameWithQuoted());
        sb.append(" (");
        // Prefer the explicitly configured insert-column list; otherwise take the
        // column set reported by the reader's result metadata.
        List<String> columnNames = new ArrayList<>();
        if (tableSyncConf.getInsertColumns() != null && !tableSyncConf.getInsertColumns().isEmpty()) {
            columnNames.addAll(tableSyncConf.getInsertColumns());
        } else {
            result.getColumns().forEach(s -> columnNames.add(s.getColumnName()));
        }
        sb.append(String.join(", ", columnNames));
        sb.append(") VALUES (");
        // Placeholders may be plain "?" or a per-column SQL expression (e.g. a
        // database function wrapping "?"), decided by the column rule helper.
        List<String> placeholders = new ArrayList<>();
        for (String columnName : columnNames) {
            placeholders.add(columnRuleHelper.get_placeholder(tablePath.getTableName(), columnName));
        }
        sb.append(String.join(", ", placeholders));
        sb.append(");");
        String sql = sb.toString();
        log.debug("SQL: {}", sql);
        try (Connection connection = dataSource.getConnection();
             PreparedStatement preparedStatement = connection.prepareStatement(sql)) {

            // Bind every row into the batch; JDBC parameter indexes are 1-based.
            for (Map<String, Object> row : result.getRows()) {
                if (null == row) {
                    continue;
                }
                int i = 0;
                for (String columnName : columnNames) {
                    i++;
                    preparedStatement.setObject(i, row.get(columnName));
                }
                preparedStatement.addBatch();
            }
            if (isPrimaryKey) {
                // Serialize the IDENTITY_INSERT window across all writer threads;
                // this throttles insert throughput but prevents concurrent ON/OFF
                // toggles from corrupting each other.
                synchronized (IDENTITY_INSERT_LOCK) {
                    try (Statement statement = connection.createStatement()) {
                        try {
                            statement.executeUpdate("SET IDENTITY_INSERT " + sinkCatalog.getFullTableName(tablePath) + " ON;");
                        } catch (SQLException ignored) {
                            // Best-effort: the sink may not support IDENTITY_INSERT
                            // or it may already be ON; proceed with the insert.
                            log.debug("SET IDENTITY_INSERT ON failed for {}: {}", tablePath.getTableName(), ignored.getMessage());
                        }
                        preparedStatement.executeBatch();
                        // NOTE(review): commit() assumes auto-commit is disabled in
                        // the Druid pool configuration — confirm against the pool setup.
                        connection.commit();
                        pipe.wroteCount(result.getRows().size());
                        try {
                            statement.executeUpdate("SET IDENTITY_INSERT " + sinkCatalog.getFullTableName(tablePath) + " OFF;");
                        } catch (SQLException ignored) {
                            // Best-effort symmetric OFF; see ON handling above.
                            log.debug("SET IDENTITY_INSERT OFF failed for {}: {}", tablePath.getTableName(), ignored.getMessage());
                        }
                    } catch (SQLException e) {
                        log.error("SQLException {}", sql, e);
                        throw e;
                    }
                }
            } else {
                preparedStatement.executeBatch();
                connection.commit();
                pipe.wroteCount(result.getRows().size());
            }

        } catch (SQLException e) {
            log.error("SQLException {}", sql, e);
            throw e;
        }
    }

    /*
     * Full sync: sharded writes, multi-threaded.
     * Incremental sync: single-threaded.
     * Example of the identity toggle issued per batch:
     *   SET IDENTITY_INSERT "SAKILA"."actor" ON;
     */

    /**
     * Drains the pipe until all rows are written, then reports success (with the
     * last sort-key value) to the console. On any write failure, reports FATAL
     * and exits the loop.
     */
    @Override
    public void run() {
        while (true) {
            try {
                JdbcExecuteResult result = (JdbcExecuteResult) this.pipe.pop();
                if (null == result) {
                    // Nothing available yet; the finally-block sleep paces the polling.
                    continue;
                }
                log.debug("{} Pipe POP {} Rows.", this.pipe.name(), result.getRows().size());
                // Track the last value of the sortBy column (resume watermark) while
                // applying the per-column write rules to every cell in place.
                Object last = null;
                for (Map<String, Object> row : result.getRows()) {
                    for (Map.Entry<String, Object> entry : row.entrySet()) {
                        if (entry.getKey().equalsIgnoreCase(tableSyncConf.getSortBy())) {
                            last = entry.getValue();
                        }
                        Object rv = columnRuleHelper.apply_write_rule(tablePath.getTableName(), entry.getKey(), entry.getValue());
                        entry.setValue(rv);
                    }
                }

                executeInsert(tablePath, result);

                log.info("Table: {}, last: {}", tablePath.getTableName(), last);
                // Only report the last watermark to the console once the task has
                // written every expected row.
                if (pipe.getWrote() == pipe.getTotal()) {
                    log.info(pipe.getStatus());
                    PostLog.sendLogModel(job.getJobId(), job.getTask().getTaskType().getName(), pipe.getStatus(), LogLevelType.INFO.getType(), SUCCESS,
                            jobContext.getProgress());

                    if (last != null) {
                        PostLog.sendSyncStatus(new SyncStatus(job.getJobId(), task.getTaskId(), tablePath.getTableName(), last.toString()));
                    }

                    // TODO: stop reader/writer thread pools, e.g. writeThreadPool.shutdown();
                    log.info("pipe.getWrote() == pipe.getTotal(), JDBCReader {} break", Thread.currentThread().getName());
                    break;
                }
            } catch (Exception e) {
                log.error("JDBC Write Data Exception", e);
                PostLog.sendLogModel(job.getJobId(), job.getTask().getTaskType().getName(), String.format("JDBC Writer Exception: %s", e.getMessage()), LogLevelType.FATAL.getType(), FAILTURE,
                        jobContext.getProgress());
                break;
            } finally {
                try {
                    // Pace the polling loop so an empty pipe doesn't busy-spin.
                    Thread.sleep(10);
                } catch (InterruptedException e) {
                    // Previously swallowed, making the thread uninterruptible.
                    // Restore the interrupt flag and stop this writer.
                    Thread.currentThread().interrupt();
                    log.warn("JDBCWriter interrupted, exiting", e);
                    break;
                }
            }
        }
    }

    /** Releases the sink connection pool. */
    public void close() {
        dataSource.close();
    }
}
