package subs;

import configer.SqlServerConfiger;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import rocketmq.Producer;
import utils.PrintResultSetUtil;
import utils.Druid;

import java.sql.*;
import java.util.*;

public class SqlServerSub {

    protected final static Logger logger = LoggerFactory.getLogger(SqlServerSub.class);         // logger for this subscriber

    /** Sentinel value meaning "pre-run test passed" inside {@link #testRun(Druid)}. */
    private static final int TEST_PASSED = 1000;

    private SqlServerConfiger sqlServerConfiger;                                                // SqlServer connection/table configuration
    private Map<String, String> batchSqlServerLsn;                                              // shared map: "db.schema.table" -> last processed LSN (hex, no 0x prefix)

    /**
     * Creates the SqlServer incremental-subscription worker.
     *
     * @param sqlServerConfiger  configuration (database, schema, table, MQ settings)
     * @param batchSqlServerLsn  shared LSN checkpoint map, keyed by "db.schema.table"
     */
    public SqlServerSub(SqlServerConfiger sqlServerConfiger, Map<String, String> batchSqlServerLsn) {
        logger.info("实例化 <SqlServer数据库增量订阅> 处理对象");
        this.sqlServerConfiger = sqlServerConfiger;
        this.batchSqlServerLsn = batchSqlServerLsn;
    }


    /**
     * Pre-run smoke test: verifies the pool hands out a working connection/statement and that
     * the CDC change table is queryable. Exits the JVM if the test ultimately fails.
     *
     * <p>NOTE(review): the SQL is built by string concatenation from config values; assumed to
     * be trusted operator-supplied config, not user input — confirm before exposing further.
     */
    public void testRun(Druid druid) {
        // flag < 0 -> keep retrying; flag == TEST_PASSED -> success; anything else -> give up.
        int flag = -1;
        String sql = "USE " + sqlServerConfiger.getSqlServer_database() + ";"
                + "select TOP 1 * from cdc." + sqlServerConfiger.getSqlServer_schema()
                + "_" + sqlServerConfiger.getSqlServer_tableName() + "_ct"
                + " where __$start_lsn > 0x000000" + ";";
        do {
            logger.info("SqlServer增量订阅程序 执行前测试 ～");
            logger.info("数据库连接池 初始化");
            Connection conn = druid.getConnection();
            if (conn == null) {
                logger.info("SqlServer增量订阅程序 执行前测试不通过，无法继续进行。");
                druid.close(conn);
                sleepQuietly(3000);
                flag++;
                continue; // fix: original could fall through to getStmt(null) when the sleep was interrupted
            }
            Statement stmt = druid.getStmt(conn);
            if (stmt == null) {
                logger.info("SqlServer增量订阅程序 执行前测试不通过，无法继续进行。");
                druid.close(stmt, conn);
                sleepQuietly(3000);
                flag++;
                continue;
            }
            try {
                // Make sure CDC is enabled on the table before probing the change table.
                if (!Is_CDC_Enabled_Table(druid)) {
                    try {
                        Enable_CDC_Table(druid);
                    } catch (Exception e) {
                        logger.error("开启CDC失败", e);
                    }
                }
                ResultSet rs = stmt.executeQuery(sql);
                logger.info("获取 ResultSet <" + rs + "> 成功");
                PrintResultSetUtil.Print(rs);
                druid.close(rs, stmt, conn);
                flag = TEST_PASSED;
            } catch (SQLException throwables) {
                logger.error("获取 ResultSet 出错 { " + throwables + " } ");
                logger.info("SqlServer增量订阅程序 执行前测试不通过，无法继续进行。");
                druid.close(stmt, conn); // fix: close BEFORE sleeping so an interrupt cannot leak the connection
                sleepQuietly(3000);
                flag++;
            }
        } while (flag < 0);

        if (flag != TEST_PASSED) {
            System.exit(0);
        }
    }


    /**
     * Main loop: ensures CDC is enabled at database and table level, then polls the change
     * table forever, forwarding each batch to RocketMQ.
     */
    public void run(Druid druid) {
        boolean flag = true;
        logger.info("SqlServer增量订阅程序 开始执行 ～");
        // Seed the LSN checkpoint for this table if absent.
        initSqlServerLsn();
        // Enable CDC at the DATABASE level if needed.
        if (!Is_CDC_Enabled_Database(druid)) {
            try {
                // fix: original wrongly called Enable_CDC_Table here, so database-level CDC was never enabled
                Enable_CDC_Database(druid);
            } catch (Exception e) {
                logger.error("## something goes wrong when getting incremental Data in SqlServer:", e);
            }
        }
        // Enable CDC at the TABLE level if needed.
        if (!Is_CDC_Enabled_Table(druid)) {
            try {
                Enable_CDC_Table(druid);
            } catch (Exception e) {
                logger.error("## something goes wrong when getting incremental Data in SqlServer:", e);
            }
        } else {
            // CDC already on: discard previously captured changes so we start fresh.
            wipeData(druid);
        }
        // Poll batches forever (Get_Incremental_Data always returns true; it sleeps when idle).
        while (flag) {
            flag = Get_Incremental_Data(druid);
        }
    }


    /**
     * Builds the checkpoint-map key "database.schema.table" for the configured table.
     */
    private String lsnKey() {
        return sqlServerConfiger.getSqlServer_database()
                + "." + sqlServerConfiger.getSqlServer_schema()
                + "." + sqlServerConfiger.getSqlServer_tableName();
    }

    /**
     * Sleeps, logging and restoring the interrupt flag instead of swallowing it.
     */
    private static void sleepQuietly(long millis) {
        try {
            Thread.sleep(millis);
        } catch (InterruptedException e) {
            logger.error("Thread.sleep 失败 { " + e + " } ");
            Thread.currentThread().interrupt(); // preserve interruption status for callers
        }
    }

    /**
     * Initializes the LSN checkpoint to "000000" if no entry exists for this table.
     */
    private void initSqlServerLsn() {
        synchronized (batchSqlServerLsn) {
            // putIfAbsent under the lock closes the check-then-act race of a get()+put() pair.
            batchSqlServerLsn.putIfAbsent(lsnKey(), "000000");
        }
    }

    /**
     * Returns true when the configured database has CDC enabled
     * (sys.databases.is_cdc_enabled == 1).
     */
    public boolean Is_CDC_Enabled_Database(Druid druid) {
        Connection conn = druid.getConnection();
        Statement stmt = druid.getStmt(conn);
        String sql = "SELECT is_cdc_enabled FROM sys.databases WHERE name = '"
                + sqlServerConfiger.getSqlServer_database() + "';";
        logger.info("SQL语句(判断指定数据库是否启用CDC功能)为 " + sql);
        int is_cdc_enabled = 0;
        try {
            ResultSet result = stmt.executeQuery(sql);
            if (result.next()) {
                is_cdc_enabled = result.getInt(1);
            }
            druid.close(result, stmt, conn); // fix: original leaked all three resources
        } catch (SQLException e) {
            logger.error("## something goes wrong when executing SQL statement to inquire whether the database enable CDC:", e);
            druid.close(stmt, conn);
        }
        return is_cdc_enabled == 1;
    }


    /**
     * Enables CDC at the database level via sys.sp_cdc_enable_db.
     */
    public void Enable_CDC_Database(Druid druid) {
        Connection conn = druid.getConnection();
        Statement stmt = druid.getStmt(conn);
        // fix: original was "USE" + db (missing space) -> invalid T-SQL
        String sql = "USE " + sqlServerConfiger.getSqlServer_database() + ";" + "EXEC sys.sp_cdc_enable_db;";
        try {
            // fix: executeQuery throws when a statement returns no ResultSet; EXEC returns none
            stmt.execute(sql);
        } catch (SQLException e) {
            logger.error("## something goes wrong when executing SQL statement to enable database CDC:", e);
        } finally {
            druid.close(stmt, conn); // fix: original leaked the connection/statement
        }
    }


    /**
     * Returns true when the configured table has CDC enabled
     * (sys.tables.is_tracked_by_cdc == 1).
     */
    public boolean Is_CDC_Enabled_Table(Druid druid) {
        Connection conn = druid.getConnection();
        Statement stmt = druid.getStmt(conn);
        String sql = "USE " + sqlServerConfiger.getSqlServer_database() + ";"
                + "SELECT is_tracked_by_cdc FROM sys.tables WHERE name = '"
                + sqlServerConfiger.getSqlServer_tableName()
                + "' and schema_id = SCHEMA_ID('"
                + sqlServerConfiger.getSqlServer_schema() + "');";
        logger.info("SQL语句(判断指定表是否启用CDC功能)为 " + sql);

        int is_tracked_by_cdc = -1;
        try {
            ResultSet result = stmt.executeQuery(sql);
            if (result.next()) {
                is_tracked_by_cdc = result.getInt(1);
            }
            druid.close(result, stmt, conn);
        } catch (SQLException throwables) {
            logger.error("## something goes wrong when executing SQL statement to inquire whether the table enable CDC:", throwables);
            druid.close(stmt, conn);
        }

        return is_tracked_by_cdc == 1;
    }


    /**
     * Enables CDC on the configured table via sys.sp_cdc_enable_table.
     */
    public void Enable_CDC_Table(Druid druid) {
        Connection conn = druid.getConnection();
        Statement stmt = druid.getStmt(conn);
        String sql = "SET NOCOUNT ON USE " + sqlServerConfiger.getSqlServer_database() + ";"
                + "EXEC sys.sp_cdc_enable_table "
                + "@source_schema = '" + sqlServerConfiger.getSqlServer_schema() + "',"
                + "@source_name = '" + sqlServerConfiger.getSqlServer_tableName() + "',"
                + "@role_name = NULL,"
                + "@capture_instance = NULL,"
                + "@supports_net_changes = 1,"
                + "@index_name = NULL,"
                + "@captured_column_list = NULL,"
                + "@filegroup_name = default" + ";";
        logger.info("SQL语句(在测试表上启用CDC)为 " + sql);
        try {
            // fix: executeQuery throws when the batch produces no ResultSet; EXEC returns none
            stmt.execute(sql);
        } catch (SQLException e) {
            logger.error("在测试表上启用CDC失败 { " + e + " } ");
        } finally {
            druid.close(stmt, conn);
        }
    }

    /**
     * Deletes all previously captured rows from the CDC change table for this table.
     */
    public void wipeData(Druid druid) {
        Connection conn = druid.getConnection();
        Statement stmt = druid.getStmt(conn);
        // fix: use the configured schema instead of hardcoded "dbo_", matching every other query here
        String sql = "use " + sqlServerConfiger.getSqlServer_database() + ";" +
                "delete from cdc." + sqlServerConfiger.getSqlServer_schema()
                + "_" + sqlServerConfiger.getSqlServer_tableName() + "_CT"
                + " where 1=1" + ";";
        // fix: original log label wrongly said "enable CDC on the test table"
        logger.info("SQL语句(清空表中的数据)为 " + sql);
        try {
            stmt.execute(sql);
        } catch (SQLException e) {
            logger.error("清空表中的数据失败 { " + e + " } ");
        } finally {
            druid.close(stmt, conn);
        }
    }


    /**
     * Fetches one batch of change rows newer than the checkpointed LSN, publishes them to
     * RocketMQ, and advances the checkpoint. Sleeps 3s when no new rows exist.
     *
     * @return always {@code true}, so {@link #run(Druid)} keeps polling
     */
    public boolean Get_Incremental_Data(Druid druid) {
        Connection conn = druid.getConnection();
        Statement stmt = druid.getStmt(conn);
        boolean gotRows = true;
        String newLsn = null;
        // Checkpoint starts at "000000" (seeded by initSqlServerLsn).
        String lsn = batchSqlServerLsn.get(lsnKey());
        String sql = "USE " + sqlServerConfiger.getSqlServer_database() + ";"
                + "select * from cdc." + sqlServerConfiger.getSqlServer_schema()
                + "_" + sqlServerConfiger.getSqlServer_tableName() + "_ct"
                + " where __$start_lsn > 0x" + lsn + ";";
        logger.info("SQL语句(获取增量数据)为 " + sql);
        ResultSet rs = null;
        try {
            rs = stmt.executeQuery(sql);
            if (rs.next()) {
                // fix: create the producer ONCE per batch; original rebuilt and closed it on every row
                Producer producer = new Producer(sqlServerConfiger.getProducerGroup(), sqlServerConfiger.getNameSrv());
                try {
                    do {
                        // Forward rows to RocketMQ; returns the LSN reached so far.
                        newLsn = producer.sqlServerProduceSub(rs, sqlServerConfiger, sqlServerConfiger.getTopicName(), sqlServerConfiger.getTagName());
                    } while (rs.next());
                } finally {
                    producer.close();
                }
            } else {
                gotRows = false;
            }
            if (newLsn != null) {
                synchronized (batchSqlServerLsn) {
                    batchSqlServerLsn.put(lsnKey(), newLsn);
                }
            }
        } catch (SQLException throwables) {
            logger.error("获取 ResultSet 失败 " + throwables);
        } finally {
            druid.close(rs, stmt, conn);
        }
        // No new increments: back off before the next poll.
        if (!gotRows) {
            sleepQuietly(3000);
        }
        return true;
    }

}


