package wiki.hitime.stl.daq.service.factory.impl.acquisiton.mysql;

import com.alibaba.druid.filter.logging.Slf4jLogFilter;
import com.alibaba.druid.pool.DruidDataSource;
import com.alibaba.druid.pool.DruidDataSourceFactory;
import com.mysql.jdbc.exceptions.jdbc4.MySQLSyntaxErrorException;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Service;
import wiki.hitime.stl.daq.bean.bo.DataDbConfigBO;
import wiki.hitime.stl.daq.enums.DbTypeEnum;
import wiki.hitime.stl.daq.service.factory.DataAcquisitionFactory;

import javax.annotation.Resource;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;

/**
 * @author zhangdaoping
 * @create 2019-07-09 17:06
 */
@Slf4j
@Service
public class MysqlDataAcquisitionImpl implements DataAcquisitionFactory {

    @Resource(name = "dbLogFilter")
    private Slf4jLogFilter slf4jLogFilter;

    /**
     * 根据url保存数据库连接源
     */
    private static ConcurrentHashMap<String, DruidDataSource> datasourceHashMap = new ConcurrentHashMap<>();

    /**
     * 获取支持的数据库类型
     *
     * @return 数据库类型枚举
     */
    @Override
    public DbTypeEnum getSupportDbType() {
        return DbTypeEnum.MySQL;
    }

    /**
     * 加载数据
     *
     * @param acquisitionExpression 采集表达式
     * @param dataDbConfigBO        数据源配置对象
     * @return
     */
    @Override
    public List<Object[]> loadData(String acquisitionExpression, DataDbConfigBO dataDbConfigBO) throws Exception {
        String[] columnArrays;
        try (
                Connection connection = this.loadConnection(dataDbConfigBO);
                Statement statement = connection.createStatement()
        ) {
            columnArrays = this.loadColumn(acquisitionExpression, statement);
        }

        try (
                Connection connection = this.loadConnection(dataDbConfigBO);
                Statement statement = connection.createStatement();
                ResultSet resultSet = statement.executeQuery(acquisitionExpression)
        ) {
            List<Object[]> dataList = new LinkedList<>();
            while (resultSet.next()) {
                Object[] row = new Object[columnArrays.length];
                for (int j = 0; j < columnArrays.length; j++) {
                    row[j] = resultSet.getObject(columnArrays[j]);
                }
                dataList.add(row);
            }
            return dataList;
        }
    }

    /**
     * 获取元数据
     *
     * @param subQuerySql
     * @param stat
     * @return
     * @throws Exception
     */
    private String[] loadColumn(String subQuerySql, Statement stat) throws Exception {
        String[] columnArrays;
        try {
            stat.setMaxRows(100);
            String columnSql = "SELECT * FROM (\n\t%s\n) rpt_view WHERE 1=0";
            String sql = String.format(columnSql, subQuerySql);
            ResultSet rs = stat.executeQuery(sql);
            ResultSetMetaData metaData = rs.getMetaData();
            columnArrays = new String[metaData.getColumnCount()];
            for (int i = 0; i < metaData.getColumnCount(); i++) {
                columnArrays[i] = metaData.getColumnLabel(i + 1);
            }
        } catch (Exception e) {
            log.error("获取元数据错误:" + e.getMessage());
            throw new MySQLSyntaxErrorException(e.getMessage());
        }
        return columnArrays;
    }

    /**
     * 获取连接对象
     *
     * @param config
     * @return
     * @throws Exception
     */
    private Connection loadConnection(DataDbConfigBO config) throws Exception {
        DruidDataSource oldDruidDataSource = datasourceHashMap.get(config.getConfigProperties().get("url"));
        if (oldDruidDataSource != null) {
            return oldDruidDataSource.getConnection();
        } else {
            DruidDataSource druidDataSource = (DruidDataSource) DruidDataSourceFactory.createDataSource(config.getConfigProperties());
            druidDataSource.setBreakAfterAcquireFailure(true);
            druidDataSource.setConnectionErrorRetryAttempts(3);
            druidDataSource.setProxyFilters(Collections.singletonList(slf4jLogFilter));
            datasourceHashMap.put(config.getConfigProperties().get("url"), druidDataSource);
            return druidDataSource.getConnection();
        }
    }
}
