package com.ruoyi.model.service.impl;

import java.io.*;
import java.nio.file.Files;
import java.sql.*;
import java.util.*;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.ruoyi.common.config.OCRConfig;
import com.ruoyi.common.config.RuoYiConfig;
import com.ruoyi.common.constant.Constants;
import com.ruoyi.common.core.domain.entity.SysDictData;
import com.ruoyi.common.exception.ServiceException;
import com.ruoyi.common.exception.base.BaseException;
import com.ruoyi.common.utils.SecurityUtils;
import com.ruoyi.common.utils.file.FileUploadUtils;
import com.ruoyi.common.utils.file.FileUtils;
import com.ruoyi.model.domain.*;
import com.ruoyi.model.mapper.DbInfoMapper;
import com.ruoyi.model.mapper.ModelInfoMapper;
import com.ruoyi.pdf.service.IRecognizePdfService;
import com.zaxxer.hikari.HikariConfig;
import com.zaxxer.hikari.HikariDataSource;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import com.ruoyi.model.mapper.TableInfoMapper;
import com.ruoyi.model.service.ITableInfoService;
import org.springframework.web.multipart.MultipartFile;

import javax.annotation.Resource;

/**
 * Service implementation for dynamic table management.
 *
 * Responsibilities: creates physical MySQL tables from model column
 * definitions, ingests OCR-recognized PDF data into those tables, supports
 * paged querying with filters, and allows re-processing of failed uploads.
 *
 * All statements against the user-configured target databases are executed
 * through per-database Hikari pools obtained from {@code DBPool}. Filter
 * VALUES are bound as JDBC parameters; identifiers (schema/table/column
 * names) come from administrator-managed metadata and cannot be bound, so
 * they are concatenated — NOTE(review): confirm that model/table names are
 * restricted to trusted administrators.
 *
 * @author genghz
 * @date 2021-10-20
 */
@Service
public class TableInfoServiceImpl implements ITableInfoService
{
    @Autowired
    private TableInfoMapper tableInfoMapper;

    @Autowired
    private ModelInfoMapper modelInfoMapper;

    @Resource
    private DbInfoMapper dbInfoMapper;

    @Resource
    OCRConfig ocrConfig;

    @Resource
    IRecognizePdfService iRecognizePdfService;

    /**
     * Looks up table metadata by primary key.
     *
     * @param tableId table metadata primary key
     * @return the table info, or null when not found
     */
    @Override
    public TableInfo selectTableInfoByTableId(Long tableId)
    {
        return tableInfoMapper.selectTableInfoByTableId(tableId);
    }

    /**
     * Lists table metadata matching the given filter.
     *
     * @param tableInfo filter criteria
     * @return matching table info records
     */
    @Override
    public List<TableInfo> selectTableInfoList(TableInfo tableInfo)
    {
        return tableInfoMapper.selectTableInfoList(tableInfo);
    }

    /**
     * Lists up to 10 physical table names from the target database whose name
     * contains the given fragment (case-insensitive), returned as dictionary
     * options where label == value == table_name.
     *
     * @param tableInfo carries the target dbId and the table-name fragment
     * @return matching tables as dictionary entries; empty when input is incomplete
     */
    @Override
    public List<SysDictData> selectTableOptions(TableInfo tableInfo)
    {
        List<SysDictData> res = new ArrayList<>();
        if (Objects.isNull(tableInfo) || Objects.isNull(tableInfo.getDbId())) {
            return res;
        }

        DbInfo dbInfo = dbInfoMapper.selectDbInfoByDbId(tableInfo.getDbId());
        if (Objects.isNull(dbInfo)) {
            return res;
        }
        HikariDataSource dbPool = DBPool.getInstance().getDBPool(tableInfo.getDbId());

        // Values are bound as parameters to prevent SQL injection through the
        // schema name / table-name fragment.
        String sql = "select table_name, table_comment, create_time, update_time from information_schema.tables"
                + " where table_schema = ?"
                + " and lower(table_name) like lower(concat('%', ?, '%')) limit 10";
        try (Connection connection = dbPool.getConnection();
             PreparedStatement ps = connection.prepareStatement(sql)) {
            ps.setString(1, dbInfo.getDefaultSchema());
            ps.setString(2, StringUtils.defaultString(tableInfo.getTableName()));
            try (ResultSet tables = ps.executeQuery()) {
                while (tables.next()) {
                    SysDictData sysDictData = new SysDictData();
                    sysDictData.setDictLabel(tables.getString(1));
                    sysDictData.setDictValue(tables.getString(1));
                    res.add(sysDictData);
                }
            }
            return res;
        } catch (SQLException e) {
            throw new BaseException("数据库信息异常");
        }
    }

    /**
     * Creates the physical table described by the model columns, then records
     * the table metadata row.
     *
     * Column type mapping: "1" = datetime, "2" = bigint, "3" = float,
     * "4" (and any other value) = varchar(200). System columns _rowid,
     * _file_id, _create_time and _log_id are always added.
     *
     * @param tableInfo table metadata (dbId, modelId, tableName)
     * @return rows inserted into the metadata table
     */
    @Override
    public int insertTableInfo(TableInfo tableInfo)
    {
        HikariDataSource dbPool = DBPool.getInstance().getDBPool(tableInfo.getDbId());
        DbInfo dbInfo = dbInfoMapper.selectDbInfoByDbId(tableInfo.getDbId());
        ModelInfo modelInfo = modelInfoMapper.selectModelInfoByModelId(tableInfo.getModelId());
        List<ModelColumnInfo> modelColumnInfoList = modelInfo.getModelColumnInfoList();

        StringBuilder sql = new StringBuilder();
        sql.append("create table ")
           .append(schemaPrefix(dbInfo))
           .append("`").append(tableInfo.getTableName()).append("` (");
        StringJoiner columns = new StringJoiner(",");
        columns.add("_rowid bigint(16) primary key auto_increment");
        for (ModelColumnInfo modelColumnInfo : modelColumnInfoList) {
            String type;
            switch (modelColumnInfo.getColumnType()) {
                case "1": // date/time
                    type = " datetime";
                    break;
                case "2": // integer
                    type = " bigint";
                    break;
                case "3": // floating point
                    type = " float";
                    break;
                case "4": // string
                default:  // unknown types fall back to varchar, matching the original default
                    type = " varchar(200)";
                    break;
            }
            columns.add("`" + modelColumnInfo.getColumnCode() + "`" + type + " comment '" + modelColumnInfo.getColumnName() + "' ");
        }
        columns.add("_file_id bigint(16)  comment '符件ID'");
        columns.add("_create_time datetime comment '数据生成时间'");
        columns.add("_log_id bigint(16)  comment '数据生成日志ID，主要用于关联对应的文件信息'");
        sql.append(columns).append(") default charset utf8mb4 engine=myisam");

        executeUpdateSql(dbPool, sql.toString());
        return tableInfoMapper.insertTableInfo(tableInfo);
    }

    /**
     * Updates table metadata.
     *
     * @param tableInfo metadata to update
     * @return affected rows
     */
    @Override
    public int updateTableInfo(TableInfo tableInfo)
    {
        return tableInfoMapper.updateTableInfo(tableInfo);
    }

    /**
     * Deletes table metadata rows in bulk.
     *
     * @param tableIds primary keys to delete
     * @return affected rows
     */
    @Override
    public int deleteTableInfoByTableIds(Long[] tableIds)
    {
        return tableInfoMapper.deleteTableInfoByTableIds(tableIds);
    }

    /**
     * Deletes one table metadata row.
     *
     * @param tableId primary key to delete
     * @return affected rows
     */
    @Override
    public int deleteTableInfoByTableId(Long tableId)
    {
        return tableInfoMapper.deleteTableInfoByTableId(tableId);
    }

    /**
     * Uploads a batch of PDF files: stores each file, runs OCR recognition and
     * inserts the recognized rows into the table's backing database table.
     * Per-file failures are recorded in the upload detail log (status 3) and
     * do not abort the batch.
     *
     * @param pdfFiles  files to process
     * @param control   2 = truncate the target table before loading, otherwise append
     * @param fileNames original file names (currently unused here)
     * @param tableId   target table id
     * @return always true; per-file outcomes live in the upload logs
     */
    @Override
    public boolean uploadData(MultipartFile[] pdfFiles, Integer control, String[] fileNames, Long tableId) {
        TableInfo tableInfo = tableInfoMapper.selectTableInfoByTableId(tableId);
        HikariDataSource dbPool = DBPool.getInstance().getDBPool(tableInfo.getDbId());
        DbInfo dbInfo = dbInfoMapper.selectDbInfoByDbId(tableInfo.getDbId());

        TableBatchUploadLog tableBatchUploadLog = writeBatchLog(control, tableId, tableInfo);
        Long batchId = tableBatchUploadLog.getBatchId();

        flushTable(control, tableInfo, dbPool, dbInfo);

        for (MultipartFile pdfFile : pdfFiles) {
            byte[] bytes = new byte[0];
            try {
                bytes = pdfFile.getBytes();
            } catch (IOException ignored) {
                // Best effort: with empty bytes the OCR call below fails and
                // the per-file log is marked as failed (status 3).
            }
            // Persist the uploaded file and its metadata row
            SysFileInfo sysFileInfo = uploadFile(pdfFile);

            // Per-file upload log, status 1 = received
            DataUploadLog dataUploadLog = writeFileUploadDetailLog(batchId, sysFileInfo);
            long logId = dataUploadLog.getLogId();

            // OCR recognition; ocr is an array of row-arrays
            String pdfbase64 = iRecognizePdfService.encodeBase64(bytes);
            JSONArray ocr = null;
            try {
                ocr = iRecognizePdfService.recognizePdf(ocrConfig.getUrl(), pdfbase64);
                dataUploadLog.setStatus(2); // recognized
                dataUploadLog.setRecognizeResult(ocr.toString());
                dataUploadLog.setFileCount((long) ocr.size());
            } catch (Exception e) {
                dataUploadLog.setStatus(3); // recognition failed
            }
            if (Objects.isNull(ocr)) {
                dataUploadLog.setStatus(3);
            }
            tableInfoMapper.updateTableUploadDetailLogStatus(dataUploadLog);

            if (Objects.isNull(ocr)) {
                continue; // nothing to insert for this file
            }
            // Insert the recognized rows via a parameterized INSERT
            ModelInfo modelInfo = modelInfoMapper.selectModelInfoByModelId(tableInfo.getModelId());
            String sql = createPrepareSql(logId, modelInfo, tableInfo, dbInfo);
            inDB(dbPool, dataUploadLog, 0, ocr, sql);
        }

        return true;
    }

    /**
     * Inserts every recognized row ({@code ocr} is an array of row-arrays) via
     * the prepared INSERT, then marks the detail log as loaded (status 4) or
     * failed (status 5). Throws BaseException on SQL failure after logging.
     *
     * @param count starting row counter (rows already written before this call)
     */
    private void inDB(HikariDataSource dbPool, DataUploadLog dataUploadLog, long count, JSONArray ocr, String sql) {
        try (Connection connection = dbPool.getConnection();
             PreparedStatement preparedStatement = connection.prepareStatement(sql)) {
            for (int i = 0; i < ocr.size(); i++) {
                count++;
                JSONArray row = (JSONArray) ocr.get(i);
                for (int j = 0; j < row.size(); j++) {
                    preparedStatement.setObject(j + 1, row.get(j));
                }
                preparedStatement.executeUpdate();
            }
            // Commit explicitly only when the pool hands out non-auto-commit connections
            if (!connection.getAutoCommit()) {
                connection.commit();
            }
            dataUploadLog.setStatus(4);
            dataUploadLog.setDbCount(count);
            tableInfoMapper.updateTableUploadDetailLogStatus(dataUploadLog);
        } catch (SQLException e) {
            dataUploadLog.setDbCount(count);
            dataUploadLog.setStatus(5);
            tableInfoMapper.updateTableUploadDetailLogStatus(dataUploadLog);
            throw new BaseException(e.getMessage());
        }
    }

    /**
     * Creates the per-file upload detail log row (status 1 = received).
     */
    private DataUploadLog writeFileUploadDetailLog(Long batchId, SysFileInfo sysFileInfo) {
        DataUploadLog dataUploadLog = new DataUploadLog();
        dataUploadLog.setBatchId(batchId);
        dataUploadLog.setFileId(sysFileInfo.getFileId());
        dataUploadLog.setStatus(1);
        dataUploadLog.setCreateBy(SecurityUtils.getUsername());
        tableInfoMapper.insertTableUploadDetailLog(dataUploadLog);
        return dataUploadLog;
    }

    /**
     * Creates the batch-level upload log row; the generated batchId is
     * populated on the returned object by the mapper insert.
     */
    private TableBatchUploadLog writeBatchLog(Integer control, Long tableId, TableInfo tableInfo) {
        TableBatchUploadLog tableBatchUploadLog = new TableBatchUploadLog();
        tableBatchUploadLog.setTableId(tableId);
        tableBatchUploadLog.setTableName(tableInfo.getTableName());
        tableBatchUploadLog.setDbId(tableInfo.getDbId());
        tableBatchUploadLog.setModelId(tableInfo.getModelId());
        tableBatchUploadLog.setCreateBy(SecurityUtils.getUsername());
        tableBatchUploadLog.setFlag(control);
        tableInfoMapper.insertTableBatchUploadLog(tableBatchUploadLog);
        return tableBatchUploadLog;
    }

    /**
     * Stores the uploaded file on disk (under the avatar path) and records a
     * file-info row; the generated fileId is populated by the mapper insert.
     *
     * @throws ServiceException when the file cannot be written
     */
    private SysFileInfo uploadFile(MultipartFile pdfFile) {
        String path;
        long size = pdfFile.getSize();
        try {
            path = FileUploadUtils.upload(RuoYiConfig.getAvatarPath(), pdfFile);
        } catch (IOException e) {
            throw new ServiceException("文件上传失败");
        }

        SysFileInfo sysFileInfo = new SysFileInfo();
        sysFileInfo.setFileName(pdfFile.getOriginalFilename());
        sysFileInfo.setFilePath(path);
        sysFileInfo.setContextType(pdfFile.getContentType());
        sysFileInfo.setSize(size);
        tableInfoMapper.insertFileInfo(sysFileInfo);
        return sysFileInfo;
    }

    /**
     * Truncates the target table when control == 2 (replace mode); no-op
     * otherwise. Null-safe on control.
     */
    private void flushTable(Integer control, TableInfo tableInfo, HikariDataSource dbPool, DbInfo dbInfo) {
        if (Integer.valueOf(2).equals(control)) {
            String sql = "truncate table " + schemaPrefix(dbInfo) + tableInfo.getTableName();
            executeUpdateSql(dbPool, sql);
        }
    }

    /**
     * Pages through the data table, optionally filtered by creation-date
     * range, attachment presence and a keyword searched across all string
     * (type "4") columns. All filter VALUES are bound as JDBC parameters.
     *
     * @param tableId     target table id
     * @param currentPage 1-based page number
     * @param pageSize    rows per page
     * @param beginTime   inclusive lower bound on _create_time date (yyyy-MM-dd), optional
     * @param endTime     inclusive upper bound on _create_time date (yyyy-MM-dd), optional
     * @param fileFlag    "1" = only rows with an attachment, "0" = only rows without, else no filter
     * @param keywords    case-insensitive substring match over string columns, optional
     * @return map with "data" (rows including _rowid/_file_id/_file_name/_file_path)
     *         and "count" (total matching rows)
     */
    @Override
    public Map<String, Object> queryData(Long tableId, Integer currentPage, Integer pageSize, String beginTime, String endTime, String fileFlag, String keywords) {
        TableInfo tableInfo = tableInfoMapper.selectTableInfoByTableId(tableId);
        HikariDataSource dbPool = DBPool.getInstance().getDBPool(tableInfo.getDbId());
        DbInfo dbInfo = dbInfoMapper.selectDbInfoByDbId(tableInfo.getDbId());
        ModelInfo modelInfo = modelInfoMapper.selectModelInfoByModelId(tableInfo.getModelId());
        List<ModelColumnInfo> modelColumnInfoList = modelInfo.getModelColumnInfoList();
        Map<String, Object> res = new HashMap<>();

        // Build the WHERE clause with "?" placeholders; values go into params
        // in the same order the placeholders appear.
        StringJoiner where = new StringJoiner(" and ");
        List<Object> params = new ArrayList<>();
        if (StringUtils.isNotEmpty(beginTime)) {
            where.add("date_format(_create_time,'%Y-%m-%d') >= ?");
            params.add(beginTime);
        }
        if (StringUtils.isNotEmpty(endTime)) {
            where.add("date_format(_create_time,'%Y-%m-%d') <= ?");
            params.add(endTime);
        }
        if ("1".equals(fileFlag)) {
            where.add(" _file_id is not null");
        } else if ("0".equals(fileFlag)) {
            where.add(" _file_id is null");
        }

        StringJoiner columns = new StringJoiner(",");
        StringJoiner subWhere = new StringJoiner(" or ");
        for (ModelColumnInfo modelColumnInfo : modelColumnInfoList) {
            columns.add("`" + modelColumnInfo.getColumnCode() + "`");
            // Keyword search only applies to string (type "4") columns.
            if (StringUtils.isNotEmpty(keywords) && "4".equalsIgnoreCase(modelColumnInfo.getColumnType())) {
                subWhere.add("upper(" + modelColumnInfo.getColumnCode() + ") like ? ");
                params.add("%" + keywords.toUpperCase() + "%");
            }
        }
        // Guard: skip the keyword predicate entirely when no string column
        // exists, instead of emitting an invalid empty "()" group.
        if (StringUtils.isNotEmpty(keywords) && subWhere.length() > 0) {
            where.add("(" + subWhere + ")");
        }

        columns.add("`_rowid`");
        String from = " from " + schemaPrefix(dbInfo) + tableInfo.getTableName();
        String whereSql = where.toString();
        if (StringUtils.isNotEmpty(whereSql)) {
            whereSql = " where " + whereSql;
        }
        String countSql = "select count(1)" + from + whereSql;
        // Page bounds are integers supplied by the framework; safe to inline.
        String dataSql = "select " + columns + ",_file_id" + from + whereSql
                + " limit " + ((currentPage - 1) * pageSize) + "," + pageSize;

        try (Connection connection = dbPool.getConnection()) {
            int total = 0;
            try (PreparedStatement ps = connection.prepareStatement(countSql)) {
                bindParams(ps, params);
                try (ResultSet count = ps.executeQuery()) {
                    while (count.next()) {
                        total = count.getInt(1);
                    }
                }
            }

            List<Map<String, Object>> data = new ArrayList<>();
            try (PreparedStatement ps = connection.prepareStatement(dataSql)) {
                bindParams(ps, params);
                try (ResultSet resultSet = ps.executeQuery()) {
                    while (resultSet.next()) {
                        Map<String, Object> map = new HashMap<>();
                        for (ModelColumnInfo modelColumnInfo : modelColumnInfoList) {
                            String label = modelColumnInfo.getColumnCode();
                            map.put(label, resultSet.getObject(label));
                        }
                        map.put("_rowid", resultSet.getObject("_rowid"));
                        map.put("_file_id", resultSet.getObject("_file_id"));
                        Long fileId = resultSet.getLong("_file_id");
                        String fileName = "";
                        String filePath = "";
                        SysFileInfo fileInfo = tableInfoMapper.getFileInfo(fileId);
                        if (!Objects.isNull(fileInfo)) {
                            fileName = fileInfo.getFileName();
                            filePath = fileInfo.getFilePath();
                        }
                        map.put("_file_name", fileName);
                        map.put("_file_path", filePath);
                        data.add(map);
                    }
                }
            }
            res.put("data", data);
            res.put("count", total);
            return res;
        } catch (SQLException e) {
            throw new BaseException(e.getMessage());
        }
    }

    /**
     * Deletes one data row by its system row id.
     *
     * @return affected rows
     */
    @Override
    public int removeRowEvent(Long tableId, Long _rowId) {
        TableInfo tableInfo = tableInfoMapper.selectTableInfoByTableId(tableId);
        HikariDataSource dbPool = DBPool.getInstance().getDBPool(tableInfo.getDbId());
        DbInfo dbInfo = dbInfoMapper.selectDbInfoByDbId(tableInfo.getDbId());
        // _rowId is a numeric primary key; concatenation is not injectable here.
        String sql = "delete from " + schemaPrefix(dbInfo) + tableInfo.getTableName() + " where _rowid = " + _rowId;
        return executeUpdateSql(dbPool, sql);
    }

    /**
     * Attaches an uploaded file to one data row by storing the file and
     * writing its generated id into the row's _file_id column.
     *
     * @return affected rows
     */
    @Override
    public int uploadFileEvent(MultipartFile file, Long tableId, Long _rowId) {
        TableInfo tableInfo = tableInfoMapper.selectTableInfoByTableId(tableId);
        HikariDataSource dbPool = DBPool.getInstance().getDBPool(tableInfo.getDbId());
        DbInfo dbInfo = dbInfoMapper.selectDbInfoByDbId(tableInfo.getDbId());

        SysFileInfo sysFileInfo = uploadFile(file);

        // Both ids are numeric; concatenation is not injectable here.
        String sql = "update " + schemaPrefix(dbInfo) + tableInfo.getTableName()
                + " set _file_id = " + sysFileInfo.getFileId()
                + " where _rowid = " + _rowId;
        return executeUpdateSql(dbPool, sql);
    }

    /**
     * Runs a single DDL/DML statement on the target pool, committing when the
     * connection is not in auto-commit mode.
     *
     * @return affected rows
     * @throws ServiceException wrapping any SQL failure
     */
    private int executeUpdateSql(HikariDataSource dbPool, String sql) {
        try (Connection connection = dbPool.getConnection();
             Statement statement = connection.createStatement()) {
            int affected = statement.executeUpdate(sql);
            if (!connection.getAutoCommit()) {
                connection.commit();
            }
            return affected;
        } catch (SQLException e) {
            throw new ServiceException(e.getMessage());
        }
    }

    /**
     * Lists batch-level upload logs matching the filter.
     */
    @Override
    public List<Map<Object, Object>> getTableBatchUploadLogInfo(TableBatchUploadLog tableBatchUploadLog) {
        return tableInfoMapper.getTableBatchUploadLogInfo(tableBatchUploadLog);
    }

    /**
     * Lists per-file upload detail logs for one batch.
     */
    @Override
    public List<DataUploadLog> getTableUploadDetailLogByBatchId(Long batchId) {
        return tableInfoMapper.getTableUploadDetailLogByBatchId(batchId);
    }

    /**
     * Removes one upload detail log.
     *
     * @return always 1
     */
    @Override
    public int removeLog(Long logId) {
        tableInfoMapper.removeLog(logId);
        return 1;
    }

    /**
     * Re-processes a previous upload: deletes the rows the log produced,
     * runs OCR on the stored file again and inserts the fresh result.
     *
     * @param logId detail-log id to re-process
     * @return 1 on completion (including the missing-file-record case),
     *         0 when recognition produced no result
     * @throws ServiceException when the log or the stored file is missing or unreadable
     */
    @Override
    public int restocking(Long logId) {
        DataUploadLog dataUploadLog = tableInfoMapper.getTableUploadDetailLogById(logId);
        if (Objects.isNull(dataUploadLog)) {
            // Guard first: the original dereferenced the log before null-checking it.
            throw new ServiceException("上传日志不存在!");
        }
        TableBatchUploadLog tableBatchUploadLog = tableInfoMapper.getTableBatchUploadLogById(dataUploadLog.getBatchId());

        ModelInfo modelInfo = modelInfoMapper.selectModelInfoByModelId(tableBatchUploadLog.getModelId());
        TableInfo tableInfo = tableInfoMapper.selectTableInfoByTableId(tableBatchUploadLog.getTableId());
        DbInfo dbInfo = dbInfoMapper.selectDbInfoByDbId(tableInfo.getDbId());
        HikariDataSource dbPool = DBPool.getInstance().getDBPool(tableInfo.getDbId());

        if (Objects.isNull(dataUploadLog.getFileInfo())) {
            dataUploadLog.setStatus(-1); // file record missing, nothing to re-run
            tableInfoMapper.updateTableUploadDetailLogStatus(dataUploadLog);
            return 1;
        }

        SysFileInfo fileInfo = dataUploadLog.getFileInfo();
        // Resolve the stored file under the local profile directory.
        String localPath = RuoYiConfig.getProfile();
        String downloadPath = localPath + com.ruoyi.common.utils.StringUtils.substringAfter(fileInfo.getFilePath(), Constants.RESOURCE_PREFIX);
        File file = new File(downloadPath);
        if (!file.exists()) {
            throw new ServiceException("文件不存在或者已被删除!");
        }

        // Run OCR again on the stored file
        JSONArray ocr = null;
        try {
            byte[] bytes = Files.readAllBytes(file.toPath());
            String pdfbase64 = iRecognizePdfService.encodeBase64(bytes);
            ocr = iRecognizePdfService.recognizePdf(ocrConfig.getUrl(), pdfbase64);
            dataUploadLog.setStatus(2);
            dataUploadLog.setRecognizeResult(ocr.toString());
            dataUploadLog.setFileCount((long) ocr.size());
            tableInfoMapper.updateTableUploadDetailLogStatus(dataUploadLog);
        } catch (IOException e) {
            dataUploadLog.setStatus(3);
            tableInfoMapper.updateTableUploadDetailLogStatus(dataUploadLog);
            throw new ServiceException("文件读取失败!");
        } catch (Exception e) {
            dataUploadLog.setStatus(3);
            tableInfoMapper.updateTableUploadDetailLogStatus(dataUploadLog);
        }
        if (Objects.isNull(ocr)) {
            return 0;
        }

        // Remove rows produced by the previous run, then insert the new result
        // via the same path as a fresh upload (inDB handles status 4/5).
        String deleteSql = "delete from " + schemaPrefix(dbInfo) + tableInfo.getTableName()
                + " where _log_id = " + logId;
        executeUpdateSql(dbPool, deleteSql);

        String sql = createPrepareSql(logId, modelInfo, tableInfo, dbInfo);
        inDB(dbPool, dataUploadLog, 0, ocr, sql);
        return 1;
    }

    /**
     * Builds the parameterized INSERT for recognized rows: one "?" per model
     * column, plus sysdate() for _create_time and the fixed _log_id.
     */
    private String createPrepareSql(Long logId, ModelInfo modelInfo, TableInfo tableInfo, DbInfo dbInfo) {
        List<ModelColumnInfo> modelColumnInfoList = modelInfo.getModelColumnInfoList();
        StringJoiner columns = new StringJoiner(",");
        for (ModelColumnInfo modelColumnInfo : modelColumnInfoList) {
            columns.add("`" + modelColumnInfo.getColumnCode() + "`");
        }
        columns.add("_create_time ");
        columns.add("_log_id ");

        return "insert into " + schemaPrefix(dbInfo) + tableInfo.getTableName()
                + "(" + columns + ")"
                + " values(" + StringUtils.repeat("?", ",", modelColumnInfoList.size()) + ", sysdate()," + logId + ")";
    }

    /**
     * Returns "schema." when a default schema is configured, else "".
     */
    private String schemaPrefix(DbInfo dbInfo) {
        return StringUtils.isNotEmpty(dbInfo.getDefaultSchema()) ? dbInfo.getDefaultSchema() + "." : "";
    }

    /**
     * Binds positional parameters onto a prepared statement (JDBC is 1-based).
     */
    private void bindParams(PreparedStatement ps, List<Object> params) throws SQLException {
        for (int i = 0; i < params.size(); i++) {
            ps.setObject(i + 1, params.get(i));
        }
    }

    /**
     * Manual smoke test for the Hikari pool configuration against a local
     * MySQL instance; not used by the application at runtime.
     */
    public static void main(String[] args) throws SQLException, InterruptedException {
        HikariConfig config = new HikariConfig();
        config.setDriverClassName("com.mysql.cj.jdbc.Driver");
        config.setJdbcUrl("jdbc:mysql://localhost:3306/ry-vue?useUnicode=true&characterEncoding=utf8&zeroDateTimeBehavior=convertToNull&useSSL=true&serverTimezone=GMT%2B8");
        config.setUsername("root");
        config.setPassword("root");
        config.addDataSourceProperty("cachePrepStmts", "true");
        config.addDataSourceProperty("prepStmtCacheSize", "250");
        config.addDataSourceProperty("prepStmtCacheSqlLimit", "2048");
        config.setKeepaliveTime(100L);
        config.setPoolName("Mysql-sync-pool-1");
        config.setMinimumIdle(3);
        config.setMaximumPoolSize(20);
        config.addDataSourceProperty("logWriter", new PrintWriter(System.out));

        try (HikariDataSource ds = new HikariDataSource(config)) {
            for (int i = 0; i < 100; i++) {
                try (Connection connection = ds.getConnection();
                     Statement statement = connection.createStatement();
                     ResultSet resultSet = statement.executeQuery("select sysdate()")) {
                    while (resultSet.next()) {
                        System.out.println(resultSet.getString(1));
                    }
                }
                Thread.sleep(1000);
            }

            System.out.println("执行完了！！");

            try (Connection connection = ds.getConnection();
                 Statement statement = connection.createStatement()) {
                statement.executeQuery("select 121");
            }
        }
    }
}
