package com.iwhalecloud.uncc.dao.impl;

import com.iwhalecloud.uncc.common.ArchiverConstant;
import com.iwhalecloud.uncc.common.ArchiverThreadLocal;
import com.iwhalecloud.uncc.dao.ArchiverDao;
import com.iwhalecloud.uncc.db.DatasourceContextHolder;
import com.iwhalecloud.uncc.domain.ArchiveParam;
import com.iwhalecloud.uncc.model.ScalServiceExpanseLog;
import com.iwhalecloud.uncc.utils.BatchUtil;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.jdbc.core.BeanPropertyRowMapper;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.core.RowMapper;
import org.springframework.stereotype.Repository;

import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.List;

/**
 * Archiver DAO for the SCAL_SERVICE_EXPANSE_LOG table.
 *
 * <p>Reads "online" (not yet archived) expanse-log rows for a time window taken from the
 * thread-local {@link ArchiveParam}, copies them into the archive table, flips their
 * ARCHIVER_FLAG, and deletes them from the source partition. Batch statements are executed
 * through the project {@code BatchUtil} helper, which expands the {@code (:i)} placeholder
 * and binds bean properties by {@code :name}.
 *
 * @author zq
 * @date 2021/10/26 23:35
 * @description
 */
@Repository("scalServiceExpanseLogArchiverDao")
public class ScalServiceExpanseLogArchiverDaoImpl implements ArchiverDao {

    @Autowired
    private JdbcTemplate jdbcTemplate;

    /**
     * Restricts a statement to the partition bound to the current datasource context.
     * Kept as plain concatenation to produce SQL byte-identical to the historical queries.
     * NOTE(review): presumably getDs() yields a numeric/safe partition code — confirm; if it can
     * ever carry external input this must become a bind parameter.
     */
    private static String partitionFilter() {
        return " AND PARTITION_CODE = " + DatasourceContextHolder.getDs();
    }

    /** Single-row lookup is not used by this archiver; always returns {@code null}. */
    @Override
    public Object query(Object o) {
        return null;
    }

    /**
     * Loads online expanse-log rows for one service inside the thread-local archive window.
     *
     * @param o the SERVICE_ID filter value
     * @return matching rows, or {@code null} when none were found (existing ArchiverDao
     *         contract — callers appear to null-check, so this is kept for compatibility)
     */
    @Override
    public List queryList(Object o) {
        String sql = "SELECT * FROM SCAL_SERVICE_EXPANSE_LOG WHERE SERVICE_ID = ? AND CREATE_TIME BETWEEN ? AND ? AND ARCHIVER_FLAG = " + ArchiverConstant.ONLINE;
        ArchiveParam archiveParam = ArchiverThreadLocal.getArchiveParam();
        // (rowMapper, args...) overload: the Object[] variant is deprecated since Spring 5.3.
        List<ScalServiceExpanseLog> list = jdbcTemplate.query(sql,
                new BeanPropertyRowMapper<>(ScalServiceExpanseLog.class),
                o, archiveParam.getStartDate(), archiveParam.getEndDate());
        return list.isEmpty() ? null : list;
    }

    /**
     * Loads online expanse-log rows for a batch of service ids, capped at
     * {@code list.size() * eachInsLines} rows per call.
     *
     * @param list service ids bound into the {@code (:i)} placeholder by {@code BatchUtil}
     * @return the mapped rows
     */
    @Override
    public List batchQuery(List list) {
        ArchiveParam archiveParam = ArchiverThreadLocal.getArchiveParam();
        // Upper bound on rows fetched in one pass: one "page" of eachInsLines per service id.
        int limit = list.size() * archiveParam.getEachInsLines();
        StringBuilder sql = new StringBuilder("SELECT");
        sql.append(" `ABSTRACT_ID`, `BATCH_ID`, `CREATE_TIME`, `RESULT`, `DESCRIPTION`, `HOST`, `SERVICE_ID`, `REASON`, `PATTERN`, `STRATEGY_ID`, `PARTITION_CODE`");
        sql.append(" FROM SCAL_SERVICE_EXPANSE_LOG WHERE SERVICE_ID in (:i)");
        // NOTE(review): window dates are concatenated into the SQL text (MySQL double-quoted
        // literals). They come from internal ArchiveParam configuration, not user input, but they
        // should move to bind parameters if BatchUtil ever supports extra named params.
        sql.append(" AND CREATE_TIME BETWEEN \"" + archiveParam.getStartDate() + "\" AND \"" + archiveParam.getEndDate() + "\"");
        sql.append(" AND ARCHIVER_FLAG = " + ArchiverConstant.ONLINE);
        sql.append(partitionFilter());
        sql.append(" LIMIT " + limit);
        // Typed RowMapper: the original used a raw type, producing an unchecked conversion.
        return BatchUtil.batchQueryBeans(sql, list, new RowMapper<ScalServiceExpanseLog>() {
            @Override
            public ScalServiceExpanseLog mapRow(ResultSet resultSet, int rowNum) throws SQLException {
                ScalServiceExpanseLog row = new ScalServiceExpanseLog();
                row.setAbstractId(resultSet.getInt("ABSTRACT_ID"));
                row.setBatchId(resultSet.getInt("BATCH_ID"));
                row.setCreateTime(resultSet.getTimestamp("CREATE_TIME"));
                row.setResult(resultSet.getInt("RESULT"));
                row.setDescription(resultSet.getString("DESCRIPTION"));
                row.setHost(resultSet.getString("HOST"));
                row.setServiceId(resultSet.getInt("SERVICE_ID"));
                row.setReason(resultSet.getString("REASON"));
                row.setPattern(resultSet.getInt("PATTERN"));
                row.setStrategyId(resultSet.getString("STRATEGY_ID"));
                row.setPartitionCode(resultSet.getString("PARTITION_CODE"));
                return row;
            }
        });
    }

    /** Single-row insert is not used by this archiver; intentionally a no-op. */
    @Override
    public void insert(Object o) {

    }

    /**
     * Copies a batch of rows into the archive table {@code scal_service_expanse_log},
     * stamping ARCHIVER_TIME; bean properties are bound by name via {@code BatchUtil}.
     *
     * @param list {@code ScalServiceExpanseLog} beans to archive
     */
    @Override
    public void batchInsert(List list) {
        StringBuilder sql = new StringBuilder("INSERT INTO `scal_service_expanse_log`");
        sql.append(" (`ABSTRACT_ID`, `BATCH_ID`, `CREATE_TIME`, `RESULT`, `DESCRIPTION`, `HOST`, `SERVICE_ID`, `REASON`, `PATTERN`, `STRATEGY_ID`, `PARTITION_CODE`, `ARCHIVER_TIME`)");
        sql.append(" VALUES (:abstractId,:batchId,:createTime,:result,:description,:host,:serviceId,:reason,:pattern,:strategyId,:partitionCode,:archiverTime)");
        BatchUtil.batchUpdateBeans(sql, list);
    }

    /** Single-row flag update is not used by this archiver; intentionally a no-op. */
    @Override
    public void update(int flag, Object o) {

    }

    /**
     * Marks a batch of source rows with the given archive flag, keyed by ABSTRACT_ID
     * within the current partition.
     *
     * @param flag new ARCHIVER_FLAG value
     * @param list {@code ScalServiceExpanseLog} beans supplying {@code :abstractId}
     */
    @Override
    public void batchUpdate(int flag, List list) {
        StringBuilder sql = new StringBuilder("UPDATE SCAL_SERVICE_EXPANSE_LOG SET ARCHIVER_FLAG = " + flag + " WHERE ABSTRACT_ID = :abstractId");
        sql.append(partitionFilter());
        BatchUtil.batchUpdateBeans(sql, list);
    }

    /** Single-row delete is not used by this archiver; intentionally a no-op. */
    @Override
    public void delete(Object o) {

    }

    /**
     * Deletes a batch of archived rows from the source table, keyed by ABSTRACT_ID
     * within the current partition.
     *
     * @param list {@code ScalServiceExpanseLog} beans supplying {@code :abstractId}
     */
    @Override
    public void batchDelete(List list) {
        StringBuilder sql = new StringBuilder("DELETE FROM SCAL_SERVICE_EXPANSE_LOG WHERE ABSTRACT_ID = :abstractId");
        sql.append(partitionFilter());
        BatchUtil.batchUpdateBeans(sql, list);
    }
}
