package com.stone.framework.batch.service;

import java.util.List;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Executor;

import javax.sql.DataSource;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.item.database.BeanPropertyItemSqlParameterSourceProvider;
import org.springframework.batch.item.database.ItemSqlParameterSourceProvider;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.jdbc.core.BeanPropertyRowMapper;
import org.springframework.jdbc.core.RowMapper;
import org.springframework.jdbc.core.namedparam.MapSqlParameterSource;
import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate;
import org.springframework.jdbc.core.namedparam.SqlParameterSource;
import org.springframework.jdbc.support.incrementer.OracleSequenceMaxValueIncrementer;
import org.springframework.util.Assert;

/**
 * Base class for services that perform bulk JDBC work (batched DML, simple queries,
 * sequence fetches) through a {@link NamedParameterJdbcTemplate}.
 *
 * <p>Subclasses must inject a {@link DataSource} via {@link #setDataSource(DataSource)};
 * the template and parameter-source provider are created in {@link #afterPropertiesSet()}.
 *
 * <p>Thread-safety: the template is created once and is itself thread-safe; this class
 * holds no other mutable state after initialization.
 */
public abstract class JdbcBatchService implements InitializingBean {

    private static final Logger LOGGER = LoggerFactory.getLogger(JdbcBatchService.class);

    /** Maximum number of SQL characters echoed in the timing log line. */
    private static final int SQL_LOG_PREVIEW_LENGTH = 30;

    private DataSource dataSource;

    private NamedParameterJdbcTemplate jdbcTemplate;

    private ItemSqlParameterSourceProvider<Object> itemSqlParameterSourceProvider;

    /**
     * Executes {@code sql} once per element of {@code batchArgs} as a single JDBC batch,
     * binding each element's bean properties as named parameters.
     *
     * @param sql       named-parameter SQL statement (e.g. {@code INSERT ... VALUES (:name)})
     * @param batchArgs one bean per batch row; an empty list results in an empty batch
     * @param <S>       bean type whose properties supply the named parameters
     */
    protected <S> void batchExecute(String sql, List<S> batchArgs) {
        SqlParameterSource[] parameterSources = batchArgs.stream()
            .map(itemSqlParameterSourceProvider::createSqlParameterSource)
            .toArray(SqlParameterSource[]::new);

        LOGGER.info("batchExecute size: {}, sql: {}", batchArgs.size(), sql);
        long startTime = System.currentTimeMillis();
        jdbcTemplate.batchUpdate(sql, parameterSources);
        long endTime = System.currentTimeMillis();
        // abbreviate() guards against StringIndexOutOfBoundsException when the SQL
        // is shorter than the preview length (the old substring(0, 30) threw here).
        LOGGER.info("batchExecute sql: {}, time: {} ms", abbreviate(sql), endTime - startTime);
    }

    /**
     * Asynchronous variant of {@link #batchExecute(String, List)} running on the
     * supplied executor.
     *
     * @return a future that completes when the batch finishes, or completes
     *         exceptionally with a {@link RuntimeException} wrapping the failure
     */
    protected <S> CompletableFuture<Void> batchExecute(Executor executor, String sql, List<S> batchArgs) {
        // NOTE: exceptionally receives a CompletionException wrapper; we rethrow so the
        // returned future still completes exceptionally (preserving the original cause).
        return CompletableFuture.runAsync(() -> this.batchExecute(sql, batchArgs), executor).exceptionally(ex -> {
            throw new RuntimeException(ex);
        });
    }

    /**
     * Runs a single-value count query, binding {@code entity}'s bean properties as
     * named parameters.
     *
     * @return the counted value; may be {@code null} if the query yields SQL NULL —
     *         callers auto-unboxing to {@code int} should guard against that
     */
    protected <S> Integer count(String sql, S entity) {
        SqlParameterSource sqlParameterSource = itemSqlParameterSourceProvider.createSqlParameterSource(entity);
        return jdbcTemplate.queryForObject(sql, sqlParameterSource, Integer.class);
    }

    /**
     * Queries a list of {@code S}, binding {@code entity}'s bean properties as named
     * parameters and mapping rows with the caller-supplied {@code rowMapper}.
     */
    protected <S> List<S> queryList(String sql, S entity, RowMapper<S> rowMapper) {
        SqlParameterSource sqlParameterSource = itemSqlParameterSourceProvider.createSqlParameterSource(entity);
        return jdbcTemplate.query(sql, sqlParameterSource, rowMapper);
    }

    /**
     * Queries a list of {@code clazz} instances, binding {@code entity}'s bean
     * properties as named parameters and mapping rows by matching column names to
     * bean properties.
     */
    protected <S> List<S> queryList(String sql, Object entity, Class<S> clazz) {
        SqlParameterSource sqlParameterSource = itemSqlParameterSourceProvider.createSqlParameterSource(entity);
        return jdbcTemplate.query(sql, sqlParameterSource, BeanPropertyRowMapper.newInstance(clazz));
    }

    /**
     * Queries a list of {@code clazz} instances using an explicit name-to-value
     * parameter map instead of a bean.
     */
    protected <S> List<S> queryList(String sql, Map<String, ?> paramMap, Class<S> clazz) {
        return jdbcTemplate.query(sql, new MapSqlParameterSource(paramMap), BeanPropertyRowMapper.newInstance(clazz));
    }

    /**
     * Fetches the next value of the named Oracle sequence.
     *
     * @param sequenceName the database sequence to advance and read
     */
    protected Long getSequence(String sequenceName) {
        OracleSequenceMaxValueIncrementer inc = new OracleSequenceMaxValueIncrementer(dataSource, sequenceName);
        return inc.nextLongValue();
    }

    /**
     * Validates the injected {@link DataSource} and builds the JDBC template and the
     * bean-property parameter-source provider used by all query/batch methods.
     */
    @Override
    public void afterPropertiesSet() throws Exception {
        Assert.notNull(this.dataSource, "dataSource must not be null");
        this.itemSqlParameterSourceProvider = new BeanPropertyItemSqlParameterSourceProvider<>();
        this.jdbcTemplate = new NamedParameterJdbcTemplate(this.dataSource);
    }

    public void setDataSource(DataSource dataSource) {
        this.dataSource = dataSource;
    }

    /**
     * Returns {@code sql} unchanged when it fits within {@link #SQL_LOG_PREVIEW_LENGTH}
     * characters, otherwise the first {@code SQL_LOG_PREVIEW_LENGTH} characters followed
     * by an ellipsis. Safe for any non-null input length.
     */
    private static String abbreviate(String sql) {
        return sql.length() <= SQL_LOG_PREVIEW_LENGTH
            ? sql
            : sql.substring(0, SQL_LOG_PREVIEW_LENGTH).concat("...");
    }

}
