package com.lhf.azkaban.springbatch.example.job.dao;

import com.lhf.azkaban.springbatch.example.job.comon.MyPage;
import org.springframework.batch.core.BatchStatus;
import org.springframework.batch.core.ExitStatus;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.repository.dao.JdbcJobExecutionDao;
import org.springframework.dao.IncorrectResultSizeDataAccessException;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.core.RowMapper;
import org.springframework.jdbc.support.incrementer.AbstractDataFieldMaxValueIncrementer;
import org.springframework.util.Assert;

import javax.sql.DataSource;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.*;

/**
 * @Author:95780
 * @Date: 18:08 2019/11/25
 * @Description: DAO for reading Spring Batch job-execution metadata (BATCH_JOB_EXECUTION /
 *               BATCH_JOB_INSTANCE) directly from the database, with paging and status filters.
 */
public class MyJdbcJobExecutionDao extends JdbcJobExecutionDao {

    /** Shared column list; alias E = BATCH_JOB_EXECUTION, I = BATCH_JOB_INSTANCE. */
    private static final String FIELDS = "E.JOB_EXECUTION_ID, E.START_TIME, E.END_TIME, E.STATUS, E.EXIT_CODE, E.EXIT_MESSAGE, "
            + "E.CREATE_TIME, E.LAST_UPDATED, E.VERSION, I.JOB_INSTANCE_ID, I.JOB_NAME";

    /** A "running" execution is one whose END_TIME has not been written yet. */
    private static final String GET_RUNNING_EXECUTIONS = "SELECT " + FIELDS
            + " from BATCH_JOB_EXECUTION E, BATCH_JOB_INSTANCE I "
            + "where E.JOB_INSTANCE_ID=I.JOB_INSTANCE_ID and E.END_TIME is NULL";

    private DataSource dataSource;

    /**
     * @param dataSource the dataSource to set
     */
    public void setDataSource(DataSource dataSource) {
        this.dataSource = dataSource;
    }

    /**
     * Builds a {@link JdbcTemplate} from the injected {@link DataSource} (unless one was
     * already set) and installs a stub incrementer before delegating to the superclass.
     *
     * @throws Exception propagated from {@link JdbcJobExecutionDao#afterPropertiesSet()}
     * @see JdbcJobExecutionDao#afterPropertiesSet()
     */
    @Override
    public void afterPropertiesSet() throws Exception {

        Assert.state(dataSource != null, "DataSource must be provided");

        if (getJdbcTemplate() == null) {
            setJdbcTemplate(new JdbcTemplate(dataSource));
        }
        // The superclass requires an incrementer, but this DAO only reads execution
        // rows and never inserts them, so a no-op stub satisfies the contract.
        setJobExecutionIncrementer(new AbstractDataFieldMaxValueIncrementer() {
            @Override
            protected long getNextKey() {
                return 0;
            }
        });

        super.afterPropertiesSet();
    }

    /**
     * Re-usable mapper for {@link JobExecution} instances.
     *
     * @author Dave Syer
     *
     */
    protected class JobExecutionRowMapper implements RowMapper<JobExecution> {

        public JobExecutionRowMapper() {
        }

        @Override
        public JobExecution mapRow(ResultSet rs, int rowNum) throws SQLException {
            Long id = rs.getLong(1);

            // FIX: the parameters were previously fetched (one extra query per row)
            // and then discarded by constructing JobExecution(id) alone. Attach them,
            // as the superclass's own row mapper does.
            JobParameters jobParameters = getJobParameters(id);
            JobExecution jobExecution = new JobExecution(id, jobParameters);

            jobExecution.setStartTime(rs.getTimestamp(2));
            jobExecution.setEndTime(rs.getTimestamp(3));
            jobExecution.setStatus(BatchStatus.valueOf(rs.getString(4)));
            jobExecution.setExitStatus(new ExitStatus(rs.getString(5), rs.getString(6)));
            jobExecution.setCreateTime(rs.getTimestamp(7));
            jobExecution.setLastUpdated(rs.getTimestamp(8));
            jobExecution.setVersion(rs.getInt(9));
            return jobExecution;
        }

    }


    /**
     * Pages job executions, optionally restricted to still-running ones and/or a
     * START_TIME window of {@code [startTime, endTime)}.
     *
     * @param running   when {@code true}, keep only executions with no END_TIME
     * @param startTime inclusive lower bound on START_TIME, or {@code null} for none
     * @param endTime   exclusive upper bound on START_TIME, or {@code null} for none
     * @param start     zero-based row offset of the page
     * @param count     maximum number of rows in the page
     * @return a page of matching executions, newest (highest execution id) first
     * @throws Exception propagated from the underlying JDBC access
     */
    public MyPage<JobExecution> getExecutionsByRunningDate(boolean running, Date startTime, Date endTime, int start, int count) throws Exception {

        String table = "BATCH_JOB_EXECUTION E";
        StringBuilder where = new StringBuilder("1=1");
        List<Object> params = new ArrayList<>();
        if (running) {
            where.append(" and E.END_TIME is NULL");
        }
        if (startTime != null) {
            where.append(" and E.START_TIME>=?");
            params.add(startTime);
        }
        if (endTime != null) {
            where.append(" and E.START_TIME<?");
            params.add(endTime);
        }

        // FIX: guard the boxed count against null to avoid an unboxing NPE.
        Integer totalBoxed = getJdbcTemplate().queryForObject(
                getQuery("select count(1) from " + table + " where " + where), Integer.class, params.toArray());
        int total = totalBoxed == null ? 0 : totalBoxed;

        List<JobExecution> list;
        try {
            // Offset/limit parameters are appended only after the count query ran.
            params.add(start);
            params.add(count);
            // FIX: dropped the trailing ';' from the prepared statement — some JDBC
            // drivers reject statements that contain it. (limit ?,? is MySQL syntax.)
            list = getJdbcTemplate().query(
                    getQuery("select E.JOB_EXECUTION_ID, E.START_TIME, E.END_TIME, E.STATUS, E.EXIT_CODE, E.EXIT_MESSAGE, E.CREATE_TIME, E.LAST_UPDATED, E.VERSION from "
                            + table + " where " + where + " order by E.JOB_EXECUTION_ID desc limit ?,?"),
                    new JobExecutionRowMapper(), params.toArray());
        }
        catch (IncorrectResultSizeDataAccessException e) {
            // No matching rows: an empty page, not an error.
            list = Collections.emptyList();
        }
        return new MyPage<>(start, total, count, list);

    }

    /**
     * @return all executions that have not finished yet (END_TIME is NULL)
     */
    public Collection<JobExecution> getRunningJobExecutions() {
        return getJdbcTemplate().query(getQuery(GET_RUNNING_EXECUTIONS), new JobExecutionRowMapper());
    }



    /**
     * Loads a single execution by its primary key.
     *
     * @param executionId primary key of BATCH_JOB_EXECUTION
     * @return the matching execution, or {@code null} when no row exists
     */
    public JobExecution findOne(Long executionId) {
        String sql = "select E.JOB_EXECUTION_ID, E.START_TIME, E.END_TIME, E.STATUS, E.EXIT_CODE,"
                + " E.EXIT_MESSAGE,E.CREATE_TIME, E.LAST_UPDATED, E.VERSION,I.JOB_INSTANCE_ID, I.JOB_NAME"
                + "  from BATCH_JOB_EXECUTION E, BATCH_JOB_INSTANCE I"
                + " where 1=1 and E.JOB_INSTANCE_ID=I.JOB_INSTANCE_ID and E.JOB_EXECUTION_ID=?";
        List<JobExecution> list;
        try {
            list = getJdbcTemplate().query(getQuery(sql), new JobExecutionRowMapper(), executionId);
        } catch (IncorrectResultSizeDataAccessException e) {
            list = Collections.emptyList();
        }
        return list.isEmpty() ? null : list.get(0);
    }
}
