package com.hdw.job.service;

import com.hdw.common.base.BaseException;
import com.hdw.common.base.service.BaseMsgServiceImpl;
import com.hdw.common.constant.CommonConstant;
import com.hdw.common.db.DynamicDataSource;
import com.hdw.common.db.HikariDataSourceExt;
import com.hdw.common.util.DateUtil;
import com.hdw.common.util.LocalDateUtil;
import com.hdw.common.util.SnowflakeIdWorker;
import com.hdw.common.util.SpringBeanUtil;
import com.hdw.common.vo.DataExecParamVO;
import com.hdw.job.api.EtlRunService;
import com.hdw.job.batch.dao.JdbcJobExecutionExDao;
import com.hdw.job.batch.dao.JdbcStepExecutionExDao;
import com.hdw.job.batch.job.OdsAndDimJob;
import com.hdw.job.batch.job.OdsToDwJob;
import com.hdw.job.bean.model.EtlGroupBatch;
import com.hdw.job.constant.JobConstant;
import com.hdw.job.mapper.EtlGroupBatchMapper;
import com.hdw.job.utils.JobUtil;
import org.springframework.batch.core.*;
import org.springframework.batch.core.launch.JobLauncher;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.jdbc.core.JdbcOperations;
import org.springframework.stereotype.Service;
import org.springframework.util.StringUtils;

import javax.annotation.Resource;
import java.lang.reflect.Field;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;

/**
 * Runs ETL batch jobs: builds {@link JobParameters} from {@link DataExecParamVO}
 * requests, queues and launches them through Spring Batch, then monitors the
 * running executions and persists each terminal status into ETL_GROUP_BATCH.
 *
 * Created by liujunlei on 2021/6/2.
 */
@Service
public class EtlRunServiceImpl implements EtlRunService {
    // Serializes the enqueue/drain/launch section. The read lock is kept for the
    // disabled getJobList implementation further down.
    private static ReadWriteLock readWriteLock = new ReentrantReadWriteLock();
    private static Lock readLock = readWriteLock.readLock();
    private static Lock writeLock = readWriteLock.writeLock();

    // Launched executions still being monitored, keyed by job-execution id.
    private static final Map<Long, JobExecution> runTask = new ConcurrentHashMap<>();
    // Parameters of jobs accepted but not yet launched.
    private static final ConcurrentLinkedQueue<JobParameters> waitTask = new ConcurrentLinkedQueue<>();

    // Single background thread that polls runTask and persists finished executions.
    ExecutorService singleThreadExecutor = Executors.newSingleThreadExecutor();

    @Resource(name = "myJobLauncher")
    JobLauncher myJobLauncher;
    @Resource(name = "jobAsyncLauncher")
    JobLauncher jobLauncher;
    @Autowired
    OdsAndDimJob odsAndDimJob;
    @Autowired
    OdsToDwJob odsToDwJob;
    @Autowired
    JdbcOperations jdbcOperations;
    @Autowired
    EtlGroupBatchMapper etlGroupBatchMapper;
    @Autowired
    BaseMsgServiceImpl baseMsgService;

    // Only referenced by the disabled getJobList implementation below;
    // retained so re-enabling it stays a one-line change.
    JdbcJobExecutionExDao jobExecutionDao;
    JdbcStepExecutionExDao stepExecutionDao;

   /* @Override
    public void runBatchJob(DataExecParamVO param, Long groupId, Integer exeType) throws BaseException {
        param.setGroupId(groupId);
        param.setExeType(exeType);
        this.addAndStartJob(param);
    }*/

    /**
     * Queues every request of the list under one group id and starts draining
     * the queue. A missing group id is replaced by a freshly generated snowflake id.
     *
     * @param params  job requests; null/empty is a no-op
     * @param groupId shared group id, generated when null
     * @param exeType execution type propagated into each request
     */
    @Override
    public void runBatchJobList(List<DataExecParamVO> params, Long groupId, Integer exeType) throws BaseException {
        if (params == null || params.isEmpty()) return;
        if (groupId == null) {
            groupId = SnowflakeIdWorker.getId();
        }
        for (DataExecParamVO param : params) {
            param.setGroupId(groupId);
            param.setExeType(exeType);
            this.addAndStartJob(param);
        }
    }

    /**
     * Returns true when every tracked execution of the group has reached a
     * terminal state (COMPLETED or FAILED). A group with no tracked executions
     * is considered ended.
     */
    @Override
    public boolean isGroupEnd(Long groupId) {
        for (JobExecution jobExecution : runTask.values()) {
            // JOB_GROUP_ID is stored with addLong (see getJobParameters), so it
            // must be read back with getLong — not getString/parseLong.
            Long gId = jobExecution.getJobParameters().getLong(JobConstant.JOB_GROUP_ID);
            if (Objects.equals(groupId, gId)) {
                BatchStatus status = jobExecution.getStatus();
                if (status != BatchStatus.COMPLETED && status != BatchStatus.FAILED) {
                    return false;
                }
            }
        }
        return true;
    }

    /**
     * Aggregates the persisted batch records of a group: FAILED as soon as any
     * record is not COMPLETED, otherwise COMPLETED (including the empty case).
     */
    @Override
    public String getStatus(Long groupId) {
        EtlGroupBatch batch = new EtlGroupBatch();
        batch.setIdGroup(groupId);
        List<EtlGroupBatch> list = etlGroupBatchMapper.findByModel(batch);
        for (EtlGroupBatch gb : list) {
            if (!BatchStatus.COMPLETED.name().equals(gb.getStatus())) {
                return BatchStatus.FAILED.name();
            }
        }
        return BatchStatus.COMPLETED.name();
    }

    /** Converts the request into job parameters, enqueues it and triggers the drain. */
    private void addAndStartJob(DataExecParamVO paramVO) {
        // getJobParameters returns null on error and ConcurrentLinkedQueue
        // rejects null elements, so guard before enqueueing.
        JobParameters jobParameters = this.getJobParameters(paramVO);
        if (jobParameters == null) return;
        writeLock.lock(); // lock before try: a failed acquisition must not reach unlock()
        try {
            waitTask.add(jobParameters);
            this.startStoppingJob();
        } finally {
            writeLock.unlock();
        }
    }

    /**
     * Drains the wait queue, launching each queued job, then — if anything is
     * running — schedules the monitor task. The monitor writes every execution
     * that reaches COMPLETED/FAILED into ETL_GROUP_BATCH, drops it from
     * runTask, and raises a message on failure. Tasks run serially on the
     * single-thread executor, so extra scheduled monitors just exit quickly
     * once runTask is empty.
     */
    private void startStoppingJob() {
        writeLock.lock();
        try {
            while (!waitTask.isEmpty()) {
                try {
                    // Best effort: one failing launch must not block the rest of the queue.
                    runJob(waitTask.remove());
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
            if (!runTask.isEmpty()) {
                singleThreadExecutor.execute(new Runnable() {
                    @Override
                    public void run() {
                        while (!runTask.isEmpty()) {
                            try {
                                // ConcurrentHashMap views tolerate removal during iteration.
                                for (Long executionId : runTask.keySet()) {
                                    JobExecution jobExecution = runTask.get(executionId);
                                    if (jobExecution == null) continue;
                                    BatchStatus status = jobExecution.getStatus();
                                    if (status == BatchStatus.FAILED || status == BatchStatus.COMPLETED) {
                                        EtlGroupBatch groupBatch = new EtlGroupBatch();
                                        groupBatch.setIdJob(executionId);
                                        groupBatch.setStatus(status.name());
                                        Long idGroup = jobExecution.getJobParameters().getLong(JobConstant.JOB_GROUP_ID);
                                        groupBatch.setIdGroup(idGroup);
                                        etlGroupBatchMapper.insert(groupBatch);
                                        runTask.remove(executionId);
                                        if (status == BatchStatus.FAILED) {
                                            baseMsgService.insert(CommonConstant.MSG_CODE_C02, "主键为[" + executionId + "]的任务执行失败", "组号为[" + idGroup + "]");
                                        }
                                    }
                                }
                                // Poll instead of busy-spinning: the former Thread.yield()
                                // pinned a core for the whole lifetime of the jobs.
                                Thread.sleep(200L);
                            } catch (InterruptedException e) {
                                Thread.currentThread().interrupt(); // restore interrupt status before stopping
                                break;
                            } catch (Exception e) {
                                e.printStackTrace();
                                break;
                            }
                        }
                    }
                });
            }
        } finally {
            writeLock.unlock();
        }
    }

    /*@Override
    public List<JobExecution> getJobList(String jobType, Integer exeType) throws BaseException {
        if (StringUtils.isEmpty(jobType) || exeType == null) return null;

        this.initDao();
        try {
            readLock.lock();
            List<JobExecution> list = new ArrayList();
            Iterator<JobParameters> iterator = waitTask.iterator();
            while (iterator.hasNext()) {
                JobParameters next = iterator.next();
                if (jobType.equals(next.getString(JobConstant.JOB_TYPE))) {
                    JobExecution jobExecution = new JobExecution(null, null, next, null);
                    list.add(jobExecution);
                }
            }
            List<JobExecution> top20JobExecutions = jobExecutionDao.getTopJobExecutions(exeType, jobType, 30);
            if (top20JobExecutions != null && top20JobExecutions.size() > 0) {
                try {
                    Class<StepExecution> stepExecutionClass = StepExecution.class;
                    Field field = stepExecutionClass.getDeclaredField("jobExecution");
                    field.setAccessible(true);
                    for (JobExecution jobExecution : top20JobExecutions) {
                        if (runTask.get(jobExecution.getId()) == null && jobExecution.getStatus().equals(BatchStatus.STARTED)) {
                            jobExecution.setStatus(BatchStatus.ABANDONED);
                        }
                        List<StepExecution> stepExecution = stepExecutionDao.getStepExecution(jobExecution);
                        for (StepExecution execution : stepExecution) {
                            field.set(execution, null);
                        }
                        list.add(jobExecution);
                    }
                } catch (Exception e) {
                    throw new BaseException("获取失败" + e.getMessage(), e);
                }
            }
            return new ArrayList<>(list);
        } finally {
            readLock.unlock();
        }
    }*/

    // Currently unused within this file; kept as a stable key format
    // ("<targetDbId>_<targetTable>") — TODO confirm no external/pending use before removal.
    private String getTableKey(JobParameters parameters) {
        String tbTar = parameters.getString(JobConstant.JOB_TABLENAME);
        String idDbTar = parameters.getString(JobConstant.JOB_DB_TAR);
        return idDbTar + "_" + tbTar;
    }

    /**
     * Launches the job bean matching the JOB_TYPE parameter. ODS and DIM share
     * one job, DW has its own, and DM is expanded into one launch per 3-day
     * window between D_BEGIN and D_END, substituting the ${D_BEGIN} placeholder
     * into the delete/query SQL for each window. Errors are logged and
     * swallowed so one bad job cannot stop the queue drain.
     */
    private void runJob(JobParameters parameters) {
        String jobType = parameters.getString(JobConstant.JOB_TYPE);
        Job job;
        try {
            if (JobConstant.JOB_TYPE_ODS.equals(jobType) || JobConstant.JOB_TYPE_DIM.equals(jobType)) {
                // ODS and DIM were duplicated branches resolving the same bean.
                job = (Job) SpringBeanUtil.getBean(JobConstant.JOBNAME_HOS_HDW);
                jobExec(job, parameters);
            } else if (JobConstant.JOB_TYPE_DW.equals(jobType)) {
                job = (Job) SpringBeanUtil.getBean(JobConstant.JOBNAME_ODS_DW);
                jobExec(job, parameters);
            } else if (JobConstant.JOB_TYPE_DM.equals(jobType)) {
                String startDate = parameters.getString(JobConstant.JOB_D_BEGIN);
                String endDate = parameters.getString(JobConstant.JOB_D_END);
                // NOTE(review): LocalDateUtil.dateFormat looks like a shared
                // (Simple)DateFormat — confirm it is thread-safe before wider reuse.
                Date dtBegin = LocalDateUtil.dateFormat.parse(startDate + " 00:00:00");
                Date dtEnd = LocalDateUtil.dateFormat.parse(endDate + " 00:00:00");
                job = (Job) SpringBeanUtil.getBean(JobConstant.JOBNAME_DW_DM);
                if (job == null) return;
                JobParametersBuilder parametersBuilder = new JobParametersBuilder();
                parametersBuilder.addJobParameters(parameters);
                int counterStop = 0; // runaway-loop guard only, no business meaning
                while (dtBegin.before(dtEnd) && counterStop++ < 10000) {
                    String dBegin = DateUtil.toDateStrByFormat(dtBegin, "yyyyMMdd");
                    parametersBuilder.addString(JobConstant.JOB_D_BEGIN, dBegin)
                            .addLong("time", System.currentTimeMillis())
                            .addString(JobConstant.JOB_DELSQL, parameters.getString(JobConstant.JOB_DELSQL).replaceAll("\\$\\{D_BEGIN\\}", dBegin))
                            .addString(JobConstant.JOB_QUERYSQL, parameters.getString(JobConstant.JOB_QUERYSQL).replaceAll("\\$\\{D_BEGIN\\}", dBegin));
                    jobExec(job, parametersBuilder.toJobParameters());
                    dtBegin = DateUtil.getNextDate(dtBegin, 3); // advance the 3-day window
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Launches the job through the async launcher and registers the resulting
     * execution for monitoring.
     *
     * @throws BaseException wrapping any launcher failure with its cause
     */
    private void jobExec(Job job, JobParameters parameters) throws BaseException {
        try {
            JobExecution run = jobLauncher.run(job, parameters);
            runTask.put(run.getId(), run);
        } catch (Exception e) {
            throw new BaseException("job执行异常", e);
        }
    }

    /**
     * Converts a request VO into Spring Batch job parameters, enriching them
     * with the character sets of the resolved source/target data sources.
     * Returns null when parameter construction fails (callers must check).
     */
    private JobParameters getJobParameters(DataExecParamVO param) {
        try {
            JobParametersBuilder jobParametersBuilder = new JobParametersBuilder()
                    // "time" makes each parameter set unique so re-runs are not rejected
                    .addLong("time", System.currentTimeMillis())
                    .addString(JobConstant.JOB_D_BEGIN, param.getStartDate())
                    .addString(JobConstant.JOB_D_END, param.getEndDate())
                    .addString(JobConstant.JOB_DES, param.getDes())
                    .addString(JobConstant.JOB_DELSQL, param.getDelSql())
                    .addString(JobConstant.JOB_QUERYSQL, param.getQuerySql())
                    .addString(JobConstant.JOB_TABLENAME, param.getTbTar())
                    .addString(JobConstant.JOB_TYPE, param.getJobType())
                    .addString(JobConstant.JOB_DB_SOU, param.getIdDbSou())
                    .addString(JobConstant.JOB_DB_TAR, param.getIdDbTar())
                    .addLong(JobConstant.JOB_GROUP_ID, param.getGroupId())
                    .addLong(JobConstant.JOB_EXE_TYPE, param.getExeType().longValue());

            HikariDataSourceExt dataSource = DynamicDataSource.getHikariDataSourceById(param.getIdDbSou());
            if (dataSource != null) jobParametersBuilder.addString("characterSetSou", dataSource.getCharacterSet());
            dataSource = DynamicDataSource.getHikariDataSourceById(param.getIdDbTar());
            if (dataSource != null) jobParametersBuilder.addString("characterSetTar", dataSource.getCharacterSet());

            return jobParametersBuilder.toJobParameters();
        } catch (Exception e) {
            e.printStackTrace();
        }
        return null;
    }
}
