package org.fjsei.yewu.job;

import com.alibaba.fastjson2.JSON;
import org.fjsei.yewu.filter.Node;
import org.fjsei.yewu.jpa.NormalExecutor;
import org.fjsei.yewu.jpa.PageOffsetFirst;
import org.fjsei.yewu.repository.maint.SplJob;
import org.fjsei.yewu.repository.maint.SplJobRepository;
import org.quartz.*;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Slice;
import org.springframework.util.StringUtils;
import java.time.LocalDateTime;
import java.util.function.Function;

//Analogous to a streaming message queue: drains a repository in limited-size slices,
//persisting a cursor (SplJob) between slices so the job can resume after restarts.
public interface SplitRepoJob extends InterruptableJob {
    /** @return true once an interrupt has been requested for this job. */
    boolean isInterrupt();

    /**
     * Processes one slice of work for the given job.
     * @return true when more slices remain to be processed, false when the job is done.
     */
    boolean executeSlice(JobKey jobKey, SplJob splJob) throws SchedulerException;

    /** @return repository used to load/persist the job's cursor state. */
    SplJobRepository splJobRepo();

    void setSplJob(SplJob splJob);

    SplJob getSplJob();

    void setContext(JobExecutionContext context);

    JobExecutionContext getContext();

    /**
     * Quartz entry point: loads the persistent {@code SplJob} cursor for this JobKey and
     * keeps invoking {@link #executeSlice} until it reports completion, the job is
     * interrupted, or the scheduler shuts down.
     *
     * @throws JobExecutionException when a slice fails with an unexpected exception
     */
    @Override
    default void execute(JobExecutionContext context) throws JobExecutionException {
        setContext(context);
        JobKey jobKey = context.getJobDetail().getKey();
        SplJob splJob = splJobRepo().findByGroupNameAndJobName(jobKey.getGroup(), jobKey.getName());
        setSplJob(splJob);
        try {
            while (true) {
                if (isInterrupt() || context.getScheduler().isShutdown()) {
                    System.out.println("Scheduler down break");
                    return;
                }
                if (!executeSlice(jobKey, getSplJob()))
                    break;
            }
        } catch (Exception e) {
            if (e instanceof InterruptedException) {
                Thread.currentThread().interrupt();  // restore the interrupt status
                System.out.println("线程被中断...");
                return;
            }
            // BUGFIX: other exceptions used to be silently swallowed here (and the thread
            // wrongly interrupted); surface them so Quartz records the job failure.
            throw new JobExecutionException(e);
        }
    }

    /**
     * Runs one slice: fetches at most {@code splJob.getLimt()} rows ordered by id
     * (bounded by the JSON-serialized cursor fields curr/start/endr), applies
     * {@code sliceJob} to each row, then persists the advanced cursor
     * (curr/offs/last) back into the SplJob row — optimistic-lock style, with no
     * strong transactional guarantee.
     * Note: keep {@code splJob.getLimt()} small so each slice finishes quickly and
     * the caller can react to {@code context.getScheduler().isShutdown()} in time.
     *
     * @param splJob          persistent cursor/state of this split job
     * @param applyRepository repository used to page through the entities
     * @param sliceJob        work applied to each entity of the slice
     * @param prjType         projection type fetched from the repository
     * @param idClass         concrete class of the entity id, used to parse the
     *                        JSON-serialized cursor boundaries
     * @return true when more data may remain, false when the job finished or a row failed
     */
    @SuppressWarnings("unchecked")  // rows are fetched as projection P and applied as T by design
    default <T extends Node<ID>, P extends Node<ID>, ID> boolean runSlice(SplJob splJob,
                      NormalExecutor<T, ID> applyRepository, Function<T, String> sliceJob, Class<P> prjType, Class<ID> idClass) throws SchedulerException {
        Pageable pageable = PageOffsetFirst.of(0, splJob.getLimt());   // fetch only a small batch at a time
        // BUGFIX: removed the former `!idClass.isAssignableFrom(idClass)` guard — a class is
        // always assignable from itself, so the condition was always false (dead code).
        // Cursor boundaries are stored as JSON strings; parse whichever are present.
        ID idCurr = null;
        if (StringUtils.hasText(splJob.getCurr())) {
            idCurr = JSON.parseObject(splJob.getCurr(), idClass);
        }
        ID idEndr = null;
        if (StringUtils.hasText(splJob.getEndr())) {
            idEndr = JSON.parseObject(splJob.getEndr(), idClass);
        }
        ID idStart = null;
        if (StringUtils.hasText(splJob.getStart())) {
            idStart = JSON.parseObject(splJob.getStart(), idClass);
        }
        // Must branch on the parsed ids, not the raw strings, to pick the query shape:
        Slice<P> slice;
        if (null != idCurr) {
            if (null != idEndr)
                slice = applyRepository.findAllByIdGreaterThanAndLessThanOrderByIdAsc(idCurr, idEndr, pageable, prjType);
            else
                slice = applyRepository.findAllByIdGreaterThanOrderByIdAsc(idCurr, pageable, prjType);
        }
        else if (null != idStart && null != idEndr)
            slice = applyRepository.findAllByIdGreaterThanAndLessThanOrderByIdAsc(idStart, idEndr, pageable, prjType);
        else if (null != idStart)
            slice = applyRepository.findAllByIdGreaterThanOrderByIdAsc(idStart, pageable, prjType);
        else if (null != idEndr)
            slice = applyRepository.findAllByIdLessEqThanOrderByIdAsc(idEndr, pageable, prjType);
        else
            slice = applyRepository.findAllByOrderById(pageable, prjType);
        int count = 0;
        Scheduler scheduler = getContext().getScheduler();
        for (P parent : slice) {
            if (isInterrupt() || scheduler.isShutdown()) {
                break;      // stop processing this slice
            }
            try {
                sliceJob.apply((T) parent);
            } catch (Exception e) {
                e.printStackTrace();
                // ROBUSTNESS: e.getMessage() may be null (e.g. NPE); fall back to the class name
                // so the persisted result column is never null.
                splJob.setResult(e.getMessage() != null ? e.getMessage() : e.getClass().getName());
                splJob.setLast(LocalDateTime.now());
                // Optimistic-lock style persistence; no strong transactional guarantee.
                setSplJob(splJobRepo().save(splJob));
                return false;
            }
            count++;
        }
        // Advance the cursor to the id of the last row actually processed in this slice.
        T lastsortHit = count > 0 ? (T) slice.getContent().get(count - 1) : null;
        if (null != lastsortHit) {
            splJob.setCurr(JSON.toJSONString(lastsortHit.getId()));
        }
        // Slice entities are flushed automatically; no applyRepository.saveAll(slice) needed.
        splJob.setOffs(splJob.getOffs() + count);
        splJob.setLast(LocalDateTime.now());
        if (slice.getNumberOfElements() <= 0) {     // must not use slice.getSize() (requested page size) here
            splJob.setResult("任务正常完成！");
            setSplJob(splJobRepo().save(splJob));
            return false;
        }
        // Save once per slice to keep each transaction short (the slice loop itself may be long).
        setSplJob(splJobRepo().save(splJob));
        return true;
    }
}
