// NOTE(review): this entire class is commented out in the repository (disabled code).
// It is kept disabled here, but the dead code below was cleaned up so it is safe to
// re-enable: the SLF4J throwable logging was fixed, the potential NPE on a failed
// launch was removed, and the catch was narrowed to the specific launcher exceptions.
//
//package com.batch.quartz.job;
//
//import org.slf4j.Logger;
//import org.slf4j.LoggerFactory;
//import org.springframework.batch.core.*;
//import org.springframework.batch.core.launch.JobLauncher;
//import org.springframework.batch.core.repository.JobExecutionAlreadyRunningException;
//import org.springframework.batch.core.repository.JobInstanceAlreadyCompleteException;
//import org.springframework.batch.core.repository.JobRestartException;
//import org.springframework.beans.factory.annotation.Autowired;
//import org.springframework.beans.factory.annotation.Qualifier;
//import org.springframework.stereotype.Service;
//
//import java.util.UUID;
//
///**
// * <h3>partitionjob</h3>
// * <p>Service-layer trigger for the batch job (original note: 将任务 触发定义到服务层 —
// * "define the task trigger at the service layer").</p>
// *
// * @author : hduong
// * @version : 1.0
// * @date : 2019-12-20 14:20
// **/
//@Service
//public class PiLiangFileServiceImpl {
//
//    private static final Logger LOGGER = LoggerFactory.getLogger(PiLiangFileServiceImpl.class);
//
//    @Autowired
//    private JobLauncher jobLauncher;
//
//    @Autowired
//    @Qualifier("piLiangJob")
//    private Job piLiangJob;
//
//    /**
//     * Launches {@code piLiangJob} with a fresh parameter set. The random {@code uuid}
//     * parameter makes every invocation a new JobInstance, so repeated calls with the
//     * same taskId do not collide with an already-completed instance.
//     *
//     * @param taskId business task identifier propagated to the job as a parameter
//     */
//    public void executeBatchJob(String taskId) {
//        String fileName = "customer.txt";
//        JobParameters jobParameters = new JobParametersBuilder()
//                .addString("taskId", taskId)
//                .addString("fileName", fileName)
//                .addString("uuid", UUID.randomUUID().toString().replace("-",""))
//                .toJobParameters();
//        try {
//            JobExecution jobExecution = jobLauncher.run(piLiangJob, jobParameters);
//            // Log the result inside the try: jobExecution is only non-null on a
//            // successful launch (the original dereferenced it after the catch → NPE).
//            LOGGER.info("批量 :  执行结果: {}", jobExecution.getExitStatus());
//        } catch (JobExecutionAlreadyRunningException | JobRestartException
//                | JobInstanceAlreadyCompleteException | JobParametersInvalidException e) {
//            // Pass the throwable as the last argument with NO placeholder so SLF4J
//            // records the full stack trace (a "{}" placeholder would swallow it);
//            // printStackTrace() removed — the logger is the single output channel.
//            LOGGER.error("执行任务失败", e);
//        }
//    }
//}
