package com.zzg.demo.springbatch.demo.bean;

import com.zzg.demo.springbatch.demo.customer.MyJobListener;
import com.zzg.demo.springbatch.demo.customer.MyConsumerProcessor;
import com.zzg.demo.springbatch.demo.customer.MyReadListener;
import com.zzg.demo.springbatch.demo.customer.MyWriteListener;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.springframework.batch.core.*;
import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing;
import org.springframework.batch.core.job.builder.JobBuilder;
import org.springframework.batch.core.launch.JobLauncher;
import org.springframework.batch.core.launch.support.RunIdIncrementer;
import org.springframework.batch.core.launch.support.TaskExecutorJobLauncher;
import org.springframework.batch.core.repository.JobRepository;
import org.springframework.batch.core.repository.dao.Jackson2ExecutionContextStringSerializer;
import org.springframework.batch.core.repository.support.JobRepositoryFactoryBean;
import org.springframework.batch.core.scope.context.ChunkContext;
import org.springframework.batch.core.step.builder.StepBuilder;
import org.springframework.batch.item.Chunk;
import org.springframework.batch.item.ExecutionContext;
import org.springframework.batch.item.ItemStreamException;
import org.springframework.batch.item.ItemWriter;
import org.springframework.batch.item.database.BeanPropertyItemSqlParameterSourceProvider;
import org.springframework.batch.item.database.JdbcBatchItemWriter;
import org.springframework.batch.item.database.support.DefaultDataFieldMaxValueIncrementerFactory;
import org.springframework.batch.item.file.FlatFileItemReader;
import org.springframework.batch.item.file.mapping.BeanWrapperFieldSetMapper;
import org.springframework.batch.item.file.mapping.DefaultLineMapper;
import org.springframework.batch.item.file.transform.DelimitedLineTokenizer;
import org.springframework.boot.CommandLineRunner;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.ClassPathResource;
import org.springframework.core.task.SimpleAsyncTaskExecutor;
import org.springframework.retry.RetryCallback;
import org.springframework.retry.RetryContext;
import org.springframework.retry.RetryListener;
import org.springframework.transaction.PlatformTransactionManager;
import javax.sql.DataSource;

@Configuration
@Slf4j
//@EnableBatchProcessing // not needed on this Spring Batch version (5.x with Boot auto-configuration)
public class SpringBatchDemoConfig {
    /**
     * Defines the {@link JobRepository}: the registry for job metadata and the component that
     * talks to the database (execution state persistence, transaction management).
     *
     * @param dataSource         data source holding the Spring Batch metadata tables
     * @param transactionManager transaction manager used for metadata updates
     * @return a fully initialized {@link JobRepository}
     * @throws Exception if the factory bean fails to initialize
     */
    @Bean
    public JobRepository myJobRepository(DataSource dataSource, PlatformTransactionManager transactionManager) throws Exception{
        JobRepositoryFactoryBean jobRepositoryFactoryBean = new JobRepositoryFactoryBean();
        jobRepositoryFactoryBean.setDatabaseType("mysql");
        jobRepositoryFactoryBean.setTransactionManager(transactionManager);
        jobRepositoryFactoryBean.setDataSource(dataSource);
        // Use JSON serialization so the context data persisted to the batch_job_execution_context
        // and batch_step_execution_context tables is stored as readable JSON (not Base64).
        jobRepositoryFactoryBean.setSerializer(new Jackson2ExecutionContextStringSerializer());
        // Configure the incrementer factory explicitly to avoid a null incrementerFactory.
        DefaultDataFieldMaxValueIncrementerFactory incrementerFactory = new DefaultDataFieldMaxValueIncrementerFactory(dataSource);
        jobRepositoryFactoryBean.setIncrementerFactory(incrementerFactory);
        jobRepositoryFactoryBean.afterPropertiesSet();
        return jobRepositoryFactoryBean.getObject();
    }

    /**
     * Defines the job launcher, bound to the {@link JobRepository} above.
     * <p>
     * Note: calling {@code myJobRepository(...)} here is a bean-method self-invocation; it is
     * safe because {@code @Configuration} classes are CGLIB-proxied, so the same singleton
     * repository bean is returned.
     *
     * @param dataSource         data source passed through to the job repository
     * @param transactionManager transaction manager passed through to the job repository
     * @return an asynchronous {@link TaskExecutorJobLauncher}
     * @throws Exception if launcher initialization fails
     */
    @Bean
    public TaskExecutorJobLauncher myJobLauncher(DataSource dataSource, PlatformTransactionManager transactionManager) throws Exception{
        TaskExecutorJobLauncher jobLauncher = new TaskExecutorJobLauncher();
        // Bind the job repository.
        jobLauncher.setJobRepository(myJobRepository(dataSource, transactionManager));
        // Use an async executor; without an explicit executor, jobs run synchronously.
        jobLauncher.setTaskExecutor(new SimpleAsyncTaskExecutor());
        jobLauncher.afterPropertiesSet();
        return jobLauncher;
    }


    /**
     * Job-level listener bean.
     *
     * @return a new {@link MyJobListener}
     */
    @Bean
    public MyJobListener myJobListener(){
        /* Spring Batch provides a complete listener hierarchy:
           Job level:   JobExecutionListener
           Step level:  StepExecutionListener
           Chunk level: ChunkListener
           Item level:  ItemReadListener, ItemProcessListener, ItemWriteListener
           Skip level:  SkipListener
           Retry level: RetryListener
           ExecutionContext level: ExecutionContextListener — unlike the others it has no clear
           before/after phases; it only fires when the context is flushed. It is mainly for
           monitoring ExecutionContext changes rather than controlling the batch flow; in
           practice, developers usually access and manipulate the ExecutionContext through
           StepExecutionListener or JobExecutionListener instead.
           These listeners let you monitor and control every phase of a batch job. */
        return new MyJobListener();
    }

    /**
     * Defines the demo job: a single-step flow with a run-id incrementer (so the job can be
     * re-launched with fresh parameters) and a job-level listener.
     *
     * @param dataSource         data source passed through to the job repository
     * @param transactionManager transaction manager passed through to the job repository
     * @param step1              the single step of this job (resolved by type)
     * @return the configured {@link Job}
     * @throws Exception if the job repository cannot be created
     */
    @Bean
    public Job changePersonInfoJob(DataSource dataSource, PlatformTransactionManager transactionManager, Step step1) throws Exception {
        return new JobBuilder("changePersonInfoJob", myJobRepository(dataSource, transactionManager))
                .incrementer(new RunIdIncrementer())
                // [Listener] job-level listener
                .listener(myJobListener())
                .flow(step1)
                .end()
                .build();
    }

    /**
     * Defines the chunk-oriented step and wires every listener level (chunk, retry, skip,
     * read, process, write, step) for demonstration purposes.
     *
     * @param dataSource         data source passed through to the job repository
     * @param transactionManager transaction manager for chunk transactions
     * @param writer             item writer bean
     * @param reader             flat-file item reader bean
     * @param processor          item processor bean
     * @return the configured {@link Step}
     * @throws Exception if the job repository cannot be created
     */
    @Bean
    public Step changePersonAgeStep(DataSource dataSource, PlatformTransactionManager transactionManager, ItemWriter<PersonEntity> writer, FlatFileItemReader<PersonEntity> reader, MyConsumerProcessor<PersonEntity> processor) throws Exception {
        return new StepBuilder("changePersonAgeStep", myJobRepository(dataSource, transactionManager))
                // Chunk processing: read one item at a time, process it, and hand the accumulated
                // items to the writer once the chunk size (2) is reached.
                // chunk(int, PlatformTransactionManager) is the non-deprecated Spring Batch 5
                // overload; it replaces the separate .transactionManager(...) call.
                .<PersonEntity, PersonEntity>chunk(2, transactionManager)
                // [Listener] chunk-level listener
                .listener(new ChunkListener() {
                    @Override
                    public void beforeChunk(ChunkContext context) {
                        log.info("ChunkListener-Chunk执行前");
                    }

                    @Override
                    public void afterChunk(ChunkContext context) {
                        log.info("ChunkListener-afterChunk");
                    }

                    @Override
                    public void afterChunkError(ChunkContext context) {
                        log.info("ChunkListener-Chunk执行失败时调用");
                    }
                })
                .faultTolerant()
                .retryLimit(2).retry(Exception.class)
                // [Listener] retry-level listener
                .listener(new RetryListener() {
                    @Override
                    public <T, E extends Throwable> boolean open(RetryContext context, RetryCallback<T, E> callback) {
                        log.info("RetryListener-是否允许重试-返回true");
                        return true;
                    }

                    @Override
                    public <T, E extends Throwable> void close(RetryContext context, RetryCallback<T, E> callback, Throwable throwable) {
                        log.info("RetryListener-重试结束时");
                    }

                    @Override
                    public <T, E extends Throwable> void onError(RetryContext context, RetryCallback<T, E> callback, Throwable throwable) {
                        log.info("RetryListener-每次重试失败时");
                    }
                })
                .skip(Exception.class).skipLimit(3)
                // [Listener] skip-level listener
                .listener(new SkipListener<>() {
                    @Override
                    public void onSkipInRead(Throwable t) {
                        log.info("SkipListener-读取时跳过");
                    }

                    @Override
                    public void onSkipInProcess(PersonEntity item, Throwable t) {
                        log.info("SkipListener-处理时跳过");
                    }

                    @Override
                    public void onSkipInWrite(PersonEntity item, Throwable t) {
                        log.info("SkipListener-写入时跳过");
                    }
                })
                // Attach the reader (fault tolerance and listeners configured above/below).
                .reader(reader)
                // [Listener] read-level listener
                .listener(new MyReadListener())
                // Attach the processor.
                .processor(processor)
                // [Listener] process-level listener
                .listener(new ItemProcessListener<>() {
                    @Override
                    public void beforeProcess(PersonEntity item) {
                        log.info("ItemProcessListener-处理前");
                    }

                    @Override
                    public void afterProcess(PersonEntity item, PersonEntity result) {
                        log.info("ItemProcessListener-处理成功后");
                    }

                    @Override
                    public void onProcessError(PersonEntity item, Exception e) {
                        log.info("ItemProcessListener-处理错误时");
                    }
                })
                // Attach the writer.
                .writer(writer)
                // [Listener] write-level listener
                .listener(new MyWriteListener())
                // [Listener] step-level listener
                .listener(new StepExecutionListener() {
                    @Override
                    public void beforeStep(StepExecution stepExecution) {
                        log.info("StepExecution-Step执行前");
                    }

                    @Override
                    public ExitStatus afterStep(StepExecution stepExecution) {
                        log.info("StepExecution-Step执行后");
                        return stepExecution.getExitStatus();
                    }
                })
                .build();
    }

    /**
     * Defines the reader: a {@link FlatFileItemReader} that reads the CSV file line by line
     * (one line = one item) and maps each line onto a {@link PersonEntity}.
     *
     * @return the configured reader
     */
    @Bean
    public FlatFileItemReader<PersonEntity> reader() {
        // Anonymous subclass only to demonstrate reading/writing the ExecutionContext on open().
        FlatFileItemReader<PersonEntity> reader = new FlatFileItemReader<>() {
            @Override
            public void open(ExecutionContext executionContext) throws ItemStreamException {
                // ExecutionContext entries can be read and written here.
                log.info("FlatFileItemReader getCustomerReaderContext:{}", executionContext.get("customerReaderContext"));
                executionContext.put("customerReaderContext", String.format("customerReaderContext-%s", System.currentTimeMillis()));
                super.open(executionContext);
            }
        };
        // Location of the input file on the classpath.
        reader.setResource(new ClassPathResource("springbatch/TestSpringBatchDemoPerson.csv"));
        // Map CSV columns onto PersonEntity fields.
        // Plain setter calls instead of double-brace initialization: the {{...}} idiom creates
        // hidden anonymous subclasses that retain a reference to this configuration instance.
        DelimitedLineTokenizer tokenizer = new DelimitedLineTokenizer();
        // Column names must match the PersonEntity properties.
        tokenizer.setNames("name", "age");
        BeanWrapperFieldSetMapper<PersonEntity> fieldSetMapper = new BeanWrapperFieldSetMapper<>();
        fieldSetMapper.setTargetType(PersonEntity.class);
        DefaultLineMapper<PersonEntity> lineMapper = new DefaultLineMapper<>();
        lineMapper.setLineTokenizer(tokenizer);
        lineMapper.setFieldSetMapper(fieldSetMapper);
        reader.setLineMapper(lineMapper);
        return reader;
    }

    /**
     * Registers the bean validator used by the processor.
     *
     * @return a new {@link MyBeanValidator} for {@link PersonEntity}
     */
    @Bean
    public MyBeanValidator<PersonEntity> myBeanValidator(){
        return new MyBeanValidator<>();
    }

    /**
     * Defines the processor: lowers each person's age by 5 and runs the custom validator.
     *
     * @return the configured {@link MyConsumerProcessor}
     */
    @Bean
    public MyConsumerProcessor<PersonEntity> processor() {
        // Item transformation logic.
        MyConsumerProcessor<PersonEntity> myConsumerProcessor = new MyConsumerProcessor<>(item -> {
            item.setChangeAge(item.getAge() - 5);
            log.info("CHANGE:" + item);
//            // Simulate a failure to exercise retry/skip handling.
//            if(StringUtils.equals(item.getName(), "Rose")){
//                int i = 1/0;
//            }
        });
        // Attach the custom validator (bean-method self-invocation; safe under CGLIB proxying).
        myConsumerProcessor.setValidator(myBeanValidator());
        return myConsumerProcessor;
    }

    /**
     * Defines the writer: Spring Batch's {@link JdbcBatchItemWriter} inserting each processed
     * item into the person table via a named-parameter SQL statement.
     *
     * @param dataSource target database
     * @return the configured writer
     */
    @Bean
    public ItemWriter<PersonEntity> writer(DataSource dataSource) {
        // Alternative: a simple logging writer.
//        return new MyConsumerWriter<>(x -> log.info("变身后：" + x));

        // Use Spring Batch's prebuilt JDBC writer (JdbcBatchItemWriter) to write to the database.
        JdbcBatchItemWriter<PersonEntity> writer = new JdbcBatchItemWriter<>();
        // Bind named SQL parameters to PersonEntity bean properties.
        writer.setItemSqlParameterSourceProvider(new BeanPropertyItemSqlParameterSourceProvider<>());
        String sql = "INSERT INTO person (name, age, change_age) VALUES(:name, :age, :changeAge)";
        writer.setSql(sql);
        writer.setDataSource(dataSource);
        return writer;

    }

//    @Bean
//    public CommandLineRunner run(JobLauncher jobLauncher, Job changePersonInfoJob) {
//        return args -> {
//            log.info("开始执行spring batch批处理。。。");
//            JobExecution execution = jobLauncher.run(changePersonInfoJob, new JobParameters());
//            log.info("Job Exit Status : " + execution.getStatus());
//        };
//    }

//    @Bean
//    public ApplicationRunner applicationRunner(JobLauncher jobLauncher, Job changePersonInfoJob) {
//        return args -> {
//            log.info("开始执行spring batch批处理。。。");
//            JobExecution execution = jobLauncher.run(changePersonInfoJob, new JobParameters());
//            log.info("Job Exit Status : " + execution.getStatus());
//        };
//    }
}