package com.tplhk.batch.readStr;

import com.tplhk.batch.uselisten.MyChunkListener;
import com.tplhk.batch.uselisten.MyJobListener;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.core.*;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.batch.core.launch.support.RunIdIncrementer;
import org.springframework.batch.core.scope.context.StepContext;
import org.springframework.batch.item.ItemProcessor;
import org.springframework.batch.item.support.CompositeItemProcessor;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.ApplicationArguments;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

import javax.xml.crypto.Data;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;

/**
 * Spring Batch configuration: defines the "readFile" job, its single
 * chunk-oriented step (reader → composite processor → writer), and the
 * associated listener beans.
 *
 * @author taiping
 * @since 2020/12/6 16:16
 **/
@Slf4j
@Configuration
@Slf4j
@Configuration
public class BatchConfig {

    @Autowired
    public JobBuilderFactory jobBuilderFactory;

    @Autowired
    public StepBuilderFactory stepBuilderFactory;

    /** Second processor in the composite chain; a Spring-managed bean. */
    @Autowired
    private Processor2 processor2;

    /** Error-handling step declared elsewhere; currently not wired into the job flow. */
    @Autowired
    private Step errorStep;

    /**
     * Declares the batch job. A simple Job is composed of one or more Steps.
     *
     * <p>No {@code RunIdIncrementer} is configured, so re-launching with identical
     * job parameters is treated as the same JobInstance.
     *
     * @return the configured "readFile" job
     */
    @Bean
    public Job processJob() {
        return jobBuilderFactory.get("readFile")
                // Job-level lifecycle listener (beforeJob/afterJob callbacks).
                .listener(jobListener())
                // Single step; chain additional steps with .next(...) if needed.
                .start(orderStep1())
                .build();
    }

    /**
     * Declares the chunk-oriented step: read → process → write.
     *
     * <p>{@code chunk(5)} means items are accumulated and written — and the
     * transaction committed — every 5 items, similar to a periodic SQL commit.
     *
     * @return the configured "orderStep1" step
     */
    @Bean
    public Step orderStep1() {
        return stepBuilderFactory.get("orderStep1")
                // <input, output> types; commit interval of 5 items per chunk.
                .<String, String>chunk(5)
                // Reads the items.
                .reader(new Reader())
                // Invoked around every read.
                .listener(new FileItemReadListener(new Writer()))
                // Composite processor: Processor then Processor2 (see processor()).
                .processor(processor())
                // Writes each completed chunk.
                .writer(new Writer())
                .listener(new WriteLinster())
                // FIX: faultTolerant() was previously called twice. Each call wraps
                // the builder in a new FaultTolerantStepBuilder, so the second call
                // discarded the retry settings chained on the first — the likely
                // reason retry appeared to stop working. Call it once and chain
                // both retry and skip on the same fault-tolerant builder.
                .faultTolerant()
                // Retry a failing item up to 30 times on any Exception.
                .retry(Exception.class).retryLimit(30)
                // Once retries are exhausted, skip up to 30 failing items before
                // the step is marked as failed.
                .skip(Exception.class).skipLimit(30)
                .listener(new SkipLinster())
                .build();
    }

    /**
     * Composite processor: delegates run in list order, the output of each one
     * feeding the input of the next.
     *
     * @return composite of {@code Processor} followed by {@code Processor2}
     */
    @Bean
    public CompositeItemProcessor<String, String> processor() {
        CompositeItemProcessor<String, String> processor = new CompositeItemProcessor<>();
        List<ItemProcessor<String, String>> delegates = new ArrayList<>();
        // Additional processors may be appended here; they execute in order.
        delegates.add(new Processor());
        delegates.add(processor2);
        processor.setDelegates(delegates);

        return processor;
    }

    /**
     * Job-level execution listener bean (start/completion notifications).
     *
     * @return a new {@code JobCompletionListener}
     */
    @Bean
    public JobExecutionListener jobListener() {
        return new JobCompletionListener();
    }

}
