package com.example.jobs;


import com.example.listener.StepExecutionPartitionListener;
import com.example.model.User;
import com.example.partition.RangePartitioner;
import com.example.processor.UserProcessor;
import com.example.tasklet.DummyTasklet;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.batch.core.launch.support.RunIdIncrementer;
import org.springframework.batch.core.partition.PartitionHandler;
import org.springframework.batch.core.partition.support.TaskExecutorPartitionHandler;
import org.springframework.batch.item.database.JdbcPagingItemReader;
import org.springframework.batch.item.database.PagingQueryProvider;
import org.springframework.batch.item.database.support.SqlPagingQueryProviderFactoryBean;
import org.springframework.batch.item.file.FlatFileItemWriter;
import org.springframework.batch.item.file.transform.BeanWrapperFieldExtractor;
import org.springframework.batch.item.file.transform.DelimitedLineAggregator;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.FileSystemResource;
import org.springframework.core.task.SimpleAsyncTaskExecutor;
import org.springframework.jdbc.core.BeanPropertyRowMapper;

import javax.sql.DataSource;
import java.util.HashMap;
import java.util.Map;

/**
 * @author lixing
 */
@Slf4j
@Configuration
@EnableBatchProcessing
public class PartitionerJob {

    @Autowired
    private JobBuilderFactory jobBuilderFactory;
    @Autowired
    private StepBuilderFactory stepBuilderFactory;
    @Autowired
    private DataSource dataSource;

    /**
     * The partitioned job: a partitioned master step followed by an aggregation step.
     *
     * <p>NOTE(review): the method name breaks the lowerCamelCase convention, but it is
     * kept as-is because it also defines the bean name ("PartitionJob") and renaming
     * it could break lookups by bean name elsewhere.
     */
    @Bean
    public Job PartitionJob() {
        return jobBuilderFactory.get("partitionJob")
                .incrementer(new RunIdIncrementer())
                .start(masterStep())
                .next(step2())
                .build();
    }

    /** Trailing step that runs once after all partitions have completed. */
    @Bean
    public Step step2() {
        return stepBuilderFactory.get("step2")
                .tasklet(dummyTask())
                .build();
    }

    // Aggregation tasklet executed after the partitioned processing has finished.
    @Bean
    public DummyTasklet dummyTask() {
        return new DummyTasklet();
    }

    // Master step: wires the partitioning rule (RangePartitioner) and the handler
    // that fans the partitions out to the slave step.
    @Bean
    public Step masterStep() {
        return stepBuilderFactory.get("masterStep")
                .partitioner(slave().getName(), rangePartitioner())
                .partitionHandler(masterSlaveHandler())
                .build();
    }

    /**
     * Handler that executes each partition of the slave step on the task executor.
     *
     * @return the configured {@link PartitionHandler}
     * @throws IllegalStateException if the handler fails its own validation
     */
    @Bean
    public PartitionHandler masterSlaveHandler() {
        TaskExecutorPartitionHandler handler = new TaskExecutorPartitionHandler();
        // Thread pool used to run the partitions in parallel.
        handler.setTaskExecutor(taskExecutor());
        // The step executed once per partition.
        handler.setStep(slave());
        // Number of partitions to create.
        handler.setGridSize(5);
        try {
            handler.afterPropertiesSet();
        } catch (Exception e) {
            // Fail fast at context startup instead of swallowing the error and
            // returning a half-initialised handler.
            throw new IllegalStateException("Failed to initialise TaskExecutorPartitionHandler", e);
        }
        return handler;
    }

    /**
     * Slave step executed for each partition. Reader/processor/writer are step-scoped,
     * so the {@code null} arguments here are placeholders replaced by Spring's
     * step-scope proxies at execution time.
     */
    @Bean(name = "slave")
    public Step slave() {
        log.info("...........called slave .........");

        return stepBuilderFactory.get("slave")
                .listener(new StepExecutionPartitionListener())
                .<User, User>chunk(2)
                .reader(slaveReader(null, null, null))
                .processor(slaveProcessor(null))
                .writer(slaveWriter(null, null))
                .build();
    }

    // Core partitioner: splits the id range into per-partition execution contexts.
    @Bean
    public RangePartitioner rangePartitioner() {
        return new RangePartitioner();
    }

    @Bean
    public SimpleAsyncTaskExecutor taskExecutor() {
        return new SimpleAsyncTaskExecutor();
    }

    /**
     * Step-scoped processor; receives the partition name from the step execution context.
     *
     * @param name partition name injected from {@code stepExecutionContext['name']}
     */
    @Bean
    @StepScope
    public UserProcessor slaveProcessor(@Value("#{stepExecutionContext['name']}") String name) {
        log.info("********called slave processor **********");
        UserProcessor userProcessor = new UserProcessor();
        userProcessor.setThreadName(name);
        return userProcessor;
    }

    /**
     * Step-scoped paging reader for one partition's id range.
     *
     * @param fromId lower id bound injected from {@code stepExecutionContext['_minRecode']}
     * @param toId   upper id bound injected from {@code stepExecutionContext['_maxRecode']}
     * @param name   partition name, used for logging only
     */
    @Bean
    @StepScope
    public JdbcPagingItemReader<User> slaveReader(
            @Value("#{stepExecutionContext['_minRecode']}") final String fromId,
            @Value("#{stepExecutionContext['_maxRecode']}") final String toId,
            @Value("#{stepExecutionContext['name']}") final String name) {
        log.info("slaveReader start {} {}", fromId, toId);
        JdbcPagingItemReader<User> reader = new JdbcPagingItemReader<>();
        reader.setDataSource(dataSource);
        reader.setQueryProvider(queryProvider());
        Map<String, Object> parameterValues = new HashMap<>();
        parameterValues.put("fromId", fromId);
        parameterValues.put("toId", toId);
        log.info("Parameter Value {} {}", name, parameterValues);
        reader.setParameterValues(parameterValues);
        reader.setPageSize(1000);
        // Plain constructor instead of double-brace initialisation, which would
        // create an anonymous inner class capturing this configuration instance.
        reader.setRowMapper(new BeanPropertyRowMapper<>(User.class));
        log.info("slaveReader end {} {}", fromId, toId);
        return reader;
    }

    /**
     * Paging query provider for the user table, filtered by the partition's id range.
     *
     * @throws IllegalStateException if the provider cannot be built (previously this
     *         method swallowed the exception and returned {@code null}, which only
     *         surfaced later as an NPE inside the reader)
     */
    @Bean
    public PagingQueryProvider queryProvider() {
        log.info("queryProvider start ");
        SqlPagingQueryProviderFactoryBean provider = new SqlPagingQueryProviderFactoryBean();
        provider.setDataSource(dataSource);
        provider.setSelectClause("select id, username, password, age");
        provider.setFromClause("from user");
        provider.setWhereClause("where id >= :fromId and id <= :toId");
        provider.setSortKey("id");
        log.info("queryProvider end ");
        try {
            return provider.getObject();
        } catch (Exception e) {
            throw new IllegalStateException("Failed to build paging query provider", e);
        }
    }

    /**
     * Step-scoped CSV writer; each partition writes its own output file named after
     * its id range.
     *
     * @param fromId lower id bound injected from {@code stepExecutionContext['_minRecode']}
     * @param toId   upper id bound injected from {@code stepExecutionContext['_maxRecode']}
     */
    @Bean
    @StepScope
    public FlatFileItemWriter<User> slaveWriter(
            @Value("#{stepExecutionContext['_minRecode']}") final String fromId,
            @Value("#{stepExecutionContext['_maxRecode']}") final String toId) {
        FlatFileItemWriter<User> writer = new FlatFileItemWriter<>();
        writer.setResource(new FileSystemResource(
                "csv/outputs/users.processed" + fromId + "-" + toId + ".csv"));
        BeanWrapperFieldExtractor<User> fieldExtractor = new BeanWrapperFieldExtractor<>();
        fieldExtractor.setNames(new String[]{"id", "username", "password", "age"});
        DelimitedLineAggregator<User> lineAggregator = new DelimitedLineAggregator<>();
        lineAggregator.setDelimiter(",");
        lineAggregator.setFieldExtractor(fieldExtractor);
        writer.setLineAggregator(lineAggregator);
        return writer;
    }
}
