package com.example.springbatch.job;

import com.example.springbatch.beans.ETLLogSummary;
import com.example.springbatch.common.KafkaReader;
import com.example.springbatch.processor.LogItemProcesser;
import com.example.springbatch.service.KafkaService;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.launch.support.RunIdIncrementer;
import org.springframework.batch.item.ItemReader;
import org.springframework.batch.item.ItemWriter;
import org.springframework.batch.item.adapter.ItemReaderAdapter;
import org.springframework.batch.item.database.JdbcCursorItemReader;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.jdbc.core.BeanPropertyRowMapper;

import javax.sql.DataSource;
import java.util.Map;

/**
 * Created by Liaopan on 2018/9/6.
 */
@Configuration
@EnableBatchProcessing
public class BatchConfiguration {

    /**
     * Running total of items written by {@link #writer()}.
     * NOTE(review): plain mutable static — safe only while the step runs
     * single-threaded; replace with {@code AtomicLong} if a TaskExecutor
     * is ever added to {@link #step1()}.
     */
    static long count = 0;

    @Autowired
    private JobBuilderFactory jobBuilderFactory;

    @Autowired
    private StepBuilderFactory stepBuilderFactory;

    @Autowired
    private KafkaService kafkaService;

    /** Processor applied to each item read by {@link #step1()}. */
    @Bean
    public LogItemProcesser logItemProcesser() {
        return new LogItemProcesser();
    }

    /**
     * JDBC cursor reader over the {@code etl_logsummary} table; the SQL
     * column aliases match {@link ETLLogSummary} property names so
     * {@link BeanPropertyRowMapper} can populate the bean.
     *
     * @param dataSource the DataSource the cursor streams rows from
     * @return a typed reader (return type was a raw {@code ItemReader};
     *         parameterized to avoid unchecked assignments at call sites)
     */
    @Bean
    public ItemReader<ETLLogSummary> reader(DataSource dataSource) {
        JdbcCursorItemReader<ETLLogSummary> reader = new JdbcCursorItemReader<>();
        reader.setDataSource(dataSource);
        reader.setSql("select summary_id summaryId,module_name moduleName,etl_type etlType,`status`,business_model businessModel," +
                "log_summary_start logSummaryStart,log_summary_end logSummaryEnd,start_date startDate,end_date endDate," +
                "start_numeric startNumeric,end_numeric endNumeric,batch_id batchId from etl_logsummary");
        reader.setRowMapper(new BeanPropertyRowMapper<>(ETLLogSummary.class));
        return reader;
    }

    /** Reader that pulls raw messages from Kafka; wired into {@link #step1()}. */
    @Bean
    public KafkaReader kafkaReader() {
        return new KafkaReader();
    }

    /**
     * Writer that prints each chunk to stdout and accumulates the running
     * item count in {@link #count}.
     */
    @Bean
    public ItemWriter<String> writer() {
        return items -> {
            System.out.println("write result:");
            items.forEach(System.out::println);
            count += items.size();
        };
    }

    /**
     * Defines the single-step job {@code myJob}. The {@link RunIdIncrementer}
     * allows the job to be relaunched with otherwise-identical parameters.
     *
     * @param listener completion listener notified when the job finishes
     * @param step     the single step executed by this job
     * @return the assembled Job bean
     */
    @Bean
    public Job setUserJob(JobCompletionNotificationListener listener, Step step) {
        return jobBuilderFactory.get("myJob")
                .incrementer(new RunIdIncrementer())
                .listener(listener)
                .flow(step)
                .end()
                .build();
    }

    /**
     * Chunk-oriented step: reads from Kafka, runs each item through the
     * log processor, and writes in chunks of 5.
     */
    @Bean
    public Step step1() {
        return stepBuilderFactory.get("myJob.Step1")
                .<String, String>chunk(5)
                .reader(kafkaReader())
                .processor(logItemProcesser())
                .writer(writer())
                .build();
    }

}
