package sample.batch.resume;

import java.sql.Types;

import javax.sql.DataSource;

import sample.batch.helloworld.Person;
import sample.batch.helloworld.PersonItemProcessor;

import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobInstance;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.JobRegistry;
import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.repository.JobRepository;
import org.springframework.batch.core.repository.dao.ExecutionContextDao;
import org.springframework.batch.core.repository.dao.Jackson2ExecutionContextStringSerializer;
import org.springframework.batch.core.repository.dao.JdbcExecutionContextDao;
import org.springframework.batch.core.repository.dao.JdbcJobExecutionDao;
import org.springframework.batch.core.repository.dao.JdbcJobInstanceDao;
import org.springframework.batch.core.repository.dao.JobExecutionDao;
import org.springframework.batch.core.repository.dao.JobInstanceDao;
import org.springframework.batch.core.repository.support.JobRepositoryFactoryBean;
import org.springframework.batch.item.ExecutionContext;
import org.springframework.batch.item.ItemWriter;
import org.springframework.batch.item.database.support.DataFieldMaxValueIncrementerFactory;
import org.springframework.batch.item.database.support.DefaultDataFieldMaxValueIncrementerFactory;
import org.springframework.batch.item.file.FlatFileItemReader;
import org.springframework.batch.item.file.builder.FlatFileItemReaderBuilder;
import org.springframework.batch.item.file.mapping.BeanWrapperFieldSetMapper;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.CommandLineRunner;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.annotation.Bean;
import org.springframework.core.io.ClassPathResource;
import org.springframework.jdbc.core.JdbcOperations;
import org.springframework.stereotype.Component;

/**
 * Sample that manually resumes a previously persisted Spring Batch job
 * execution: the metadata DAOs are built by hand, the stored execution is
 * re-hydrated from the {@code BATCH_*} tables, and the job is re-run on it.
 *
 * @date 2020/8/26
 */
@SpringBootApplication
@EnableBatchProcessing
public class ResumeSample {

	public static void main(String[] args) throws Exception {
		SpringApplication.run(ResumeSample.class, args);
	}


	@Autowired
	public JobBuilderFactory jobBuilderFactory;

	@Autowired
	public StepBuilderFactory stepBuilderFactory;

	/**
	 * Reads {@link Person} records from {@code sample-data.csv} on the
	 * classpath; each delimited line maps its two columns onto the
	 * {@code firstName}/{@code lastName} bean properties.
	 */
	@Bean
	public FlatFileItemReader<Person> reader() {
		// Plain configured instance instead of double-brace initialization,
		// which creates an anonymous subclass holding an enclosing-instance ref.
		BeanWrapperFieldSetMapper<Person> fieldSetMapper = new BeanWrapperFieldSetMapper<>();
		fieldSetMapper.setTargetType(Person.class);
		return new FlatFileItemReaderBuilder<Person>()
				.name("personItemReader")
				.resource(new ClassPathResource("sample-data.csv"))
				.delimited()
				.names(new String[]{"firstName", "lastName"})
				.fieldSetMapper(fieldSetMapper)
				.build();
	}

	/** Processor bean applied to each read {@link Person}. */
	@Bean
	public PersonItemProcessor processor() {
		return new PersonItemProcessor();
	}

	/** Writes each processed {@link Person} to standard output. */
	@Bean
	public ItemWriter<Person> writer() {
		return items -> {
			for (Person person : items) {
				System.out.println(person);
			}
		};
	}

	/**
	 * The job to resume. Restart stays enabled (note the commented-out
	 * {@code preventRestart()}) so a persisted execution can be re-run.
	 */
	@Bean
	public Job importUserJob(Step step1) {
		return jobBuilderFactory.get("simpleUserJob")
				.start(step1)
				//.preventRestart()
				.build();
	}

	/**
	 * Chunk-oriented step (chunk size 2). {@link IllegalArgumentException}
	 * does not roll back the chunk transaction, and the step may be started
	 * again even after it has completed successfully.
	 */
	@Bean
	public Step step1() {
		return stepBuilderFactory.get("step1")
				.<Person, Person> chunk(2)
				.reader(reader())
				.processor(processor())
				.writer(writer())
				.faultTolerant()
				.noRollback(IllegalArgumentException.class)
				.allowStartIfComplete(true)
				.build();
	}

	/**
	 * On startup, re-hydrates a persisted {@link JobExecution} from the batch
	 * metadata tables via hand-built JDBC DAOs and hands it back to the
	 * {@link Job} to resume it.
	 */
	@Component
	public static class ResumeRunner implements CommandLineRunner {

		/** Execution id resumed when none is given on the command line. */
		private static final long DEFAULT_EXECUTION_ID = 92L;

		@Autowired
		private Job job;
		@Autowired
		private JdbcOperations jdbcOperations;
		@Autowired
		private DataSource dataSource;

		@Override
		public void run(String... args) throws Exception {
			// Allow the execution id to be supplied as the first program
			// argument; fall back to the historical hard-coded id.
			long executionId = args.length > 0 ? Long.parseLong(args[0]) : DEFAULT_EXECUTION_ID;

			JobExecutionDao dao = createJobExecutionDao();
			ExecutionContextDao ecDao = createExecutionContextDao();
			JobInstanceDao instanceDao = createJobInstanceDao();

			JobExecution jobExecution = dao.getJobExecution(executionId);
			if (jobExecution == null) {
				// Fail fast with context instead of a later NullPointerException.
				throw new IllegalStateException("No job execution found for id " + executionId);
			}
			// getJobExecution() does not load these associations; re-attach them.
			jobExecution.setExecutionContext(ecDao.getExecutionContext(jobExecution));
			jobExecution.setJobInstance(instanceDao.getJobInstance(jobExecution));

			job.execute(jobExecution);
		}

		/** Builds a {@link JdbcJobInstanceDao} against the injected data source. */
		protected JobInstanceDao createJobInstanceDao() throws Exception {
			DataFieldMaxValueIncrementerFactory incrementerFactory = new DefaultDataFieldMaxValueIncrementerFactory(dataSource);
			JdbcJobInstanceDao dao = new JdbcJobInstanceDao();
			dao.setJdbcTemplate(jdbcOperations);
			dao.setJobIncrementer(incrementerFactory.getIncrementer("MySQL",
					"BATCH_JOB_SEQ"));
			dao.setTablePrefix("BATCH_");
			dao.afterPropertiesSet();
			return dao;
		}

		/** Builds a {@link JdbcJobExecutionDao} against the injected data source. */
		protected JobExecutionDao createJobExecutionDao() throws Exception {
			DataFieldMaxValueIncrementerFactory incrementerFactory = new DefaultDataFieldMaxValueIncrementerFactory(dataSource);
			JdbcJobExecutionDao dao = new JdbcJobExecutionDao();
			dao.setJdbcTemplate(jdbcOperations);
			dao.setJobExecutionIncrementer(incrementerFactory.getIncrementer("MySQL",
					"BATCH_JOB_EXECUTION_SEQ"));
			dao.setTablePrefix("BATCH_");
			dao.setClobTypeToUse(Types.CLOB);
			dao.setExitMessageLength(2000);
			dao.afterPropertiesSet();
			return dao;
		}

		/**
		 * Builds a {@link JdbcExecutionContextDao} using the Jackson string
		 * serializer for the context payload.
		 */
		protected ExecutionContextDao createExecutionContextDao() throws Exception {
			JdbcExecutionContextDao dao = new JdbcExecutionContextDao();
			dao.setJdbcTemplate(jdbcOperations);
			dao.setTablePrefix("BATCH_");
			dao.setClobTypeToUse(Types.CLOB);
			dao.setSerializer(new Jackson2ExecutionContextStringSerializer());
			// Assume the same length as the execution DAO's exit message.
			// Configure BEFORE afterPropertiesSet() so initialization sees it.
			dao.setShortContextLength(2000);
			dao.afterPropertiesSet();
			return dao;
		}
	}
}
