package com.example.batchprocessing;

import com.alibaba.druid.pool.DruidDataSource;
import com.mysql.jdbc.jdbc2.optional.MysqlDataSource;
import java.util.concurrent.ExecutorService;
import javax.sql.DataSource;

import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.launch.support.RunIdIncrementer;
import org.springframework.batch.item.database.BeanPropertyItemSqlParameterSourceProvider;
import org.springframework.batch.item.database.JdbcBatchItemWriter;
import org.springframework.batch.item.database.builder.JdbcBatchItemWriterBuilder;
import org.springframework.batch.item.file.FlatFileItemReader;
import org.springframework.batch.item.file.builder.FlatFileItemReaderBuilder;
import org.springframework.batch.item.file.mapping.BeanWrapperFieldSetMapper;
import org.springframework.beans.factory.ObjectProvider;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.boot.autoconfigure.transaction.TransactionManagerCustomizers;
import org.springframework.boot.jdbc.DataSourceBuilder;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import org.springframework.context.annotation.PropertySource;
import org.springframework.core.env.Environment;
import org.springframework.core.io.ClassPathResource;
import org.springframework.core.task.SimpleAsyncTaskExecutor;
import org.springframework.core.task.TaskExecutor;
import org.springframework.jdbc.datasource.DataSourceTransactionManager;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.TransactionManager;

// tag::setup[]
@Slf4j
@Configuration
@EnableBatchProcessing
public class BatchConfiguration {

	@Autowired
	public JobBuilderFactory jobBuilderFactory;

	@Autowired
	public StepBuilderFactory stepBuilderFactory;
	// end::setup[]


	/**
	 * Builds the application's Druid connection pool from externalized properties.
	 *
	 * <p>Pool sizing and eviction timings come from {@link DatasourceProperties};
	 * validation is done lazily via {@code testWhileIdle} with {@code SELECT 1}
	 * so borrowing/returning connections stays cheap.
	 *
	 * <p>FIXME: SQL-merge alarming — see IPCDruidDataSourceStatLoggerImpl for reference.
	 *
	 * @param dsConfig externalized datasource settings (driver, URL, credentials, pool sizing)
	 * @return the configured {@link DruidDataSource}; Spring invokes its lifecycle via the
	 *         bean's {@code initMethod}/{@code destroyMethod} attributes
	 * @throws Exception if Druid fails to configure the requested filters
	 */
	@Bean(name = "dataSource", initMethod = "init", destroyMethod = "close")
	public DataSource dataSource(DatasourceProperties dsConfig)
			throws Exception {
		DruidDataSource dataSource = new DruidDataSource();
		// BUGFIX: the driver class name was previously passed to setName(), which only
		// labels the pool for monitoring and left the JDBC driver class unconfigured.
		dataSource.setDriverClassName(dsConfig.getDriverClassName());
		dataSource.setUrl(dsConfig.getUrl());
		dataSource.setUsername(dsConfig.getUsername());
		dataSource.setPassword(dsConfig.getPassword());
		dataSource.setInitialSize(dsConfig.getInitialSize());
		dataSource.setMinIdle(dsConfig.getMinIdle());
		dataSource.setMaxActive(dsConfig.getMaxActive());
		dataSource.setMaxWait(dsConfig.getMaxWait());
		// BUGFIX: a second hard-coded setTimeBetweenEvictionRunsMillis(60000) used to
		// overwrite this configured value, silently ignoring the property.
		dataSource.setTimeBetweenEvictionRunsMillis(dsConfig.getTimeBetweenEvictionRunsMillis());
		dataSource.setMinEvictableIdleTimeMillis(dsConfig.getMinEvictableIdleTimeMillis());
		dataSource.setFilters("slf4j");
		dataSource.setValidationQuery("SELECT 1");
		// Validate idle connections during eviction runs only; skip per-borrow/return
		// checks to avoid a round-trip on every use.
		dataSource.setTestWhileIdle(true);
		dataSource.setTestOnReturn(false);
		dataSource.setTestOnBorrow(false);
		dataSource.setPoolPreparedStatements(false);
//		SlowSqlStatFilter statFilter = new SlowSqlStatFilter(alarmService,50L,true,true);
//		dataSource.setProxyFilters(asList(statFilter, new RWSeparationSqlDruidFilter()));
		return dataSource;
	}

	/**
	 * Task executor for asynchronous work ("we-task" threads).
	 *
	 * <p>Core pool size is 2; the other ThreadPoolTaskExecutor defaults apply
	 * (unbounded queue, so the max pool size is effectively never reached —
	 * NOTE(review): confirm this throughput is sufficient for the intended load).
	 * Spring initializes and shuts the executor down as part of the bean lifecycle.
	 *
	 * @return the shared {@link TaskExecutor} bean named {@code weTaskExecutor}
	 */
	@Bean("weTaskExecutor")
	public TaskExecutor taskExecutor(){
		ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor();
		executor.setCorePoolSize(2);
		executor.setThreadNamePrefix("we-task");
		return executor;
	}

//	@Primary
//	@Bean
//	PlatformTransactionManager transactionManager(DataSource dataSource) {
//		return new DataSourceTransactionManager(dataSource);
//	}

	// tag::readerwriterprocessor[]
//	@Bean
//	public FlatFileItemReader<Person> reader() {
//		return new FlatFileItemReaderBuilder<Person>()
//			.name("personItemReader")
//			.resource(new ClassPathResource("sample-data.csv"))
//			.delimited()
//			.names(new String[]{"firstName", "lastName"})
//			.fieldSetMapper(new BeanWrapperFieldSetMapper<Person>() {{
//				setTargetType(Person.class);
//			}})
//			.build();
//	}
//
//	@Bean
//	public PersonItemProcessor processor() {
//		return new PersonItemProcessor();
//	}
//
//	@Bean
//	public JdbcBatchItemWriter<Person> writer(DataSource dataSource) {
//
//		return new JdbcBatchItemWriterBuilder<Person>()
//			.itemSqlParameterSourceProvider(new BeanPropertyItemSqlParameterSourceProvider<>())
//			.sql("INSERT INTO people (person_id, first_name, last_name) VALUES (:firstName, :firstName, :lastName)")
//			.dataSource(dataSource)
//			.build();
//	}
//	// end::readerwriterprocessor[]
//
//	// tag::jobstep[]
//	@Bean
//	public Job importUserJob(JobCompletionNotificationListener listener, Step step1) {
//		return jobBuilderFactory.get("importUserJob")
//			.incrementer(new RunIdIncrementer())
//			.listener(listener)
//			.flow(step1)
//			.end()
//			.build();
//	}
//
//	@Bean
//	public Step step1(JdbcBatchItemWriter<Person> writer) {
//		return stepBuilderFactory.get("step1")
//			.<Person, Person> chunk(200)
//			.reader(reader())
//			.processor(processor())
//			.writer(writer)
//			.build();
//	}
//	// end::jobstep[]
}
