package org.wgc.study.config;

import org.springframework.batch.core.Job;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.batch.item.ItemProcessor;
import org.springframework.batch.item.ItemReader;
import org.springframework.batch.item.ItemWriter;
import org.springframework.batch.item.database.BeanPropertyItemSqlParameterSourceProvider;
import org.springframework.batch.item.database.JdbcBatchItemWriter;
import org.springframework.batch.item.database.builder.JdbcBatchItemWriterBuilder;
import org.springframework.batch.item.file.FlatFileItemReader;
import org.springframework.batch.item.file.builder.FlatFileItemReaderBuilder;
import org.springframework.batch.item.file.mapping.BeanWrapperFieldSetMapper;
import org.springframework.batch.item.file.mapping.DefaultLineMapper;
import org.springframework.batch.item.file.transform.DelimitedLineTokenizer;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.ClassPathResource;
import org.springframework.core.io.Resource;
import org.springframework.dao.DeadlockLoserDataAccessException;
import org.springframework.dao.EmptyResultDataAccessException;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.stereotype.Component;
import org.wgc.study.entity.Bloginfo;
import org.wgc.study.entity.Transaction;
import org.wgc.study.job.ReconciliationResultPreStatementSetter;
import org.wgc.study.job.UserPreStatementSetter;
import org.wgc.study.listner.MyJobListener;
import org.wgc.study.listner.MyReadListener;
import org.wgc.study.listner.MyWriteListener;
import org.wgc.study.listner.TranctionListener;

import javax.sql.DataSource;
import java.math.BigDecimal;

/***
 *@title ReconciliationJobConfig
 *@description Spring Batch configuration for a daily bank reconciliation job: reads bank-side transactions from a CSV file, compares each against the internal_trans table, and batch-writes any differences to recon_results.
 *@author wgc_j
 *@version 1.0.0
 *@create 2025-08-08 10:12
 **/
@Configuration
public class ReconciliationJobConfig {

    @Autowired
    private JobBuilderFactory jobBuilderFactory;

    @Autowired
    private StepBuilderFactory stepBuilderFactory;

    @Autowired
    private DataSource dataSource;

    @Autowired
    private JdbcTemplate jdbcTemplate;

    /**
     * Daily bank reconciliation job: a single chunk-oriented step that reads
     * bank-side transactions, compares them against internal records and
     * persists any differences.
     *
     * @return the configured reconciliation {@link Job}
     */
    @Bean
    public Job bankReconciliationJob() {
        return jobBuilderFactory.get("bankReconciliationJob")
                // Adds a per-day job parameter so the job instance is unique per run.
                .incrementer(new DailyJobIncrementer())
                .start(reconciliationStep())
                .build();
    }

    /**
     * Chunk-oriented step: reads {@link Transaction} items, transforms them
     * into {@link ReconciliationResult} differences, and batch-writes the
     * results once per chunk.
     *
     * @return the configured reconciliation {@link Step}
     */
    public Step reconciliationStep() {
        // NOTE(review): 65000 is a very large chunk — all items of a chunk are
        // held in memory until the transaction commits. Confirm this size is
        // intentional for the expected file volume.
        return stepBuilderFactory.get("reconciliationStep")
                .<Transaction, ReconciliationResult>chunk(65000)
                .reader(reader())
                // The step is typed <Transaction, ReconciliationResult>, so the
                // processor MUST be wired: without it the writer would receive
                // raw Transaction objects and fail at runtime when binding
                // ReconciliationResult parameters.
                .processor(processor(jdbcTemplate))
                .writer(writer(dataSource))
                .listener(new TranctionListener())
                .build();
    }

    /**
     * CSV file reader: one line per bank transaction, first line is a header.
     *
     * @return reader for trans-20230520.csv on the classpath
     */
    public ItemReader<Transaction> reader() {
        FlatFileItemReader<Transaction> reader = new FlatFileItemReader<>();
        reader.setResource(new ClassPathResource("trans-20230520.csv"));
        reader.setLinesToSkip(1); // skip the CSV header row

        // Map the delimited columns onto Transaction bean properties.
        DelimitedLineTokenizer tokenizer = new DelimitedLineTokenizer();
        tokenizer.setNames(new String[]{"account", "date", "amount"});

        BeanWrapperFieldSetMapper<Transaction> fieldSetMapper = new BeanWrapperFieldSetMapper<>();
        fieldSetMapper.setTargetType(Transaction.class);

        DefaultLineMapper<Transaction> lineMapper = new DefaultLineMapper<>();
        lineMapper.setLineTokenizer(tokenizer);
        lineMapper.setFieldSetMapper(fieldSetMapper);
        reader.setLineMapper(lineMapper);
        return reader;
    }

    /**
     * Comparison processor: looks up the internal record for each bank
     * transaction and reports any difference.
     *
     * <p>Returns {@code null} for matching records so they are filtered out of
     * the chunk and never reach the writer. A missing internal record (or a
     * NULL internal amount) is reported as a difference instead of failing the
     * whole job.
     *
     * @param jdbcTemplate template used to query the internal_trans table
     * @return processor producing a {@link ReconciliationResult} per difference
     */
    public ItemProcessor<Transaction, ReconciliationResult> processor(JdbcTemplate jdbcTemplate) {
        return transaction -> {
            // Look up the internal system's record for this transaction.
            String sql = "SELECT amount FROM internal_trans WHERE id = ?";
            BigDecimal internalAmount;
            try {
                internalAmount = jdbcTemplate.queryForObject(sql, BigDecimal.class, transaction.getId());
            } catch (EmptyResultDataAccessException e) {
                // No internal record: report the row rather than aborting the job.
                return new ReconciliationResult(transaction, "MISSING_INTERNAL",
                        "no internal record for id=" + transaction.getId());
            }
            if (internalAmount == null) {
                // NULL amount column — treat as missing instead of NPE on compareTo.
                return new ReconciliationResult(transaction, "MISSING_INTERNAL",
                        "internal amount is null for id=" + transaction.getId());
            }
            // compareTo (not equals) so that scale differences (e.g. 1.0 vs 1.00)
            // do not count as a mismatch.
            if (internalAmount.compareTo(transaction.getAmount()) != 0) {
                return new ReconciliationResult(transaction, "AMOUNT_MISMATCH",
                        transaction.getAmount() + " vs " + internalAmount);
            }
            return null; // no difference -> item filtered out, nothing written
        };
    }

    /**
     * @return setter that binds ReconciliationResult fields to the positional
     *         parameters of the insert statement
     */
    public ReconciliationResultPreStatementSetter preStatementSetter() {
        return new ReconciliationResultPreStatementSetter();
    }

    /**
     * Difference-report writer: batch-inserts reconciliation results.
     *
     * <p>The SQL uses positional {@code ?} placeholders, so parameters are
     * bound by the {@link ReconciliationResultPreStatementSetter}. The former
     * {@code BeanPropertyItemSqlParameterSourceProvider} only applies to named
     * ({@code :prop}) parameters and was removed as conflicting, dead config.
     *
     * @param dataSource target database for recon_results
     * @return the configured batch writer
     */
    public ItemWriter<ReconciliationResult> writer(DataSource dataSource) {
        return new JdbcBatchItemWriterBuilder<ReconciliationResult>()
                .sql("INSERT INTO recon_results (trans_id, error_type, detail) VALUES (?,?, ?)")
                .itemPreparedStatementSetter(preStatementSetter())
                .dataSource(dataSource)
                .build();
    }
}
