package com.jrx.anytxn.accounting.job.mergeFile.config;


import com.jrx.anytxn.accounting.config.AnytxnBatchProperties;
import com.jrx.anytxn.accounting.config.PathConfig;
import com.jrx.anytxn.accounting.config.VoucherFileConfig;
import com.jrx.anytxn.accounting.entity.GaVoucherInfo;
import com.jrx.anytxn.accounting.job.mergeFile.step.MergeFileListener;
import com.jrx.anytxn.accounting.job.mergeFile.step.MergeFileWriter;
import com.jrx.anytxn.common.uitl.DateUtils;
import com.jrx.anytxn.common.uitl.JsonUtils;
import com.jrx.anytxn.param.entity.PrOrganizationTable;
import com.jrx.anytxn.param.service.system.IOrganizationTableService;
import com.jrx.anytxn.transaction.constant.TransBizConstant;
import org.apache.ibatis.session.SqlSessionFactory;
import org.mybatis.spring.batch.MyBatisCursorItemReader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobExecutionListener;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.batch.core.step.tasklet.Tasklet;
import org.springframework.batch.item.ItemWriter;
import org.springframework.batch.item.support.SynchronizedItemStreamReader;
import org.springframework.batch.repeat.RepeatStatus;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.task.SimpleAsyncTaskExecutor;
import org.springframework.transaction.PlatformTransactionManager;

import javax.annotation.Resource;
import java.io.BufferedWriter;
import java.io.FileWriter;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;


/**
 * 合并传票文件 (merge voucher files): Spring Batch configuration that reads the current
 * business date's voucher records, merges them in memory, and writes the merged result
 * to a per-tenant text file, one JSON record per line.
 */
@Configuration
@EnableConfigurationProperties(VoucherFileConfig.class)
@Configuration
@EnableConfigurationProperties(VoucherFileConfig.class)
public class MergFileGenerateConfiguration {

    private static final Logger logger = LoggerFactory.getLogger(MergFileGenerateConfiguration.class);

    @Resource(name = "bizSqlSessionFactory")
    SqlSessionFactory sqlSessionFactory;
    @Autowired
    public JobBuilderFactory jobBuilderFactory;
    @Autowired
    public StepBuilderFactory stepBuilderFactory;
    @Autowired
    private VoucherFileConfig voucherFileConfig;
    @Autowired
    private AnytxnBatchProperties anytxnBatchProperties;
    @Autowired
    private IOrganizationTableService organizationTableService;
    @Autowired
    private PlatformTransactionManager platformTransactionManager;
    @Autowired
    private PathConfig pathConfig;

    // Shared accumulator: the chunk step's writer merges vouchers into this map and the
    // tasklet step drains it into the output file.
    // NOTE(review): this @Configuration class is a singleton, so the map outlives a single
    // job execution — presumably MergeFileListener clears it between runs; confirm,
    // otherwise a re-run would re-emit stale entries.
    private final Map<String, GaVoucherInfo> mergeMap = new ConcurrentHashMap<>();

    /**
     * Job definition: first merge the day's vouchers into memory (chunk step),
     * then write the merged result to a file (tasklet step).
     */
    @Bean
    public Job mergeFileGenerateJob() {
        return jobBuilderFactory.get("mergeFileGenerateJob")
                .listener(mergeFileJobExecutionListener())
                .start(mergeFileStep())
                .next(mergeFileGenerateStep())
                .build();
    }

    /**
     * Multi-threaded chunk step: reads vouchers for the current business date and lets
     * {@link MergeFileWriter} merge them into {@code mergeMap}. Chunk size and the
     * concurrency throttle come from {@link AnytxnBatchProperties}.
     */
    @Bean
    public Step mergeFileStep() {
        return stepBuilderFactory.get("mergeFileStep")
                // Items are read/processed one at a time; a full chunk is handed to the writer at once.
                .<GaVoucherInfo, GaVoucherInfo>chunk(anytxnBatchProperties.getBillingConfiguration().getChunkLimit())
                .reader(mergeFileReader())
                .writer(mergeFileWriter())
                .transactionManager(platformTransactionManager)
                // Multi-threaded step; concurrency is bounded by throttleLimit below.
                .taskExecutor(new SimpleAsyncTaskExecutor())
                .throttleLimit(anytxnBatchProperties.getBillingConfiguration().getThrottleLimit())
                .build();
    }

    /** Writer that merges each chunk of vouchers into the shared {@code mergeMap}. */
    @Bean
    public ItemWriter<? super GaVoucherInfo> mergeFileWriter() {
        return new MergeFileWriter(mergeMap);
    }

    /** Job-level listener constructed with the shared map (lifecycle handling around the job). */
    @Bean
    public JobExecutionListener mergeFileJobExecutionListener() {
        return new MergeFileListener(mergeMap);
    }

    /**
     * Step-scoped reader: a MyBatis cursor over the vouchers of the organization's current
     * business date, wrapped in a {@link SynchronizedItemStreamReader} so the
     * multi-threaded step can share it safely.
     */
    @Bean
    @StepScope
    public SynchronizedItemStreamReader<GaVoucherInfo> mergeFileReader() {
        PrOrganizationTable org = organizationTableService.findByOrgIdForBatch(TransBizConstant.DEFAULT_ORG_ID);
        Date businessDate = org.getToday();
        MyBatisCursorItemReader<GaVoucherInfo> billingCursorReader = new MyBatisCursorItemReader<>();
        // Restart state is meaningless for a reader shared between threads.
        billingCursorReader.setSaveState(false);
        billingCursorReader.setSqlSessionFactory(sqlSessionFactory);
        billingCursorReader.setQueryId("com.jrx.anytxn.accounting.mapper.ext.ExtVoucherInfoMapper.selectVoucherByDateForBillingBatch");
        Map<String, Object> map = new HashMap<>();
        map.put("businessDate", businessDate);
        billingCursorReader.setParameterValues(map);
        // Synchronized wrapper makes the cursor reader thread-safe.
        SynchronizedItemStreamReader<GaVoucherInfo> synchronizedItemStreamReader = new SynchronizedItemStreamReader<>();
        synchronizedItemStreamReader.setDelegate(billingCursorReader);
        return synchronizedItemStreamReader;
    }

    /** Tasklet step that dumps the merged vouchers to the output file. */
    @Bean
    public Step mergeFileGenerateStep() {
        return stepBuilderFactory.get("mergeFileGenerateStep")
                .tasklet(mergeFileGenerateTasklet())
                .build();
    }

    /**
     * Writes every merged voucher in {@code mergeMap} as one JSON line to
     * {@code {basePath}/mergeFile/{tenantId}/mergeFileBatch_{batchTaskId}_{yyyyMMdd}.txt}.
     *
     * Fixes over the previous version:
     * - try-with-resources: the writer is closed even when a write throws (no leaked file handle);
     * - explicit UTF-8 instead of FileWriter's platform-default charset;
     * - {@code Files.newBufferedWriter} creates/truncates the file itself (CREATE +
     *   TRUNCATE_EXISTING defaults), removing the racy exists()/createFile() sequence;
     * - per-record write failures are logged at ERROR level instead of INFO.
     */
    @Bean
    @StepScope
    public Tasklet mergeFileGenerateTasklet() {
        return (stepContribution, chunkContext) -> {
            PrOrganizationTable org = organizationTableService.findByOrgIdForBatch(TransBizConstant.DEFAULT_ORG_ID);
            String today = DateUtils.format(org.getToday(), "yyyyMMdd");
            // Target directory: one folder per tenant under the configured base path.
            String mergeFilePath = pathConfig.getBasePath() + "/mergeFile/" + org.getTenantId();
            logger.info("泰安并账文件地址为:{}", mergeFilePath);
            String mergeFileName = "mergeFileBatch_" + pathConfig.getBatchTaskId() + "_" + today + ".txt";
            logger.info("泰安并账表文件名为:{}", mergeFileName);
            Path pPath = Paths.get(mergeFilePath);
            Path fpath = pPath.resolve(mergeFileName);
            // Idempotent: creates any missing parent directories, no-op when they exist.
            Files.createDirectories(pPath);
            try (BufferedWriter bfw = Files.newBufferedWriter(fpath, StandardCharsets.UTF_8)) {
                for (GaVoucherInfo ga : mergeMap.values()) {
                    try {
                        bfw.write(JsonUtils.toJSon(ga));
                        bfw.write("\n");
                    } catch (Exception e) {
                        // Keep going on a single bad record, but record the failure loudly.
                        logger.error("生成泰安并账文件异常", e);
                    }
                }
            }
            return RepeatStatus.FINISHED;
        };
    }
}
