package com.diy.sigmund.diyuser.common.middleware.easyexcel;

import com.alibaba.excel.context.AnalysisContext;
import com.alibaba.excel.event.AnalysisEventListener;
import com.alibaba.excel.metadata.data.ReadCellData;
import com.alibaba.fastjson.JSON;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.baomidou.mybatisplus.extension.service.IService;
import com.diy.sigmund.diycommon.entity.base.exception.DiyRuntimeException;
import com.diy.sigmund.diycommon.util.json.JacksonUtil;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.scheduling.annotation.Async;

/**
 * @author ylm-sigmund
 * @since 2023/4/20 17:33
 */
public class ImportExcelListener<T> extends AnalysisEventListener<T> {

    private static final Logger log = LoggerFactory.getLogger(ImportExcelListener.class);
    /**
     * Flush to the database every 1000 rows, then replace the buffer so the
     * saved batch becomes unreachable and can be garbage-collected.
     */
    private static final int BATCH_COUNT = 1000;
    /**
     * Upper bound checked against EasyExcel's approximate total row number.
     * NOTE(review): presumably 50000 data rows plus the header row — confirm
     * against the import template's documented limit.
     */
    private static final int MAX_APPROXIMATE_ROWS = 50001;
    /**
     * Temporary buffer for rows parsed from the current sheet.
     */
    private List<T> cachedData = new ArrayList<>(BATCH_COUNT);
    /**
     * Persistence service used to batch-save parsed rows. The listener itself
     * cannot be a Spring singleton (it is stateful), so the Spring-managed
     * service is handed in through the constructor.
     */
    private final IService<T> iService;

    /**
     * Creates a listener that persists parsed rows through the given service.
     * A new listener must be created for every read because it holds state.
     *
     * @param iService Spring-managed service used for batch persistence
     */
    public ImportExcelListener(IService<T> iService) {
        this.iService = iService;
    }

    /**
     * Invoked once per header row. Validates the sheet size up front and
     * configures the reader to keep empty rows.
     */
    @Override
    public void invokeHead(Map<Integer, ReadCellData<?>> headMap, AnalysisContext context) {
        // May be null for formats where the total cannot be estimated; skip the
        // limit check in that case instead of NPE-ing on unboxing.
        Integer approximateTotalRowNumber = context.readSheetHolder().getApproximateTotalRowNumber();
        log.info("总条数：{}", approximateTotalRowNumber);
        if (approximateTotalRowNumber != null && approximateTotalRowNumber > MAX_APPROXIMATE_ROWS) {
            throw new DiyRuntimeException("行数超过限制");
        }
        log.info("解析到一条头数据:{}", JacksonUtil.toJson(headMap));
        context.readWorkbookHolder().setIgnoreEmptyRow(false);
    }

    /**
     * Invoked for every parsed data row; buffers the row and flushes the
     * buffer once it reaches {@link #BATCH_COUNT}.
     */
    @Override
    public void invoke(T t, AnalysisContext analysisContext) {
        // Debug level with a guard: serializing and info-logging every row of a
        // sheet that may hold ~50k rows is too expensive for production logs.
        // JacksonUtil is used instead of fastjson for consistency with invokeHead.
        if (log.isDebugEnabled()) {
            log.debug("解析到一条数据:{}", JacksonUtil.toJson(t));
        }
        cachedData.add(t);
        if (cachedData.size() >= BATCH_COUNT) {
            batchSaveData();
            // Reassign rather than clear() so the just-saved list can be GC'd.
            cachedData = new ArrayList<>(BATCH_COUNT);
        }
    }

    /**
     * Invoked after the whole sheet has been read; flushes any rows left in
     * the buffer so the tail of the file is also persisted.
     */
    @Override
    public void doAfterAllAnalysed(AnalysisContext context) {
        batchSaveData();
    }

    /**
     * Persists the buffered rows in a single batch. No-op when the buffer is
     * empty (e.g. an empty sheet, or a row count that is an exact multiple of
     * {@link #BATCH_COUNT} at end-of-file).
     *
     * <p>NOTE: the former {@code @Async} annotation was removed — it never took
     * effect because this class is created with {@code new} (not a Spring bean)
     * and the call is a self-invocation, which bypasses the async proxy. Had it
     * worked, it would also have raced with the buffer reset in {@link #invoke}.
     */
    public void batchSaveData() {
        if (cachedData.isEmpty()) {
            return;
        }
        log.info("{}条数据，开始存储数据库!", cachedData.size());
        iService.saveBatch(cachedData);
        log.info("存储数据库成功!");
    }
}
