package com.itc.bi.easyExcelListener;

import com.alibaba.excel.context.AnalysisContext;
import com.alibaba.excel.enums.CellDataTypeEnum;
import com.alibaba.excel.event.AnalysisEventListener;
import com.alibaba.excel.metadata.data.ReadCellData;
import com.alibaba.excel.util.ConverterUtils;
import com.alibaba.excel.util.ListUtils;
import com.alibaba.fastjson.JSON;
import com.itc.bi.constant.KafkaConstant;
import com.itc.bi.dto.kafka.DataSetDTO;
import com.itc.bi.mapper.DirectoryMapper;
import com.itc.bi.service.DataSetColumnService;
import com.itc.bi.service.impl.ClickHouseJDBCService;
import com.itc.bi.utils.DateUtil;
import com.itc.bi.vo.excelVo.HeardVO;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate;

import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;

/**
 * EasyExcel row listener that buffers parsed rows and batch-inserts them into a
 * ClickHouse table, then (optionally) publishes a Kafka message once the whole
 * sheet has been read so downstream consumers know the data set changed.
 *
 * <p>NOTE(review): EasyExcel creates listeners per read, so instances are not
 * Spring-managed beans — all collaborators arrive via the constructor.
 */
public class CustomInterceptorSaveData extends AnalysisEventListener<LinkedHashMap<Integer, Object>> {
    /**
     * Flush threshold: rows are buffered and written to the database in batches of
     * this size, after which the list is re-created to help garbage collection.
     * (The old comment claimed "every 5 rows"; the actual batch size is 1000.)
     */
    private static final int BATCH_COUNT = 1000;
    /** Column index -> header title, captured from the sheet's head row. */
    private Map<Integer, String> integerStringMap;
    /** Target ClickHouse table name. */
    private String tableName;
    /** Row buffer; flushed every BATCH_COUNT rows and once more at end of sheet. */
    private List<Map<String, Object>> cachedDataList = ListUtils.newArrayListWithExpectedSize(BATCH_COUNT);
    private ClickHouseJDBCService clickHouseJDBCService;
    /** Data-set id used in the completion notification; may be null (no notification). */
    private Long dtId;
    private KafkaTemplate<String, Object> kafkaTemplate;
    /** Column metadata (title + character type) used to detect date columns. */
    private List<HeardVO> heardVos;
    /**
     * Case-insensitive title -> HeardVO lookup built once in the constructor, so
     * invoke() does not re-scan heardVos for every cell of every row (the original
     * per-cell stream was O(columns x heardVos) per row).
     */
    private final Map<String, HeardVO> heardVoByTitle = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);

    /** No-arg constructor required by some instantiation paths; collaborators stay null. */
    public CustomInterceptorSaveData() {

    }

    /**
     * @param tableName             target ClickHouse table
     * @param clickHouseJDBCService service performing the batch inserts
     * @param dtId                  data-set id for the completion Kafka message; null disables it
     * @param kafkaTemplate         Kafka producer for the completion message
     * @param heardVos              column metadata; null is tolerated (no date conversion)
     */
    public CustomInterceptorSaveData(String tableName, ClickHouseJDBCService clickHouseJDBCService, Long dtId, KafkaTemplate<String, Object> kafkaTemplate, List<HeardVO> heardVos) {
        this.tableName = tableName;
        this.clickHouseJDBCService = clickHouseJDBCService;
        this.dtId = dtId;
        this.kafkaTemplate = kafkaTemplate;
        this.heardVos = heardVos;
        // Pre-index the column metadata by title. putIfAbsent keeps the FIRST match
        // for a duplicate title, mirroring the original stream().findFirst() behavior.
        if (heardVos != null) {
            for (HeardVO vo : heardVos) {
                if (vo != null && vo.getKey() != null) {
                    heardVoByTitle.putIfAbsent(vo.getKey(), vo);
                }
            }
        }
    }

    /**
     * Captures the head row as a column-index -> title map used to name row fields.
     *
     * @param headMap raw header cells keyed by column index
     * @param context analysis context
     */
    @Override
    public void invokeHead(Map<Integer, ReadCellData<?>> headMap, AnalysisContext context) {
        integerStringMap = ConverterUtils.convertToStringMap(headMap, context);
    }

    /**
     * Called for every parsed data row. Re-keys the row from column index to header
     * title, normalizes date-typed cells, buffers the row, and flushes the buffer
     * once it reaches BATCH_COUNT.
     *
     * @param data    one row value. It is same as {@link AnalysisContext#readRowHolder()}
     * @param context analysis context
     */
    @Override
    public void invoke(LinkedHashMap<Integer, Object> data, AnalysisContext context) {
        LinkedHashMap<String, Object> row = new LinkedHashMap<>();
        for (Map.Entry<Integer, Object> entry : data.entrySet()) {
            String title = integerStringMap.get(entry.getKey());
            Object value = entry.getValue();
            if (value != null && title != null) {
                HeardVO heardVO = heardVoByTitle.get(title);
                // Excel may render dates with '/' separators; normalize to '-' before parsing.
                // "date".equalsIgnoreCase(...) also guards against a null characterType.
                if (heardVO != null && "date".equalsIgnoreCase(heardVO.getCharacterType())) {
                    value = DateUtil.format(value.toString().replace("/", "-"));
                }
            }
            row.put(title, value);
        }
        cachedDataList.add(row);
        if (cachedDataList.size() >= BATCH_COUNT) {
            saveData();
            // Re-create rather than clear() so the flushed list can be garbage-collected.
            cachedDataList = ListUtils.newArrayListWithExpectedSize(BATCH_COUNT);
        }
        System.out.println("读取到一行数据: " + row);
    }

    /**
     * Called once after the whole sheet is parsed: flushes the remaining rows and,
     * when a data-set id was supplied, publishes a "data set updated" Kafka message.
     *
     * @param context analysis context
     */
    @Override
    public void doAfterAllAnalysed(AnalysisContext context) {
        saveData();
        // Notify downstream consumers that the data set was refreshed.
        if (dtId != null && kafkaTemplate != null) {
            DataSetDTO dataSetDTO = new DataSetDTO();
            dataSetDTO.setDtId(dtId);
            kafkaTemplate.send(KafkaConstant.KAFKA_TOPIC_DATA_SET_PROCESS, JSON.toJSONString(dataSetDTO));
        }
        System.out.println("所有数据读取完毕");
    }

    /**
     * Flushes the buffered rows to ClickHouse. Skips the call entirely when the
     * buffer is empty (e.g. the final flush after an exact-multiple-of-BATCH_COUNT
     * sheet, or an empty sheet) so no empty insert is issued.
     */
    private void saveData() {
        if (cachedDataList.isEmpty()) {
            return;
        }
        System.out.println("保存数据到数据库");
        this.clickHouseJDBCService.insertDataByTableNameHashMap(tableName, cachedDataList);
    }
}

