package com.tansuo365.test1.listener;

import com.alibaba.excel.context.AnalysisContext;
import com.alibaba.excel.event.AnalysisEventListener;
import com.alibaba.fastjson.JSON;
import com.tansuo365.test1.bean.InfoData;
import com.tansuo365.test1.bean.otherbean.DWuliao;
import com.tansuo365.test1.service.excelservice.IAllDatasService;
import com.tansuo365.test1.service.kucunservice.WuliaoKucunSearService;
import com.tansuo365.test1.util.BeanMapUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.PropertySource;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

//listener不能被spring管理,要每次读取excel都要new,然后里面用到spring可以构造方法传进去
//@PropertySource(value = "classpath:excel.properties")
public class UploadWuliaoDataListener extends AnalysisEventListener<InfoData> {
    private static final Logger LOGGER =
            LoggerFactory.getLogger(UploadWuliaoDataListener.class);

    // Flush to the database every BATCH_COUNT rows, then clear the buffer so tens of
    // thousands of rows never accumulate in memory (OOM protection). 3000 is known to
    // work in practice; formerly intended to be injected via @Value("${onetime.batch.count}").
    private static final int BATCH_COUNT = 3000;

    // Buffer of parsed rows awaiting batch insertion; cleared after every flush.
    private final List<InfoData> list = new ArrayList<>(BATCH_COUNT);

    // Request parameters; expected to contain numeric strings under keys "wRoot" and
    // "oSub" — NOTE(review): assumed from the casts below, confirm against the caller.
    private final Map<String, Object> paramsMap;

    // Injected service layer; different goods types supply different implementations.
    private final IAllDatasService datasService;

    /**
     * Creates a listener for one Excel read.
     * <p>
     * EasyExcel listeners must not be Spring-managed: a fresh instance is required for
     * every read, so any Spring beans the listener needs are passed in here.
     *
     * @param datasService generic goods service (prices, stock, import/export CRUD);
     *                     its {@code insertBatch} method persists each chunk of rows
     * @param paramsMap    request parameters; must contain "wRoot" and "oSub" as
     *                     numeric strings
     */
    public UploadWuliaoDataListener(IAllDatasService datasService, Map<String, Object> paramsMap) {
        this.datasService = datasService;
        this.paramsMap = paramsMap;
    }

    /**
     * Called once per parsed Excel row. Copies the row into a map, injects the w_id
     * and o_id values taken from {@code paramsMap} (these extra columns only apply to
     * the material table), converts the map back into a bean and buffers it. Flushes
     * the buffer to the database whenever it reaches {@link #BATCH_COUNT}.
     *
     * @throws IllegalStateException if either bean/map conversion fails
     */
    @Override
    public void invoke(InfoData infoData, AnalysisContext analysisContext) {
        LOGGER.info("解析到一条物料数据:{}", JSON.toJSONString(infoData));
        Map<String, Object> map;
        try {
            map = BeanMapUtil.convertBean2Map(infoData);
        } catch (Exception e) {
            // Fail fast with the cause attached. The previous printStackTrace() left
            // map == null and caused an NPE on the map.put(...) below.
            throw new IllegalStateException(
                    "Failed to convert Excel row to map: " + JSON.toJSONString(infoData), e);
        }
        LOGGER.info("解析到一条数据map:{}", JSON.toJSONString(map));
        map.put("w_id", Integer.valueOf((String) paramsMap.get("wRoot")));
        map.put("o_id", Integer.valueOf((String) paramsMap.get("oSub")));
        InfoData infoDataAndWOID;
        try {
            infoDataAndWOID = BeanMapUtil.convertMapToBean(DWuliao.class, map);
        } catch (Exception e) {
            throw new IllegalStateException(
                    "Failed to convert map back to bean: " + JSON.toJSONString(map), e);
        }
        LOGGER.info("加入参数后:{}", JSON.toJSONString(infoDataAndWOID));
        list.add(infoDataAndWOID);
        // Batch threshold reached: persist and release the buffered references.
        if (list.size() >= BATCH_COUNT) {
            saveData();
            list.clear();
        }
    }

    /**
     * Called after the last row has been parsed; flushes any rows still sitting in
     * the buffer so no trailing partial batch is lost.
     */
    @Override
    public void doAfterAllAnalysed(AnalysisContext analysisContext) {
        saveData();
        LOGGER.info("所有数据解析保存完成!");
    }

    /**
     * Persists the buffered rows via the injected service. No-op when the buffer is
     * empty (doAfterAllAnalysed may find nothing left after an exact-multiple flush),
     * which avoids issuing an empty batch INSERT.
     */
    private void saveData() {
        if (list.isEmpty()) {
            return;
        }
        LOGGER.info("{}条数据，开始存储数据库！", list.size());
        datasService.insertBatch(list);
        LOGGER.info("存储数据库成功！");
    }
}
