package com.xayy.health.core.listener;

import com.alibaba.excel.context.AnalysisContext;
import com.alibaba.excel.read.listener.ReadListener;
import com.alibaba.excel.util.ListUtils;
import com.alibaba.fastjson.JSON;
import com.xayy.health.mgt.model.BatchUploadData;
import com.xayy.health.mgt.service.impl.BatchUploadDataServiceImpl;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.multipart.MultipartFile;

import java.util.List;

/**
 * EasyExcel read listener that tags each parsed {@link BatchUploadData} row with the
 * source file's base name and an upload batch id, then persists rows in batches.
 *
 * <p>EasyExcel listeners must NOT be Spring singletons (they hold per-upload state),
 * so all collaborators are passed through the constructor — no field injection.
 *
 * @author zhangxa
 */
@Slf4j
public class UploadDataListener implements ReadListener<BatchUploadData> {

    /** Flush the cache to the database every N rows to bound memory usage. */
    private static final int BATCH_COUNT = 50;

    private final BatchUploadDataServiceImpl batchUploadDataServiceImpl;

    /** Batch/upload identifier stamped onto every row. */
    private final Long fileLogo;

    /**
     * Uploaded file's name without its extension, computed once up front
     * (it is identical for every row of the sheet).
     */
    private final String baseFileName;

    private List<BatchUploadData> cachedDataList = ListUtils.newArrayListWithExpectedSize(BATCH_COUNT);

    /**
     * @param batchUploadDataServiceImpl service used to persist parsed rows
     * @param file                       the uploaded Excel file (source of the file name)
     * @param fileLogo                   batch identifier applied to every parsed row
     */
    public UploadDataListener(BatchUploadDataServiceImpl batchUploadDataServiceImpl, MultipartFile file, Long fileLogo) {
        this.batchUploadDataServiceImpl = batchUploadDataServiceImpl;
        this.fileLogo = fileLogo;
        // MultipartFile.getOriginalFilename() may be null, and the name may lack an
        // extension; guard both cases instead of risking NPE / substring(0, -1).
        String originalName = file.getOriginalFilename();
        if (originalName == null) {
            originalName = "";
        }
        int dotIndex = originalName.lastIndexOf('.');
        this.baseFileName = dotIndex >= 0 ? originalName.substring(0, dotIndex) : originalName;
    }

    /**
     * Called once after the whole sheet is parsed; flushes any rows still cached.
     */
    @Override
    public void doAfterAllAnalysed(AnalysisContext analysisContext) {
        saveData();
    }

    /**
     * Called for each parsed row: stamps file metadata onto it, caches it,
     * and flushes the cache once {@link #BATCH_COUNT} rows have accumulated.
     */
    @Override
    public void invoke(BatchUploadData batchUploadData, AnalysisContext analysisContext) {
        log.info("解析到一条数据:{}", JSON.toJSONString(batchUploadData));
        batchUploadData.setFileName(baseFileName);
        batchUploadData.setFileLogo(fileLogo);
        cachedDataList.add(batchUploadData);
        if (cachedDataList.size() >= BATCH_COUNT) {
            saveData();
            cachedDataList = ListUtils.newArrayListWithExpectedSize(BATCH_COUNT);
        }
    }

    /**
     * Persists the cached rows; skipped when the cache is empty (e.g. the row
     * count is an exact multiple of {@link #BATCH_COUNT}).
     */
    private void saveData() {
        if (!cachedDataList.isEmpty()) {
            batchUploadDataServiceImpl.save(cachedDataList);
        }
    }

}
