package com.spark.lola.job;

import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.spark.lola.entity.statistics.StatisticsStoreProduct;
import com.spark.lola.entity.warehouse.StoreProduct;
import com.spark.lola.mapper.StatisticsStoreProductMapper;
import com.spark.lola.mapper.StoreProductMapper;
import com.spark.lola.support.common.constant.CommonConstant;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.util.CollectionUtils;

import java.util.ArrayList;
import java.util.List;

/**
 * Scheduled job that materializes daily per-store-product statistics rows.
 *
 * @author DengYeJun
 * @since 2020/5/12
 */
@Component
public class StatisticsJob {

    // SLF4J convention: loggers are static final (one per class, not per instance).
    private static final Logger LOGGER = LoggerFactory.getLogger(StatisticsJob.class);

    // NOTE(review): injected but not used by any method visible in this file —
    // confirm whether a Kafka publish step was planned or the dependency can be dropped.
    private final KafkaTemplate<?, ?> kafkaTemplate;

    @Autowired
    private StoreProductMapper storeProductMapper;

    @Autowired
    private StatisticsStoreProductMapper statisticsStoreProductMapper;

    @Autowired
    public StatisticsJob(KafkaTemplate<?, ?> kafkaTemplate) {
        this.kafkaTemplate = kafkaTemplate;
    }

    /**
     * Creates one daily statistics record per active store product.
     *
     * <p>Runs at 00:10 every day. Selects all {@code StoreProduct} rows whose
     * {@code used} column equals {@link CommonConstant#Y} and inserts a
     * {@code StatisticsStoreProduct} snapshot for each. The whole run is a
     * single transaction, rolled back on any exception.
     */
    @Scheduled(cron = "0 10 0 * * ?")
    @Transactional(rollbackFor = Exception.class)
    public void createProduct() {
        LOGGER.info("===创建每日货品统计记录===START===");
        QueryWrapper<StoreProduct> query = new QueryWrapper<>();
        query.eq("used", CommonConstant.Y);
        List<StoreProduct> products = storeProductMapper.selectList(query);
        if (CollectionUtils.isEmpty(products)) {
            // Nothing to snapshot today; still emit the END marker so every
            // scheduled run is fully traceable in the logs (the original
            // early return silently skipped it).
            LOGGER.info("===创建每日货品统计记录===END===");
            return;
        }
        for (StoreProduct product : products) {
            statisticsStoreProductMapper.insert(new StatisticsStoreProduct(product));
        }
        LOGGER.info("===创建每日货品统计记录===END===");
    }
}
