package com.people.netmon.reddit.probe.service.impl;

import ai.people.core.standard.fill.entity.Msg;
import ai.people.core.standard.fill.service.SendNewAccountToTopicService;
import ai.people.netmon.framework.businessenum.NewAccountNeedFillTopicEnum;
import ai.people.netmon.framework.businessenum.gather.GatherStatusEnum;
import ai.people.netmon.framework.constant.SourceCollectionConstant;
import ai.people.netmon.framework.domain.reddit.probe.entity.RedditAccount;
import ai.people.netmon.framework.exception.enums.CommonEnum;
import ai.people.netmon.framework.utils.AssertUtils;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.core.toolkit.Wrappers;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import com.people.netmon.reddit.probe.mapper.RedditAccountMapper;
import com.people.netmon.reddit.probe.service.RedditAccountService;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.util.CollectionUtils;

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;

/**
 * <p>
 * Service implementation for Reddit account ingestion.
 * </p>
 *
 * @author zhenglin
 * @since 2022-06-23
 */
@Service
@Slf4j
public class RedditAccountServiceImpl extends ServiceImpl<RedditAccountMapper, RedditAccount> implements RedditAccountService, SendNewAccountToTopicService {

    @Autowired
    KafkaTemplate<String, String> kafkaTemplate;

    /**
     * Batch-saves Reddit account records, then publishes the newly inserted
     * accounts to the "new account needs fill" Kafka topic.
     *
     * <p>Processing steps:
     * <ol>
     *   <li>Drop null entries and entries with a blank account name; initialize
     *       collection status and default collection frequency.</li>
     *   <li>De-duplicate within the batch by account name (first occurrence wins).</li>
     *   <li>Drop names that already exist in the database.</li>
     *   <li>Insert the remainder and push each as a {@link Msg} to Kafka.</li>
     * </ol>
     *
     * @param list incoming account records; must be non-empty
     * @throws RuntimeException (via {@code AssertUtils}) if {@code list} is empty
     *         or the batch insert reports failure; the transaction rolls back on
     *         any exception
     */
    @Override
    @Transactional(rollbackFor = Exception.class)
    public void saveBatchData(List<RedditAccount> list) {
        AssertUtils.isTrue(!CollectionUtils.isEmpty(list), CommonEnum.FAIL);
        log.info("接收数据条数:{}",list.size());
        // Keep only usable rows; stamp initial status and default frequency.
        list = list.stream().filter(x -> Objects.nonNull(x) && StringUtils.isNotBlank(x.getAccountName())).peek(x -> {
            x.setCollectionStatus(GatherStatusEnum.TO_BE_COLLECTED.getCode());
            if (StringUtils.isBlank(x.getCollectionFrequency())){
                x.setCollectionFrequency(SourceCollectionConstant.COLLECTION_FREQUENCY_STR);
            }
        }).collect(Collectors.toList());
        log.info("过滤为空数据条数:{}",list.size());
        // Guard: with an empty list the IN(...) clause below would render as
        // invalid SQL ("IN ()"). Nothing left to save, so stop here.
        if (CollectionUtils.isEmpty(list)) {
            return;
        }
        // De-duplicate inside the batch by account name, keeping the first
        // occurrence and preserving insertion order. (Stream#distinct would rely
        // on RedditAccount#equals, not on the account-name unique key, and was
        // previously only applied when DB duplicates happened to exist.)
        list = new ArrayList<>(list.stream()
                .collect(Collectors.toMap(RedditAccount::getAccountName, x -> x,
                        (first, dup) -> first, LinkedHashMap::new))
                .values());
        List<String> accountNames = list.stream().map(RedditAccount::getAccountName).collect(Collectors.toList());
        LambdaQueryWrapper<RedditAccount> queryWrapper = Wrappers.lambdaQuery();
        // Single DB round-trip to find which account names already exist.
        List<RedditAccount> already = list(queryWrapper.select(RedditAccount::getAccountName).in(RedditAccount::getAccountName, accountNames));
        if (!CollectionUtils.isEmpty(already)) {
            // Remove rows whose account name is already persisted.
            Set<String> alreadyName = already.stream().map(RedditAccount::getAccountName).collect(Collectors.toSet());
            list = list.stream().filter(x -> !alreadyName.contains(x.getAccountName())).collect(Collectors.toList());
        }
        log.info("过滤重复后剩余数据条数:{}",list.size());
        if (!CollectionUtils.isEmpty(list)) {
            boolean saved = saveBatch(list);
            AssertUtils.isTrue(saved, CommonEnum.FAIL);
            log.info("批量保存数量:{}", list.size());
            // Publish each newly inserted account to Kafka so downstream
            // services can enrich ("fill") it.
            List<Msg> msgList = new ArrayList<>();
            list.forEach(x -> msgList.add(new Msg(x)));
            this.sendNewAccountData(NewAccountNeedFillTopicEnum.NEW_ACCOUNT_NEED_FILL_REDDIT.name(), msgList, kafkaTemplate);
        }
    }
}
