package io.bba.boot.app.service.impl;

import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import io.bba.boot.app.dto.BatchCrawlerDataDto;
import io.bba.boot.framework.exception.BusinessException;
import io.bba.boot.framework.page.OrderByItem;
import io.bba.boot.framework.page.OrderMapping;
import io.bba.boot.framework.page.Paging;
import io.bba.boot.app.dto.CrawlerDataDto;
import io.bba.boot.app.entity.CrawlerData;
import io.bba.boot.app.mapper.CrawlerDataMapper;
import io.bba.boot.app.query.CrawlerDataQuery;
import io.bba.boot.app.service.CrawlerDataService;
import io.bba.boot.app.vo.CrawlerDataVo;
import io.bba.boot.app.query.AppCrawlerDataQuery;
import io.bba.boot.app.vo.AppCrawlerDataVo;
import io.bba.boot.util.PagingUtil;
import io.bba.boot.util.RedisCache;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;


import java.util.Date;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;

/**
 * 采集数据 服务实现类
 *
 * @author zs
 * @since 2024-03-19
 */
@Slf4j
@Service
public class CrawlerDataServiceImpl extends ServiceImpl<CrawlerDataMapper, CrawlerData> implements CrawlerDataService {

    @Autowired
    private CrawlerDataMapper crawlerDataMapper;

    @Autowired
    private RedisCache redisCache;

    /**
     * Adds a single crawler-data record.
     *
     * @param dto incoming data; same-named properties are copied onto the entity
     * @return true if the insert succeeded
     */
    @Transactional(rollbackFor = Exception.class)
    @Override
    public boolean addCrawlerData(CrawlerDataDto dto) {
        CrawlerData crawlerData = new CrawlerData();
        BeanUtils.copyProperties(dto, crawlerData);
        return save(crawlerData);
    }

    /**
     * Updates an existing crawler-data record by id.
     *
     * @param dto incoming data; must carry the id of an existing record
     * @return true if the update succeeded
     * @throws BusinessException if no record exists for {@code dto.getId()}
     */
    @Transactional(rollbackFor = Exception.class)
    @Override
    public boolean updateCrawlerData(CrawlerDataDto dto) {
        Long id = dto.getId();
        CrawlerData crawlerData = getById(id);
        if (crawlerData == null) {
            throw new BusinessException("采集数据不存在");
        }
        // Overwrite the loaded entity with the dto's same-named properties.
        BeanUtils.copyProperties(dto, crawlerData);
        return updateById(crawlerData);
    }

    /**
     * Deletes a crawler-data record by id.
     *
     * @param id primary key of the record to remove
     * @return true if a row was deleted
     */
    @Transactional(rollbackFor = Exception.class)
    @Override
    public boolean deleteCrawlerData(Long id) {
        return removeById(id);
    }

    /**
     * Fetches a single crawler-data view object by id (admin side).
     *
     * @param id primary key
     * @return the view object, or null if not found
     */
    @Override
    public CrawlerDataVo getCrawlerDataById(Long id) {
        return crawlerDataMapper.getCrawlerDataById(id);
    }

    /**
     * Pages crawler data for the admin side, newest first by default.
     *
     * @param query paging/filter criteria
     * @return one page of results
     */
    @Override
    public Paging<CrawlerDataVo> getCrawlerDataPage(CrawlerDataQuery query) {
        OrderMapping orderMapping = new OrderMapping();
        // Map the API sort field name to the physical column.
        orderMapping.put("createTime", "create_time");
        PagingUtil.handlePage(query, orderMapping, OrderByItem.desc("create_time"));
        List<CrawlerDataVo> list = crawlerDataMapper.getCrawlerDataPage(query);
        return new Paging<>(list);
    }

    /**
     * Fetches a single crawler-data view object by id (app side).
     *
     * @param id primary key
     * @return the view object, or null if not found
     */
    @Override
    public AppCrawlerDataVo getAppCrawlerDataById(Long id) {
        return crawlerDataMapper.getAppCrawlerDataById(id);
    }

    /**
     * Pages crawler data for the app side, newest first by default.
     *
     * @param query paging/filter criteria
     * @return one page of results
     */
    @Override
    public Paging<AppCrawlerDataVo> getAppCrawlerDataPage(AppCrawlerDataQuery query) {
        OrderMapping orderMapping = new OrderMapping();
        orderMapping.put("createTime", "create_time");
        PagingUtil.handlePage(query, orderMapping, OrderByItem.desc("create_time"));
        List<AppCrawlerDataVo> list = crawlerDataMapper.getAppCrawlerDataPage(query);
        return new Paging<>(list);
    }

    /**
     * Batch-inserts crawled records, deduplicating against a Redis set keyed by
     * the batch's source URL.
     *
     * <p>Fixes over the previous version: the empty-input check now runs BEFORE
     * the list is streamed (the old order could NPE before the intended
     * BusinessException); the unused cache snapshot variable is gone; newly seen
     * items are written back to the Redis set so later batches are deduplicated
     * against them too; and a batch that turns out to be all duplicates returns
     * without issuing an empty INSERT.
     *
     * @param dtos batch payload: source URL plus the list of crawled items
     * @return true if the batch insert succeeded (or the batch was all duplicates)
     * @throws BusinessException if the payload contains no items
     */
    @Transactional(rollbackFor = Exception.class)
    @Override
    public boolean batchAddCrawlerData(BatchCrawlerDataDto dtos) {
        List<CrawlerDataDto> incoming = dtos.getDtos();
        if (incoming == null || incoming.isEmpty()) {
            throw new BusinessException("采集数据不存在");
        }

        String cacheKey = dtos.getUrl();
        // Dedup state: the set of items already seen for this URL.
        // NOTE(review): assumes getCacheSet may return an empty (or null) set for
        // an unknown key — guarded for both; confirm against RedisCache contract.
        Set<CrawlerDataDto> cached = redisCache.getCacheSet(cacheKey);
        if (cached == null || cached.isEmpty()) {
            // First batch for this URL: seed the cache with the whole batch.
            redisCache.setCacheSet(cacheKey, incoming.stream().collect(Collectors.toSet()));
        } else {
            // Drop items already seen in earlier batches.
            incoming.removeAll(cached);
            if (incoming.isEmpty()) {
                // Entire batch was duplicates — nothing to insert, not an error.
                return true;
            }
            // Record the new items so the NEXT batch is deduplicated against
            // them as well (previously the cache was never refreshed).
            cached.addAll(incoming);
            redisCache.setCacheSet(cacheKey, cached);
        }

        // Single timestamp for the whole batch keeps rows consistent.
        Date crawlerDate = new Date();
        List<CrawlerData> rows = incoming.stream().map(dto -> {
            CrawlerData crawlerData = new CrawlerData();
            BeanUtils.copyProperties(dto, crawlerData);
            crawlerData.setCrawlerDate(crawlerDate);
            return crawlerData;
        }).collect(Collectors.toList());

        return crawlerDataMapper.batchInsert(rows);
    }

}
