package com.egao.common.module.system.service.impl;

import cn.hutool.core.util.StrUtil;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import com.egao.common.core.web.JsonResult;
import com.egao.common.module.system.entity.Crawler;
import com.egao.common.module.system.mapper.CrawlerMapper;
import com.egao.common.module.system.service.CrawlerService;
import org.springframework.stereotype.Service;

import java.util.ArrayList;
import java.util.List;

/**
 * 爬虫设置接口实现类 — crawler settings service implementation.
 *
 * <p>Provides listing and batch-update of {@link Crawler} rows, plus URL
 * lookups for the two crawler categories (official / outer-preach).
 *
 * @author pyx
 */
@Service
public class CrawlerServiceImpl extends ServiceImpl<CrawlerMapper, Crawler> implements CrawlerService {

    /** Crawler type discriminator for official (官方) sources. */
    private static final int TYPE_OFFICIAL = 1;

    /** Crawler type discriminator for outer-preach (外宣) sources. */
    private static final int TYPE_OUTER_PREACH = 2;

    /**
     * Lists all crawler settings, optionally filtered by type.
     *
     * @param type crawler type filter; semantics delegated to the mapper's selectAll
     * @return matching crawler rows
     */
    @Override
    public List<Crawler> listAll(Integer type) {
        return baseMapper.selectAll(type);
    }

    /**
     * Batch-updates the given crawler settings by id.
     *
     * @param crawlerList rows to update; a null or empty list is rejected
     * @return ok result on success, error result otherwise
     */
    @Override
    public JsonResult updateList(List<Crawler> crawlerList) {
        // Fail fast on null/empty input: batching nothing is never a meaningful
        // "success", and MyBatis-Plus' empty-batch behavior is version-dependent.
        if (crawlerList == null || crawlerList.isEmpty()) {
            return JsonResult.error("修改失败");
        }
        if (updateBatchById(crawlerList)) {
            return JsonResult.ok("修改成功");
        }
        return JsonResult.error("修改失败");
    }

    /**
     * Lists the URLs of all enabled outer-preach crawlers.
     *
     * @return non-blank URLs; empty array when none match
     */
    @Override
    public String[] listOuterPreachUrl() {
        return listEnabledUrlsByType(TYPE_OUTER_PREACH);
    }

    /**
     * Lists the URLs of all enabled official crawlers.
     *
     * @return non-blank URLs; empty array when none match
     */
    @Override
    public String[] listOfficialUrl() {
        return listEnabledUrlsByType(TYPE_OFFICIAL);
    }

    /**
     * Loads the non-blank URLs of all enabled crawlers ({@code crawler = "1"})
     * of the given type. Shared by the two public URL listings, which were
     * previously copy-paste duplicates.
     *
     * @param type crawler type discriminator (1 = official, 2 = outer-preach)
     * @return non-blank URLs; never null, empty array when no rows match
     */
    private String[] listEnabledUrlsByType(int type) {
        QueryWrapper<Crawler> wrapper = new QueryWrapper<>();
        wrapper.eq("crawler", "1").eq("type", type);
        List<Crawler> crawlers = baseMapper.selectList(wrapper);

        // Presize to the row count; blank URLs are filtered out.
        List<String> urls = new ArrayList<>(crawlers.size());
        for (Crawler crawler : crawlers) {
            if (!StrUtil.isBlank(crawler.getUrl())) {
                urls.add(crawler.getUrl());
            }
        }
        // toArray(new String[0]) is the idiomatic (and JIT-friendly) form.
        return urls.toArray(new String[0]);
    }
}
