package com.dayouzi.crawler_monitor.service.impl;

import com.dayouzi.crawler_monitor.base.exception.BusinessException;
import com.dayouzi.crawler_monitor.config.DataSource;
import com.dayouzi.crawler_monitor.mapper.CrawlerMapper;
import com.dayouzi.crawler_monitor.pojo.entity.MachineEnum;
import com.dayouzi.crawler_monitor.pojo.page.PageBean;
import com.dayouzi.crawler_monitor.pojo.query.CrawlerDetailQuery;
import com.dayouzi.crawler_monitor.pojo.query.CrawlerListQuery;
import com.dayouzi.crawler_monitor.pojo.query.ErrorTypeListQuery;
import com.dayouzi.crawler_monitor.pojo.query.HonorListQuery;
import com.dayouzi.crawler_monitor.pojo.vo.*;
import com.dayouzi.crawler_monitor.service.CrawlerService;
import com.dayouzi.crawler_monitor.utils.DateUtils;
import com.dayouzi.crawler_monitor.utils.HumpToUnderLineUtil;
import com.dayouzi.crawler_monitor.utils.ScheduleUtils;
import com.github.pagehelper.PageHelper;
import lombok.extern.slf4j.Slf4j;
import org.omg.CORBA.INTERNAL;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.util.StringUtils;

import java.time.LocalDate;
import java.time.temporal.ChronoUnit;
import java.util.*;
import java.util.stream.Collectors;

/**
 * <p>
 * 服务实现类
 * </p>
 *
 * @author DaYouZi
 * @since 2023-03-06
 */
@Service
@Slf4j
@DataSource(value = "onlyforcrawlerweb")
public class CrawlerServiceImpl implements CrawlerService {

    @Autowired
    private CrawlerMapper crawlerMapper;

    /**
     * Wraps each value into a single-entry map keyed by {@code key}.
     * All of the dropdown/list endpoints return this {@code [{key: value}, ...]}
     * shape, so the seven copy-pasted loops are collapsed into this helper.
     *
     * @param key    the map key every entry uses
     * @param values the values to wrap (one map per value)
     * @return a mutable list of single-entry maps, in input order
     */
    private static <T> ArrayList<HashMap<String, T>> wrapAsMaps(String key, List<T> values) {
        ArrayList<HashMap<String, T>> result = new ArrayList<>(values.size());
        for (T value : values) {
            HashMap<String, T> map = new HashMap<>();
            map.put(key, value);
            result.add(map);
        }
        return result;
    }

    /**
     * Pages through the crawler list, normalising the caller-supplied sort
     * column from camelCase to snake_case, then rewriting each row's
     * {@code lastTime} into the number of whole days since that run.
     *
     * @param page             page number handed to PageHelper
     * @param limit            page size
     * @param crawlerListQuery filter plus optional single-column sort spec;
     *                         its {@code orderBys} map is mutated in place
     * @return a page of rows whose {@code lastTime} now holds a day count
     */
    @Override
    public PageBean<CrawlerListVO> listPage(Integer page, Integer limit, CrawlerListQuery crawlerListQuery) {
        // Single-column sort: translate the camelCase column name to the
        // underscore form the mapper splices into its ORDER BY clause.
        HashMap orderBys = crawlerListQuery.getOrderBys();
        if (orderBys != null && !orderBys.isEmpty()) {
            Object order = orderBys.get("order");
            // Constant-first equals fixes the NPE the original risked when the
            // "order" key was missing; any value other than ASC/DESC is ignored.
            if ("ASC".equals(order) || "DESC".equals(order)) {
                String hump = (String) orderBys.get("orderColumn");
                // lastTime is displayed as "days ago", which sorts opposite to
                // the underlying timestamp — invert the requested direction.
                if ("lastTime".equals(hump)) {
                    orderBys.put("order", "ASC".equals(order) ? "DESC" : "ASC");
                }
                orderBys.put("orderColumn", HumpToUnderLineUtil.humpToUnderline(hump));
            }
        }

        // PageHelper intercepts the very next mapper call for pagination.
        PageHelper.startPage(page, limit);
        List<CrawlerListVO> list = crawlerMapper.listPage(crawlerListQuery);

        // Replace each row's last-run timestamp with the whole-day gap to today.
        // NOTE(review): assumes lastTime always parses via DateUtils.parseByHour;
        // a null/blank value would throw here — confirm against the schema.
        LocalDate today = LocalDate.now();
        for (CrawlerListVO vo : list) {
            long daysSince = DateUtils.parseByHour(vo.getLastTime()).until(today, ChronoUnit.DAYS);
            vo.setLastTime(String.valueOf(daysSince));
        }
        return new PageBean<>(list);
    }

    /** @return all distinct owners, each wrapped as {@code {"person": name}}. */
    @Override
    public ArrayList<HashMap<String, String>> getPersonList() {
        return wrapAsMaps("person", crawlerMapper.getPersonList());
    }

    /** @return dropdown entries for crawler {@code id}, each as {@code {"dropdown": value}}. */
    @Override
    public ArrayList<HashMap<String, String>> getDropdownList(Integer id) {
        return wrapAsMaps("dropdown", crawlerMapper.getDropdownList(id));
    }

    /** @return all spider types, each wrapped as {@code {"spiderType": n}}. */
    @Override
    public ArrayList<HashMap<String, Integer>> getSpiderTypeList() {
        return wrapAsMaps("spiderType", crawlerMapper.getSpiderTypeList());
    }

    /** @return machines filtered by owner/type, each as {@code {"machine": ip}}. */
    @Override
    public ArrayList<HashMap<String, String>> getMachineList(String person, Integer spiderType) {
        return wrapAsMaps("machine", crawlerMapper.getMachineList(person, spiderType));
    }

    /** @return project names filtered by owner/type, each as {@code {"projectName": name}}. */
    @Override
    public ArrayList<HashMap<String, String>> getProjectNameList(String person, Integer spiderType) {
        return wrapAsMaps("projectName", crawlerMapper.getProjectNameList(person, spiderType));
    }

    /** Straight pass-through to the mapper's preview query. */
    @Override
    public ArrayList<CrawlerPreviewVO> getCrawlerPreview(Integer id) {
        return crawlerMapper.getCrawlerPreview(id);
    }

    /**
     * Loads a crawler's detail record together with one page of its error
     * rows, expanding each row's comma-separated type string into a list.
     *
     * @param page  page number for the error-row sub-query
     * @param limit page size for the error-row sub-query
     * @return the detail VO with its {@code errList} page attached
     */
    @Override
    public CrawlerDetailVO getCrawlerDetail(Integer page, Integer limit, CrawlerDetailQuery crawlerDetailQuery) {
        CrawlerDetailVO crawlerDetailVO = crawlerMapper.getCrawlerDetail(crawlerDetailQuery);

        // Paged error list for this crawler.
        PageHelper.startPage(page, limit);
        List<CrawlerDetailListVO> errList = crawlerMapper.getCrawlerDetailList(crawlerDetailQuery);
        for (CrawlerDetailListVO row : errList) {
            // split(",") already yields a one-element array when no comma is
            // present, so the original contains(",") branch was redundant.
            row.setTypeList(new ArrayList<>(Arrays.asList(row.getType().split(","))));
        }

        crawlerDetailVO.setErrList(new PageBean<>(errList));
        return crawlerDetailVO;
    }

    /** @return distinct error types for the queried crawler, each as {@code {"type": t}}. */
    @Override
    public ArrayList<HashMap<String, String>> getCrawlerDetailTypes(ErrorTypeListQuery errorTypeListQuery) {
        return wrapAsMaps("type", crawlerMapper.getCrawlerDetailTypes(errorTypeListQuery));
    }

    /**
     * Updates a single error row's handling status.
     *
     * @return the number of rows affected (caller inspects it)
     */
    @Override
    public int updateStatus(Integer did, Integer status) {
        return crawlerMapper.updateStatus(did, status);
    }

    /**
     * Reassigns a crawler's owner.
     *
     * @throws BusinessException if exactly one row was not updated
     */
    @Override
    public void updatePerson(Integer id, String person) {
        if (crawlerMapper.updatePerson(id, person) != 1) {
            throw new BusinessException("修改失败");
        }
    }

    /**
     * Batch-updates the handling status of the selected error rows.
     *
     * @param arr ids of the rows to update; must be non-empty
     * @throws BusinessException if nothing was selected or nothing changed
     */
    @Override
    public void multipleHandle(Integer status, List arr) {
        if (arr.isEmpty()) {
            throw new BusinessException("请选择数据");
        }
        if (crawlerMapper.multipleHandle(status, arr) == 0) {
            throw new BusinessException("处理失败");
        }
    }

    /**
     * Pins the selected rows to the top of the list for {@code userId}.
     * Rows with no pin record (or one flagged 0) are inserted; already-pinned
     * rows just get their timestamp refreshed.
     *
     * @param arr row ids (any element type convertible via String.valueOf)
     * @throws BusinessException if nothing was selected
     */
    @Override
    public void toppingData(Integer userId, List arr) {
        if (arr.isEmpty()) {
            throw new BusinessException("请选择数据");
        }

        List<Integer> insertList = new ArrayList<>();
        List<Integer> updateList = new ArrayList<>();
        // Classify each id: null = no pin record yet, 0 = record present but
        // un-pinned; both go through the insert path, the rest are updates.
        // NOTE(review): presumably insertToppingData upserts for the 0 case —
        // verify against the mapper XML.
        for (Object o : arr) {
            Integer dataId = Integer.valueOf(String.valueOf(o));
            Integer isTop = crawlerMapper.getToppingData(userId, dataId);
            if (isTop == null || isTop == 0) {
                insertList.add(dataId);
            } else {
                updateList.add(dataId);
            }
        }

        // One shared timestamp so all rows pinned together sort together.
        long timestampNow = DateUtils.getTimestampNow();
        if (!insertList.isEmpty()) {
            crawlerMapper.insertToppingData(userId, timestampNow, insertList);
        }
        if (!updateList.isEmpty()) {
            crawlerMapper.updateToppingData(userId, timestampNow, updateList);
        }
    }

    /**
     * Removes the pin records of the selected rows for {@code userId}.
     *
     * @throws BusinessException if nothing was selected or nothing was deleted
     */
    @Override
    public void unToppingData(Integer userId, List arr) {
        if (arr.isEmpty()) {
            throw new BusinessException("请选择数据");
        }
        if (crawlerMapper.deleteToppingData(userId, arr) == 0) {
            throw new BusinessException("处理失败");
        }
    }

    /**
     * Batch-sets the valid flag on the selected crawlers.
     *
     * @throws BusinessException if nothing was selected or nothing changed
     */
    @Override
    public void setValid(Integer isValid, List arr) {
        if (arr.isEmpty()) {
            throw new BusinessException("请选择数据");
        }
        if (crawlerMapper.setValid(isValid, arr) == 0) {
            throw new BusinessException("处理失败");
        }
    }

    /** @return total number of crawlers. */
    @Override
    public int getCrawlerCount() {
        return crawlerMapper.getCrawlerCount();
    }

    /** @return number of crawlers currently in an error state. */
    @Override
    public int getErrorCrawlerCount() {
        return crawlerMapper.getErrorCrawlerCount();
    }

    /**
     * Migrates crawler {@code id}'s data via changePdf and marks its pdf flag
     * on, atomically — both statements share one transaction.
     */
    @Transactional
    @Override
    public void changePdf(Integer id) {
        crawlerMapper.changePdf(id);
        crawlerMapper.updatePdf(id, 1);
    }

    /** Clears crawler {@code id}'s pdf flag. */
    @Override
    public void changePdf2(Integer id) {
        crawlerMapper.updatePdf(id, 0);
    }

    /**
     * Updates a crawler's remarks text.
     *
     * @throws BusinessException if exactly one row was not updated
     */
    @Override
    public void updateRemarks(Integer id, String remarks) {
        if (crawlerMapper.updateRemarks(id, remarks) != 1) {
            throw new BusinessException("修改失败");
        }
    }

    /**
     * Binds a crawler to a scheduled task by name, resolving the task id on
     * the given machine first.
     *
     * @return the resolved scheduled-task id
     * @throws BusinessException if the task cannot be resolved or the update fails
     */
    @Override
    public Integer updateScheduledName(Integer id, String scheduledName, String machine) {
        Integer scheduledId = ScheduleUtils.getScheduledId(machine, scheduledName);
        // Null-check before unboxing: the original "scheduledId == 0" would
        // NPE if the lookup ever returned null.
        if (scheduledId == null || scheduledId == 0) {
            throw new BusinessException("未找到该定时任务，请检查定时任务名称或服务器ip是否正确");
        }

        if (crawlerMapper.updateScheduledName(id, scheduledName, scheduledId) != 1) {
            throw new BusinessException("修改失败");
        }
        return scheduledId;
    }

    /**
     * Unbinds a crawler from its scheduled task.
     *
     * @throws BusinessException if exactly one row was not updated
     */
    @Override
    public void unupdateScheduledName(Integer id) {
        if (crawlerMapper.unupdateScheduledName(id) != 1) {
            throw new BusinessException("修改失败");
        }
    }

    /**
     * Loads the scheduled tasks for an owner's machines, enriches each with
     * its machine id and execution status, and keeps only those with failures.
     *
     * @return the tasks whose failure list is non-empty
     */
    @Override
    public ArrayList<ScheduledTaskVO> getScheduledTask(String person) {
        ArrayList<ScheduledTaskVO> list = crawlerMapper.getScheduledTask(person);
        for (int i = 0; i < list.size(); i++) {
            ScheduledTaskVO taskVO = list.get(i);
            // Map the machine ip to its id via the enum.
            taskVO.setMachineId(MachineEnum.getMachineId(taskVO.getMachine()));
            // Write the enriched VO back into the list: the original assigned
            // getSituation's result to the for-each loop variable, which is
            // discarded if getSituation returns a new instance.
            list.set(i, ScheduleUtils.getSituation(taskVO));
        }

        // Keep only tasks that reported failures. toCollection(ArrayList::new)
        // replaces the original's unchecked cast of Collectors.toList(), whose
        // concrete list type is unspecified.
        return list.stream()
                .filter(task -> !task.getList().isEmpty())
                .collect(Collectors.toCollection(ArrayList::new));
    }

    /** @return autocomplete suggestions, each wrapped as {@code {"value": s}}. */
    @Override
    public ArrayList<HashMap> querySearchAsync(String codeName, String person, Integer spiderType) {
        ArrayList<String> list = crawlerMapper.querySearchAsync(codeName, person, spiderType);
        // Interface declares a raw element type, so re-wrap the typed result.
        return new ArrayList<HashMap>(wrapAsMaps("value", list));
    }

    /** Paged query over the honor list. */
    @Override
    public PageBean<HonorListVO> honorListPage(Integer page, Integer limit, HonorListQuery honorListQuery) {
        PageHelper.startPage(page, limit);
        List<HonorListVO> list = crawlerMapper.honorListPage(honorListQuery);
        return new PageBean<>(list);
    }

    /**
     * Updates a single honor row's status.
     *
     * @throws BusinessException if exactly one row was not updated
     */
    @Override
    public void honorUpdateData(Integer id, Integer status) {
        if (crawlerMapper.honorUpdateData(id, status) != 1) {
            throw new BusinessException("修改失败");
        }
    }

    /**
     * Batch-updates the status of the selected honor rows.
     *
     * @throws BusinessException if nothing was selected or nothing changed
     */
    @Override
    public void honorMultipleHandle(Integer status, List arr) {
        if (arr.isEmpty()) {
            throw new BusinessException("请选择数据");
        }
        if (crawlerMapper.honorMultipleHandle(status, arr) == 0) {
            throw new BusinessException("处理失败");
        }
    }

    /** @return honor authors, each wrapped as {@code {"author": name}}. */
    @Override
    public ArrayList<HashMap<String, String>> honorAuthorList() {
        return wrapAsMaps("author", crawlerMapper.honorAuthorList());
    }

    /** @return honor prizes, each wrapped as {@code {"prize": name}}. */
    @Override
    public ArrayList<HashMap<String, String>> honorPrizeList() {
        return wrapAsMaps("prize", crawlerMapper.honorPrizeList());
    }

    /** Straight pass-through to the company-record detail query. */
    @Override
    public CompanyRecordDetailVO getCompanyRecordDetail(Integer id) {
        return crawlerMapper.getCompanyRecordDetail(id);
    }

    /**
     * Marks every error row of the given type on crawler {@code id} as ignored.
     *
     * @return the number of rows updated (0 when there were none)
     */
    @Override
    public int oneTouchIgnore(Integer id, String type) {
        ArrayList<Integer> ids = crawlerMapper.selectErrorIds(id, type);
        // Guard the empty case: an IN () clause built from an empty list is
        // invalid SQL with most MyBatis foreach mappings.
        if (ids == null || ids.isEmpty()) {
            return 0;
        }
        return crawlerMapper.ontTouchIgnore(ids);
    }

    /** @return true iff {@code author} exists in the honor author table. */
    @Override
    public boolean verifiedAuthor(String author) {
        return crawlerMapper.getAuthor(author) != null;
    }

    /** @return valid/invalid crawler counts for the given owner. */
    @Override
    public CrawlerValidCountVO getCrawlerValidCount(String person) {
        return crawlerMapper.getCrawlerValidCount(person);
    }
}
