package com.uni.crawl.job;

import cn.hutool.core.bean.BeanUtil;
import cn.hutool.core.util.NumberUtil;
import cn.hutool.core.util.StrUtil;
import cn.hutool.core.util.URLUtil;
import cn.hutool.crypto.digest.DigestUtil;
import cn.hutool.json.JSONArray;
import cn.hutool.json.JSONObject;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.core.toolkit.Wrappers;
import com.uni.crawl.common.enums.crawl.CrawlQueueStatusEnum;
import com.uni.crawl.common.enums.crawl.CrawlQueueUrlTypeEnum;
import com.uni.crawl.common.enums.crawl.CrawlTemplateBuildInParamEnum;
import com.uni.crawl.common.util.crawl.XsltTemplatePipline;
import com.uni.crawl.common.util.crawl.XsltTemplatePiplineBuilder;
import com.uni.crawl.common.util.crawl.XsltTemplatePiplineProcessor;
import com.uni.crawl.model.entity.crawl.CrawlData;
import com.uni.crawl.model.entity.crawl.CrawlQueue;
import com.uni.crawl.model.entity.crawl.CrawlTemplate;
import com.uni.crawl.service.crawl.CrawlDataService;
import com.uni.crawl.service.crawl.CrawlInstanceService;
import com.uni.crawl.service.crawl.CrawlQueueService;
import com.uni.crawl.service.crawl.CrawlTemplateService;
import com.xxl.job.core.biz.model.ReturnT;
import com.xxl.job.core.handler.annotation.XxlJob;
import lombok.AllArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;

import java.net.URL;
import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.List;

@Component
@Slf4j
@AllArgsConstructor
public class CrawlDataResolverJob extends AbstractJob {

    private final CrawlTemplateService crawlTemplateService;
    private final CrawlInstanceService crawlInstanceService;
    private final CrawlQueueService crawlQueueService;
    private final CrawlDataService crawlDataService;

    @XxlJob("crawlDataResolverJob")
    @Override
    public ReturnT<String> handle() {
        LambdaQueryWrapper<CrawlQueue> wp = Wrappers.lambdaQuery();
        wp.eq(CrawlQueue::getCrawlStatus, CrawlQueueStatusEnum.FINISH.getStatus());

        List<CrawlQueue> queueList = crawlQueueService.list(wp);
        log.info("need resolve queue size = {}", queueList.size());

        for (CrawlQueue queue : queueList) {
            CrawlData data = BeanUtil.copyProperties(queue, CrawlData.class);
            data.setId(null);
            data.setCreateTime(LocalDateTime.now());
            data.setQueueId(queue.getId());
            if (StrUtil.isNotEmpty(queue.getHttpResponse())) {
                doHandle(queue, data);
            }

            crawlQueueService.setQueueStatus(queue.getId(), CrawlQueueStatusEnum.RESOLVED);
        }

        return success();
    }

    private void doHandle(CrawlQueue queue, CrawlData data) {
        try {
            CrawlTemplate tmpl = crawlTemplateService.getByTmplCode(queue.getTmplCode());
            String seedUrl = tmpl.getSeedUrl();
            String html = queue.getHttpResponse();

            if (CrawlQueueUrlTypeEnum.LIST.getType().equals(queue.getUrlType())) {
                XsltTemplatePipline pagingListPipline = XsltTemplatePiplineBuilder.create().seedUrl(seedUrl).tmpl(tmpl.getPagingListTmpl()).build();
                Object dataList = XsltTemplatePiplineProcessor.process(pagingListPipline, html);
                enqueueDataList(queue, dataList);

                XsltTemplatePipline pagingSelectorPipline = XsltTemplatePiplineBuilder.create().seedUrl(seedUrl).tmpl(tmpl.getPagingSelectorTmpl()).build();
                Object pagingSelector = XsltTemplatePiplineProcessor.process(pagingSelectorPipline, html);
                enqueuePagingList(tmpl, queue, pagingSelector);

            } else {
                XsltTemplatePipline pageDetailPipline = XsltTemplatePiplineBuilder.create().seedUrl(seedUrl).tmpl(tmpl.getPageDetailTmpl()).build();
                Object pageDetail = XsltTemplatePiplineProcessor.process(pageDetailPipline, html);

                data.setDataJson(pageDetail.toString());
                crawlDataService.save(data);
            }
            crawlInstanceService.calculateStatus(queue.getInstanceId());

        } catch (Exception e) {
            log.error("处理失败", e);
            crawlQueueService.setQueueStatus(queue.getId(), CrawlQueueStatusEnum.RESOLVED_ERR);
        }
    }

    private void enqueueDataList(CrawlQueue queue, Object dataList) {
        if (dataList instanceof JSONObject && ((JSONObject) dataList).containsKey(CrawlTemplateBuildInParamEnum.DATA_LIST.getParam())) {
            JSONArray dataArray = ((JSONObject) dataList).getJSONArray(CrawlTemplateBuildInParamEnum.DATA_LIST.getParam());

            for (Object dataItem : dataArray) {
                JSONObject json = (JSONObject) dataItem;
                String detailUrl = json.getStr(CrawlTemplateBuildInParamEnum.DETAIL_URL.getParam());
                if (StrUtil.isEmpty(detailUrl)) continue;
                queue.setTitle(json.getStr(CrawlTemplateBuildInParamEnum.TITLE.getParam()));
                enqueue(queue, detailUrl, CrawlQueueUrlTypeEnum.DETAIL);
            }
        }
    }

    private void enqueue(CrawlQueue queue, String detailUrl, CrawlQueueUrlTypeEnum urlTypeEnum) {
        CrawlQueue queueNew = copyQueue(queue);
        queueNew.setUrl(getCompleteUrl(queue.getUrl(), detailUrl));
        queueNew.setUrlMd5(DigestUtil.md5Hex(detailUrl));
        queueNew.setUrlType(urlTypeEnum.getType());
        queueNew.setCrawlStatus(CrawlQueueStatusEnum.INIT.getStatus());
        queueNew.setHttpStatus(null);
        if (crawlQueueService.isExists(queueNew.getInstanceId(), queueNew.getUrlMd5())) {
            return;
        }
        crawlQueueService.save(queueNew);
        crawlInstanceService.increaseTotal(queue.getInstanceId(), 1);
        log.info("enqueue tmplCode = {}, instanceId = {}, url = {}", queue.getTmplCode(), queue.getInstanceId(), detailUrl);
    }

    private String getCompleteUrl(String seedUrl, String newUrl) {
        if (StrUtil.startWith(newUrl, "http")) {
            return newUrl;
        }
        URL url = URLUtil.url(seedUrl);
        return String.format("%s://%s%s", url.getProtocol(), url.getHost(), newUrl);
    }

    private void enqueuePagingList(CrawlTemplate tmpl, CrawlQueue queue, Object pagingSelector) {
        if (pagingSelector instanceof JSONObject && ((JSONObject) pagingSelector).containsKey(CrawlTemplateBuildInParamEnum.PAGING_LIST.getParam())) {
            JSONArray pageArray = ((JSONObject) pagingSelector).getJSONArray(CrawlTemplateBuildInParamEnum.PAGING_LIST.getParam());

            List<CrawlQueue> queueNewList = new ArrayList<>();
            for (Object pageItem : pageArray) {
                String pageNum = ((JSONObject) pageItem).getStr(CrawlTemplateBuildInParamEnum.PAGE_NUM.getParam());
                String pageUrl = ((JSONObject) pageItem).getStr(CrawlTemplateBuildInParamEnum.PAGE_URL.getParam());

                if (StrUtil.isEmpty(pageNum) || StrUtil.isEmpty(pageUrl) || !NumberUtil.isNumber(pageNum)) continue;
                if (Integer.parseInt(pageNum) > tmpl.getPagingLimit()) {
                    break;
                }
                enqueue(queue, pageUrl, CrawlQueueUrlTypeEnum.LIST);
            }

            log.info("enqueue tmplCode = {}, instanceId = {}, total = {}", queue.getTmplCode(), queue.getInstanceId(), queueNewList.size());
            crawlQueueService.saveBatch(queueNewList);
        }
    }

    private CrawlQueue copyQueue(CrawlQueue queue) {
        CrawlQueue queueDetail = BeanUtil.toBean(queue, CrawlQueue.class);
        queueDetail.setCrawlStatus(CrawlQueueStatusEnum.INIT.getStatus());
        queueDetail.setId(null);
        queueDetail.setHttpResponse(null);
        queueDetail.setCreateTime(LocalDateTime.now());
        return queueDetail;
    }
}
