package com.bugs.processor;

import com.bugs.reptile.Parser;
import com.model.JobInfo;
import com.model.WorkBug;
import com.service.JobInfoService;
import com.utils.GaodeUtils;
import com.utils.SpringContextUtils;
import org.assertj.core.util.Strings;
import us.codecraft.webmagic.Page;
import us.codecraft.webmagic.Site;
import us.codecraft.webmagic.processor.PageProcessor;
import us.codecraft.webmagic.selector.Html;

import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class BaseProcessor implements PageProcessor {
    // Site-specific parser that extracts URLs and job details from downloaded HTML.
    protected Parser reptile;
    // Maps each queued URL to the kind of page it is, so process() knows how to
    // handle it; entries are consumed (removed) when the page is processed.
    // NOTE(review): WebMagic may call process() from multiple worker threads when
    // the spider is configured with thread(n) — confirm, and switch to
    // ConcurrentHashMap if so.
    protected Map<String, PageType> pagesType = new HashMap<>();
    // The crawl job this processor is running.
    protected WorkBug workBug;

    private int retryTime = 3;
    private int sleepTIme = 1000;

    /** Sets the download retry count used by {@link #getSite()}; returns {@code this} for chaining. */
    public BaseProcessor setRetryTime(int retryTime) {
        this.retryTime = retryTime;
        return this;
    }

    /**
     * Sets the delay in milliseconds between requests, used by {@link #getSite()};
     * returns {@code this} for chaining.
     * (Method name keeps the original "TIme" typo for caller compatibility.)
     */
    public BaseProcessor setSleepTIme(int sleepTIme) {
        this.sleepTIme = sleepTIme;
        return this;
    }

    /**
     * Creates a processor for the given parser and job, seeding the crawl with
     * the job's start URL registered as a pagination page.
     */
    public BaseProcessor(Parser reptile, WorkBug workBug) {
        this.reptile = reptile;
        this.workBug = workBug;
        this.pagesType.put(workBug.getStartUrl(), PageType.Pagination);
    }

    @Override
    public Site getSite() {
        return Site.me()
                .setRetryTimes(retryTime)
                .setSleepTime(sleepTIme)
                .addHeader("User-Agent", "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/78.0.3904.97 Safari/537.36");
    }

    /**
     * Dispatches a downloaded page by its recorded type: pagination pages yield
     * more URLs to crawl; detail pages are parsed into a JobInfo, geocoded, and
     * persisted. Pages whose URL has no recorded type are silently ignored.
     */
    @Override
    public void process(Page page) {
        PageType pageType = pagesType.remove(page.getRequest().getUrl());
        Html html = page.getHtml();
        if (pageType == PageType.Pagination) {
            this.processPaginationPage(page, html);
        } else if (pageType == PageType.DetailPage) {
            this.processDetailPage(page, html);
        }
    }

    // Queues a pagination URL for crawling; no-op for null/empty URLs.
    private void addPagination(Page page, String url) {
        if (Strings.isNullOrEmpty(url)) return;
        pagesType.put(url, PageType.Pagination);
        page.addTargetRequest(url);
    }

    // Queues a detail-page URL for crawling; no-op for null/empty URLs.
    private void addDetailPage(Page page, String url) {
        if (Strings.isNullOrEmpty(url)) return;
        pagesType.put(url, PageType.DetailPage);
        page.addTargetRequest(url);
    }

    /**
     * Resolves the job's longitude/latitude via the Gaode (AMap) geocoding
     * utility, then persists it. Records that end up without coordinates are
     * discarded rather than saved.
     */
    protected void setLngLatAndSave(JobInfo jobInfo) {
        // TODO: skip geocoding for records that already exist, to conserve
        // the geocoding API quota.
        GaodeUtils.setJobInfoLngLat(jobInfo);
        if (jobInfo.getLng() == null || jobInfo.getLat() == null) return;
        saveDataBase(jobInfo);
    }

    // Persists the JobInfo via the Spring-managed service. Failures are logged
    // and swallowed deliberately so one bad record does not abort the crawl.
    private void saveDataBase(JobInfo jobInfo) {
        try {
            JobInfoService jobInfoService = SpringContextUtils.getBean(JobInfoService.class);
            jobInfoService.save(jobInfo);
        } catch (Exception ex) {
            // NOTE(review): replace with a proper SLF4J logger when available.
            ex.printStackTrace();
        }
    }

    // Extracts detail-page URLs from a pagination page and queues them, then
    // queues the next pagination page if the parser reports one.
    private void processPaginationPage(Page page, Html html) {
        List<String> detailUrls = reptile.getDetailUrls(html);
        // FIX: was "detailUrls != null || detailUrls.size() > 0" — with ||, a
        // null list short-circuits into the size() call and throws NPE.
        if (detailUrls != null && !detailUrls.isEmpty()) {
            detailUrls.forEach((url) -> addDetailPage(page, url));
        }
        if (reptile.hasNextPage(html)) {
            addPagination(page, reptile.nextPageUrl(html, page.getUrl().toString()));
        }
    }

    // Parses a detail page into a JobInfo, stamps it with metadata from the
    // current job, then geocodes and persists it.
    private void processDetailPage(Page page, Html html) {
        JobInfo jobInfo = reptile.convertJobInfo(html);
        jobInfo.setCaptureUrl(page.getRequest().getUrl());
        jobInfo.setWorkBugId(workBug.getId());
        jobInfo.setReptileClass(workBug.getReptileClass());
        jobInfo.setJobType(workBug.getType());
        jobInfo.createHashcode();
        this.setLngLatAndSave(jobInfo);
    }
}
