package com.cybertron.am.wordpress.job;

import com.cybertron.am.wordpress.domain.*;
import com.cybertron.am.wordpress.persistence.*;
import com.cybertron.am.wordpress.util.HttpUtil;
import com.cybertron.am.wordpress.util.UrlUtil;
import com.ne.boot.common.util.JsonUtil;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import org.springframework.stereotype.Component;
import org.springframework.transaction.annotation.Transactional;

import javax.annotation.PostConstruct;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

@Slf4j
@Component
public class ParserAdapter {
    @Autowired
    private ApplicationContext applicationContext;
    @Autowired
    private CrawlSeedMapper crawlSeedMapper;
    @Autowired
    private CrawlCategoryMapper categoryMapper;
    @Autowired
    private CrawlVisitMapper visitMapper;
    @Autowired
    private CrawlQueueMapper crawlQueueMapper;
    @Autowired
    private CrawlProductMapper crawlProductMapper;
    // Registry of site parsers keyed by the domain each parser declares via Parser#domain().
    private final Map<String, Parser> parsers = new HashMap<>();

    /**
     * Collects every {@link Parser} bean from the application context and indexes
     * it by its declared domain. Runs once after dependency injection completes.
     */
    @PostConstruct
    public void init() {
        applicationContext.getBeansOfType(Parser.class)
                .values()
                .forEach(parser -> parsers.put(parser.domain(), parser));
        log.debug("parser init success size {}", parsers.size());
    }

    /**
     * Fetches and parses the page behind a queue entry, dispatching on the entry
     * type: SEED pages yield categories, CATEGORY/NEXT_PAGE pages yield product
     * and pagination links, PRODUCT pages yield the product record itself.
     * Already-visited URLs are skipped. Unlike the previous version, missing
     * parser/seed rows and empty HTTP responses are logged and skipped instead
     * of throwing a {@link NullPointerException}.
     *
     * @param queue the queue entry to process
     */
    public void crawl(CrawlQueue queue) {
        if (isVisited(queue.getUrl())) {
            return;
        }
        log.info("开始解析{}", JsonUtil.writeValueAsString(queue));
        String domain = UrlUtil.getDomain(queue.getUrl());
        // Guard: no parser registered for this domain — skip instead of NPE below.
        Parser parser = parsers.get(domain);
        if (parser == null) {
            log.warn("no parser registered for domain {}, skip {}", domain, queue.getUrl());
            return;
        }
        // Guard: a null/blank body would make Jsoup.parse throw.
        String html = HttpUtil.get(queue.getUrl());
        if (StringUtils.isBlank(html)) {
            log.warn("empty response for {}", queue.getUrl());
            return;
        }
        // Guard: seed row is required for setSeedId in the SEED and PRODUCT branches.
        CrawlSeed crawlSeed = crawlSeedMapper.selectById(DigestUtils.md5Hex(domain));
        if (crawlSeed == null) {
            log.warn("no seed record for domain {}, skip {}", domain, queue.getUrl());
            return;
        }
        Document document = Jsoup.parse(html);
        CrawlResult crawlResult = CrawlResult.builder()
                .domain(domain)
                .html(html)
                .document(document)
                .url(queue.getUrl())
                .build();
        switch (queue.getType()) {
            case SEED:
                handleSeed(queue, parser, crawlResult, crawlSeed);
                break;
            case CATEGORY:
            case NEXT_PAGE:
                handleListing(queue, parser, crawlResult);
                break;
            case PRODUCT:
                handleProduct(queue, parser, crawlResult, crawlSeed);
                break;
            default:
                break;
        }
    }

    /** Persists newly discovered categories of a seed page and enqueues their URLs. */
    private void handleSeed(CrawlQueue queue, Parser parser, CrawlResult crawlResult, CrawlSeed crawlSeed) {
        List<CrawlCategory> categories = parser.getCategory(crawlResult);
        if (CollectionUtils.isEmpty(categories)) {
            return;
        }
        for (CrawlCategory category : categories) {
            category.setSeedId(crawlSeed.getId());
            if (categoryMapper.selectById(category.getId()) == null) {
                categoryMapper.insert(category);
            }
            if (StringUtils.isNotBlank(category.getUrl())) {
                putQueue(category.getUrl(), CrawlQueue.Type.CATEGORY, queue.getUrl());
            }
        }
    }

    /** Enqueues product URLs and pagination URLs found on a category/listing page. */
    private void handleListing(CrawlQueue queue, Parser parser, CrawlResult crawlResult) {
        // Products found via a NEXT_PAGE still belong to the original category page,
        // so propagate that page's URL as the refer for everything we enqueue.
        String refer = queue.getType() == CrawlQueue.Type.CATEGORY ? queue.getUrl() : queue.getRefer();
        List<String> productUrls = parser.getProductUrl(crawlResult);
        if (CollectionUtils.isNotEmpty(productUrls)) {
            for (String url : productUrls) {
                putQueue(url, CrawlQueue.Type.PRODUCT, refer);
            }
        }
        List<String> nextPageUrls = parser.getNextPageUrl(crawlResult);
        if (CollectionUtils.isNotEmpty(nextPageUrls)) {
            for (String url : nextPageUrls) {
                putQueue(url, CrawlQueue.Type.NEXT_PAGE, refer);
            }
        }
    }

    /** Persists the product of a product page unless it was already stored. */
    private void handleProduct(CrawlQueue queue, Parser parser, CrawlResult crawlResult, CrawlSeed crawlSeed) {
        CrawlProduct product = parser.getProduct(crawlResult);
        if (product == null) {
            return;
        }
        if (crawlProductMapper.selectById(DigestUtils.md5Hex(queue.getUrl())) != null) {
            return;
        }
        product.setSeedId(crawlSeed.getId());
        // Refer is the listing page the product was discovered on; its category row
        // may be missing — store the product without a category rather than NPE.
        CrawlCategory category = categoryMapper.selectById(DigestUtils.md5Hex(queue.getRefer()));
        if (category != null) {
            product.setCategoryId(category.getId());
        } else {
            log.warn("no category record for refer {}, product {} stored without category",
                    queue.getRefer(), queue.getUrl());
        }
        crawlProductMapper.insert(product);
    }

    /** Returns true when the URL already has a row in the visit table. */
    private synchronized boolean isVisited(String url) {
        return visitMapper.selectById(DigestUtils.md5Hex(url)) != null;
    }

    /**
     * Inserts a queue entry for the URL unless one already exists. The entry id
     * is the MD5 of the URL (computed once), the priority is derived from the
     * entry type via {@link #priorityOf}.
     */
    private synchronized void putQueue(String url, CrawlQueue.Type type, String refer) {
        String id = DigestUtils.md5Hex(url);
        if (crawlQueueMapper.selectById(id) != null) {
            return;
        }
        crawlQueueMapper.insert(CrawlQueue.builder()
                .id(id)
                .url(url)
                .type(type)
                .refer(refer)
                .priority(priorityOf(type))
                .build());
    }

    /**
     * Maps a queue type to its numeric priority: SEED=0, CATEGORY=1, NEXT_PAGE=2,
     * PRODUCT=3. NOTE(review): whether lower or higher drains first is defined by
     * the queue consumer, which is not visible in this file.
     */
    private int priorityOf(CrawlQueue.Type type) {
        switch (type) {
            case CATEGORY:
                return 1;
            case NEXT_PAGE:
                return 2;
            case PRODUCT:
                return 3;
            default:
                return 0;
        }
    }

    /**
     * Marks a queue entry as visited (idempotently) and removes it from the
     * queue, inside one transaction so a failure cannot leave the entry both
     * visited and still queued.
     */
    @Transactional(rollbackFor = Throwable.class)
    public void visit(CrawlQueue queue) {
        String id = DigestUtils.md5Hex(queue.getUrl());
        if (visitMapper.selectById(id) == null) {
            visitMapper.insert(CrawlVisit.builder()
                    .id(id)
                    .seedId(DigestUtils.md5Hex(UrlUtil.getDomain(queue.getUrl())))
                    .url(queue.getUrl())
                    .build());
        }
        crawlQueueMapper.deleteById(queue.getId());
    }
}
