package com.fenqing.comics.service.impl.crawler;

import cn.hutool.core.collection.CollUtil;
import cn.hutool.core.net.url.UrlBuilder;
import cn.hutool.core.net.url.UrlQuery;
import cn.hutool.http.GlobalHeaders;
import cn.hutool.http.HttpRequest;
import cn.hutool.http.HttpResponse;
import cn.hutool.http.HttpUtil;
import cn.hutool.json.JSONArray;
import cn.hutool.json.JSONObject;
import cn.hutool.json.JSONUtil;
import com.fenqing.comics.annotations.CrawlerService;
import com.fenqing.comics.commons.Constant;
import com.fenqing.comics.dao.*;
import com.fenqing.comics.entity.*;
import com.fenqing.comics.enums.CrawlerTaskHistoryEnums;
import com.fenqing.comics.enums.SourceEnums;
import com.fenqing.comics.service.IAsyncService;
import com.fenqing.comics.service.ICrawlerService;
import com.fenqing.comics.utils.MapUtils;
import com.fenqing.comics.utils.StrUtils;
import lombok.Cleanup;
import lombok.Setter;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import org.jsoup.Connection;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.select.Elements;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.transaction.annotation.Transactional;

import java.time.LocalDateTime;
import java.time.temporal.ChronoUnit;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutionException;
import java.util.function.Function;

/**
 * Crawler service for the "Baozi Manhua" (包子漫画) comics source.
 *
 * <p>Runs three independent crawl tasks — countries/regions, categories, and
 * authors-from-the-comic-listing — each guarded by a persisted
 * {@link CrawlerTaskHistoryEntity} row that records when the task is next due
 * and, in its {@code ext} JSON, how many consecutive runs produced no updates.</p>
 *
 * @author fenqing
 */
@CrawlerService(SourceEnums.Code.BAO_ZI_MAN_HUA)
@Setter(onMethod_ = @Autowired)
@Slf4j
public class BaoZiCrawlerServiceImpl implements ICrawlerService {

    private ICrawlerTaskHistoryDao iCrawlerTaskHistoryDao;

    private IAsyncService iAsyncService;

    private ISourceDao iSourceDao;

    private ICountryDao iCountryDao;

    private ICategoryDao iCategoryDao;

    private IAuthorDao iAuthorDao;

    /**
     * Entry point: launches the three crawl tasks asynchronously and blocks until all complete.
     */
    @Override
    public void start() {
        List<Runnable> tasks = Arrays.asList(
                this::startCountryCrawler,
                this::startCategoryCrawler,
                this::startAuthorAndComicsCrawler
        );
        List<Void> results = tasks.stream()
                .map(this.iAsyncService::asyncMethod)
                .map(future -> {
                    try {
                        return future.get();
                    } catch (InterruptedException e) {
                        // Restore the interrupt flag so upstream code can observe the interruption.
                        Thread.currentThread().interrupt();
                        throw new RuntimeException(e);
                    } catch (ExecutionException e) {
                        throw new RuntimeException(e);
                    }
                })
                .toList();

        log.info("执行完毕，共执行{}个任务", results.size());
    }

    /**
     * Crawls the country/region list if the country task is due (or has never run).
     */
    @Transactional
    public void startCountryCrawler() {
        // Resolve the source row; without it there is nowhere to attach history records.
        SourceEntity source = this.iSourceDao.getByCode(SourceEnums.Code.BAO_ZI_MAN_HUA);
        if (source == null) {
            return;
        }
        CrawlerTaskHistoryEntity lastHistory =
                this.iCrawlerTaskHistoryDao.getLastBySourceId(source.getId(), CrawlerTaskHistoryEnums.Type.BAOZI_COUNTRY);
        if (notYetDue(lastHistory)) {
            return;
        }
        // lastHistory may be null here (first-ever run); the body handles that.
        this.startCountryCrawlerBody(source.getId(), lastHistory);
    }

    /**
     * Crawls the category/type list if the category task is due (or has never run).
     */
    @Transactional
    public void startCategoryCrawler() {
        SourceEntity source = this.iSourceDao.getByCode(SourceEnums.Code.BAO_ZI_MAN_HUA);
        if (source == null) {
            return;
        }
        CrawlerTaskHistoryEntity lastHistory =
                this.iCrawlerTaskHistoryDao.getLastBySourceId(source.getId(), CrawlerTaskHistoryEnums.Type.BAOZI_CATEGORY);
        if (notYetDue(lastHistory)) {
            return;
        }
        this.startCategoryCrawlerBody(source.getId(), lastHistory);
    }

    /**
     * Crawls one page of the comic listing (for author extraction) if the task is due.
     * Pages advance one per run; when a page comes back empty ({@code isEnd}), the
     * next run restarts from the first page with a fresh history chain.
     */
    @Transactional
    public void startAuthorAndComicsCrawler() {
        SourceEntity source = this.iSourceDao.getByCode(SourceEnums.Code.BAO_ZI_MAN_HUA);
        if (source == null) {
            return;
        }
        CrawlerTaskHistoryEntity lastHistory =
                this.iCrawlerTaskHistoryDao.getLastBySourceId(source.getId(), CrawlerTaskHistoryEnums.Type.BAOZI_AUTHOR_BOOK);
        if (notYetDue(lastHistory)) {
            return;
        }
        if (lastHistory == null) {
            // First-ever run: start from the default page.
            this.startAuthorAndComicsCrawlerBody(source.getId(),
                    null,
                    Constant.DEFAULT_PAGE_NUM, Constant.DEFAULT_PAGE_SIZE);
            return;
        }
        JSONObject extJson = JSONUtil.parseObj(lastHistory.getExt());
        // Boolean.TRUE.equals guards against a missing "isEnd" key — direct unboxing would NPE.
        if (Boolean.TRUE.equals(extJson.getBool(Constant.EXT_IS_END))) {
            // Listing exhausted: restart from page one with a fresh (null) history chain.
            this.startAuthorAndComicsCrawlerBody(source.getId(),
                    null,
                    Constant.DEFAULT_PAGE_NUM, Constant.DEFAULT_PAGE_SIZE);
            return;
        }
        // Default to the first page if the stored ext lacks a page number.
        int page = extJson.getInt(Constant.EXT_PAGE, Constant.DEFAULT_PAGE_NUM);
        this.startAuthorAndComicsCrawlerBody(source.getId(), lastHistory, page + 1, Constant.DEFAULT_PAGE_SIZE);
    }

    /**
     * @param lastHistory last history row for a task, possibly {@code null}
     * @return {@code true} when a previous run exists and its scheduled next run is still in the future
     */
    private boolean notYetDue(CrawlerTaskHistoryEntity lastHistory) {
        return lastHistory != null && lastHistory.getNextTime().isAfter(LocalDateTime.now());
    }

    /**
     * Fetches one page of the comic listing API, persists any previously unseen author
     * names, then saves a task-history row describing this run.
     *
     * @param sourceId               id of the Baozi source row
     * @param lastCrawlerTaskHistory previous history row, or {@code null} when (re)starting from scratch
     * @param page                   1-based page number to fetch
     * @param size                   page size
     */
    public void startAuthorAndComicsCrawlerBody(Long sourceId, CrawlerTaskHistoryEntity lastCrawlerTaskHistory,
                                                int page, int size) {
        Map<String, String> params = new HashMap<>();
        params.put("type", "all");
        params.put("region", "all");
        params.put("filter", "*");
        params.put("page", String.valueOf(page));
        params.put("limit", String.valueOf(size));
        params.put("language", "cn");
        // Pass the raw origin: UrlBuilder percent-encodes query values on build(), so a
        // pre-encoded value ("https%3A%2F%2F...") would end up double-encoded (%253A).
        params.put("__amp_source_origin", "https://cn.baozimh.com");
        UrlQuery query = UrlQuery.of(params);
        String url = UrlBuilder.of("https://cn.baozimh.com/api/bzmhq/amp_comic_list")
                .setQuery(query)
                .build();
        HttpRequest httpRequest = HttpUtil.createGet(url);
        @Cleanup HttpResponse response = httpRequest.execute();
        JSONObject bodyJsonObject = JSONUtil.parseObj(response.body());
        JSONArray items = bodyJsonObject.getJSONArray("items");
        boolean haveUpdate = false;
        for (int i = 0; i < items.size(); i++) {
            JSONObject item = items.getJSONObject(i);
            String author = item.getStr("author");
            // One raw "author" field may name several authors; split/normalize before saving.
            List<String> authorList = StrUtils.streamHandles(author, Constant.BZ_AUTHOR_HANDLES);
            for (String authorName : authorList) {
                if (this.updateAuthor(authorName)) {
                    haveUpdate = true;
                }
            }
        }
        CrawlerTaskHistoryEntity crawlerTaskHistory = new CrawlerTaskHistoryEntity();
        crawlerTaskHistory.setSourceId(sourceId);
        crawlerTaskHistory.setType(CrawlerTaskHistoryEnums.Type.BAOZI_AUTHOR_BOOK);
        crawlerTaskHistory.setStatus(CrawlerTaskHistoryEnums.Status.SUCCESS);
        crawlerTaskHistory.setHaveUpdate(haveUpdate);
        crawlerTaskHistory.setTime(LocalDateTime.now());
        crawlerTaskHistory.setNextTime(this.getAuthorAndBookNextTime());
        crawlerTaskHistory.setExt(this.getAuthorAndBookExt(lastCrawlerTaskHistory, page, items.size(), haveUpdate));
        this.iCrawlerTaskHistoryDao.save(crawlerTaskHistory);
    }

    /**
     * Builds the ext JSON ({@code noUpdate}, {@code page}, {@code isEnd}) stored with the
     * author/comic task history. {@code noUpdate} counts consecutive runs without new data,
     * {@code page} is the page just crawled, {@code isEnd} flags an empty page (listing exhausted).
     */
    private String getAuthorAndBookExt(CrawlerTaskHistoryEntity lastCrawlerTaskHistory,
                                       int page,
                                       int itemSize,
                                       boolean haveUpdate) {
        int noUpdates;
        if (haveUpdate) {
            noUpdates = 0;
        } else if (lastCrawlerTaskHistory == null) {
            noUpdates = 1;
        } else {
            // Continue the streak from the previous run; default to 0 if the key is missing.
            noUpdates = JSONUtil.parseObj(lastCrawlerTaskHistory.getExt())
                    .getInt(Constant.EXT_NO_UPDATE, 0) + 1;
        }
        Map<String, Object> ext = new HashMap<>();
        ext.put(Constant.EXT_NO_UPDATE, noUpdates);
        ext.put(Constant.EXT_PAGE, page);
        ext.put(Constant.EXT_IS_END, itemSize <= 0);
        return JSONUtil.toJsonStr(ext);
    }

    /**
     * Saves the author if it is not already present.
     *
     * @return {@code true} when a new author row was created
     */
    private boolean updateAuthor(String author) {
        if (this.iAuthorDao.existsByName(author)) {
            return false;
        }
        AuthorEntity authorEntity = new AuthorEntity();
        authorEntity.setName(author);
        this.iAuthorDao.save(authorEntity);
        return true;
    }

    /**
     * Scrapes the region filter links from the classify page and persists any new country names.
     */
    private void startCountryCrawlerBody(Long sourceId, CrawlerTaskHistoryEntity lastCrawlerTaskHistory) {
        String html = HttpUtil.get("https://cn.baozimh.com/classify?type=all&region=all&state=all&filter=%2a");
        Document doc = Jsoup.parse(html);
        // The first nav group holds the region links; drop the "all regions" entry.
        Elements items = doc.select(".classify-nav:first-child .nav .item");
        items = CollUtil.filter(items, node -> !node.attr("href").contains("region=all"));
        int created = 0;
        for (var item : items) {
            String name = item.text().trim();
            if (this.iCountryDao.existsByName(name)) {
                continue;
            }
            CountryEntity country = new CountryEntity();
            country.setName(name);
            this.iCountryDao.save(country);
            created++;
        }
        this.createCountryCrawlerTaskHistory(sourceId, lastCrawlerTaskHistory, created > 0);
    }

    /**
     * Scrapes the type filter links from the classify page and persists any new category names.
     */
    @SneakyThrows
    private void startCategoryCrawlerBody(Long sourceId, CrawlerTaskHistoryEntity lastCrawlerTaskHistory) {
        Connection connection = Jsoup.newSession();
        connection.url("https://cn.baozimh.com/classify?type=all&region=all&state=all&filter=%2a");
        // Reuse hutool's global default headers (multi-values joined with ';') for the jsoup request.
        connection.headers(MapUtils.of(GlobalHeaders.INSTANCE.headers(), Function.identity(), list -> CollUtil.join(list, ";")));
        Document doc = connection.get();
        // The third nav group holds the type/category links; drop the "all types" entry.
        Elements items = doc.select(".classify-nav:nth-child(3) .nav .item");
        items = CollUtil.filter(items, node -> !node.attr("href").contains("type=all"));
        log.info("items length:{}", items.size());
        int created = 0;
        for (var item : items) {
            String name = item.text().trim();
            if (this.iCategoryDao.existsByName(name)) {
                continue;
            }
            CategoryEntity category = new CategoryEntity();
            category.setName(name);
            this.iCategoryDao.save(category);
            created++;
        }
        this.createCategoryCrawlerTaskHistory(sourceId, lastCrawlerTaskHistory, created > 0);
    }

    /**
     * Persists a history row for a completed country-crawl run.
     */
    private void createCountryCrawlerTaskHistory(Long sourceId, CrawlerTaskHistoryEntity lastCrawlerTaskHistory, boolean haveUpdate) {
        CrawlerTaskHistoryEntity crawlerTaskHistory = new CrawlerTaskHistoryEntity();
        crawlerTaskHistory.setSourceId(sourceId);
        crawlerTaskHistory.setType(CrawlerTaskHistoryEnums.Type.BAOZI_COUNTRY);
        crawlerTaskHistory.setStatus(CrawlerTaskHistoryEnums.Status.SUCCESS);
        crawlerTaskHistory.setHaveUpdate(haveUpdate);
        crawlerTaskHistory.setTime(LocalDateTime.now());
        crawlerTaskHistory.setNextTime(getCountryNextTime(lastCrawlerTaskHistory, haveUpdate));
        crawlerTaskHistory.setExt(getCountryExt(lastCrawlerTaskHistory, haveUpdate));
        this.iCrawlerTaskHistoryDao.save(crawlerTaskHistory);
    }

    /**
     * Persists a history row for a completed category-crawl run.
     */
    private void createCategoryCrawlerTaskHistory(Long sourceId, CrawlerTaskHistoryEntity lastCrawlerTaskHistory, boolean haveUpdate) {
        CrawlerTaskHistoryEntity crawlerTaskHistory = new CrawlerTaskHistoryEntity();
        crawlerTaskHistory.setSourceId(sourceId);
        crawlerTaskHistory.setType(CrawlerTaskHistoryEnums.Type.BAOZI_CATEGORY);
        crawlerTaskHistory.setStatus(CrawlerTaskHistoryEnums.Status.SUCCESS);
        crawlerTaskHistory.setHaveUpdate(haveUpdate);
        crawlerTaskHistory.setTime(LocalDateTime.now());
        crawlerTaskHistory.setNextTime(getCategoryNextTime(lastCrawlerTaskHistory, haveUpdate));
        crawlerTaskHistory.setExt(getCategoryExt(lastCrawlerTaskHistory, haveUpdate));
        this.iCrawlerTaskHistoryDao.save(crawlerTaskHistory);
    }

    /**
     * Next run time for the country task: 12 hours after a run with updates, otherwise
     * (consecutive no-update runs) * 24 hours.
     *
     * @param lastCrawlerTaskHistory previous history row, possibly {@code null}
     * @param haveUpdate             whether the current run produced updates
     * @return next scheduled run time
     */
    public LocalDateTime getCountryNextTime(CrawlerTaskHistoryEntity lastCrawlerTaskHistory, boolean haveUpdate) {
        // Delegate to the shared rule. The previous inline copy omitted the +1 on the
        // no-update counter, so a run right after an update (noUpdate=0) yielded
        // plusHours(0) — i.e. an immediate re-run loop.
        return getCommonNextTime(lastCrawlerTaskHistory, haveUpdate);
    }

    /**
     * Next run time for the category task: 12 hours after a run with updates, otherwise
     * (consecutive no-update runs) * 24 hours.
     *
     * @param lastCrawlerTaskHistory previous history row, possibly {@code null}
     * @param haveUpdate             whether the current run produced updates
     * @return next scheduled run time
     */
    public LocalDateTime getCategoryNextTime(CrawlerTaskHistoryEntity lastCrawlerTaskHistory, boolean haveUpdate) {
        return getCommonNextTime(lastCrawlerTaskHistory, haveUpdate);
    }

    /**
     * Next run time for the author/comic task: a fixed configured interval from now.
     */
    public LocalDateTime getAuthorAndBookNextTime() {
        return LocalDateTime.now().plus(Constant.BZ_CRAWLER_INTERVAL, ChronoUnit.MILLIS);
    }

    /**
     * Shared back-off rule: 12h after an update; 24h on the first-ever run; otherwise
     * the consecutive no-update count (previous streak + this run) times 24h.
     */
    private LocalDateTime getCommonNextTime(CrawlerTaskHistoryEntity lastCrawlerTaskHistory, boolean haveUpdate) {
        if (haveUpdate) {
            return LocalDateTime.now().plusHours(12);
        }
        if (lastCrawlerTaskHistory == null) {
            return LocalDateTime.now().plusHours(24);
        }
        // Default to 0 if the stored ext lacks the counter, then count this run too.
        int noUpdates = JSONUtil.parseObj(lastCrawlerTaskHistory.getExt())
                .getInt(Constant.EXT_NO_UPDATE, 0) + 1;
        return LocalDateTime.now().plusHours(noUpdates * 24L);
    }

    /**
     * Ext JSON for the country task history (currently just the no-update streak counter).
     *
     * @param lastCrawlerTaskHistory previous history row, possibly {@code null}
     * @param haveUpdate             whether the current run produced updates
     * @return serialized ext JSON
     */
    public String getCountryExt(CrawlerTaskHistoryEntity lastCrawlerTaskHistory, boolean haveUpdate) {
        return getCountryOrCategoryExt(lastCrawlerTaskHistory, haveUpdate);
    }

    /**
     * Builds the shared country/category ext JSON: {@code noUpdate} is reset to 0 on an
     * update, starts at 1 on a first no-update run, and otherwise increments the streak
     * carried over from the previous run.
     */
    private String getCountryOrCategoryExt(CrawlerTaskHistoryEntity lastCrawlerTaskHistory, boolean haveUpdate) {
        int noUpdates;
        if (haveUpdate) {
            noUpdates = 0;
        } else if (lastCrawlerTaskHistory == null) {
            noUpdates = 1;
        } else {
            noUpdates = JSONUtil.parseObj(lastCrawlerTaskHistory.getExt())
                    .getInt(Constant.EXT_NO_UPDATE, 0) + 1;
        }
        Map<String, Object> ext = new HashMap<>();
        ext.put(Constant.EXT_NO_UPDATE, noUpdates);
        return JSONUtil.toJsonStr(ext);
    }

    /**
     * Ext JSON for the category task history (currently just the no-update streak counter).
     *
     * @param lastCrawlerTaskHistory previous history row, possibly {@code null}
     * @param haveUpdate             whether the current run produced updates
     * @return serialized ext JSON
     */
    public String getCategoryExt(CrawlerTaskHistoryEntity lastCrawlerTaskHistory, boolean haveUpdate) {
        return getCountryOrCategoryExt(lastCrawlerTaskHistory, haveUpdate);
    }

}
