package com.xiezc.fetch.spider.biquge;

import com.xiezc.common.entity.BookInfo;
import com.xiezc.common.result.ListResult;
import com.xiezc.common.result.PageResult;
import com.xiezc.fetch.consumer.SchedualDaoService;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Service;
import us.codecraft.webmagic.Request;
import us.codecraft.webmagic.scheduler.QueueScheduler;

import javax.annotation.Resource;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;

@Slf4j
@Service
public class BiqugeService {

    @Resource
    private BiqugePipeline biqugePipeline;

    @Resource
    private SchedualDaoService schedualDaoService;

    /**
     * Starts an asynchronous crawl of book-info pages.
     * <p>
     * URLs that are already persisted (per {@code getAllBookInfoUrls}) are pre-loaded
     * into the duplicate remover so the spider skips them.
     *
     * @param urls seed URLs to start crawling from
     */
    public void downloadBookInfoStart(String[] urls) {
        ListResult<String> allUrls = schedualDaoService.getAllBookInfoUrls();
        // Seed the dedup set with every URL we have already fetched so the spider skips them.
        Set<String> knownUrls = allUrls.getContent().stream().collect(Collectors.toSet());
        MyHashSetDuplicateRemover duplicateRemover = new MyHashSetDuplicateRemover();
        duplicateRemover.addUrls(knownUrls);
        QueueScheduler queueScheduler = new QueueScheduler();
        queueScheduler.setDuplicateRemover(duplicateRemover);
        MySpider.create(new BiqugeBookInfoProcess())
                .setScheduler(queueScheduler)
                .addPipeline(biqugePipeline)
                .addUrl(urls)
                .thread(6)
                .runAsync();
    }

    /**
     * Crawls the chapters of every known book, page by page.
     * <p>
     * Iterates the paged book list; for each book the duplicate remover is reset and
     * re-seeded with that book's already-stored chapter URLs, then a spider run is
     * executed synchronously for the book's fetch URL.
     */
    public void downloadChapter() {
        PageResult<BookInfo> pageBookInfo = schedualDaoService.getPageBookInfo(1, 50);
        List<BookInfo> records = pageBookInfo.getRecords();
        long pages = pageBookInfo.getTotalPage();
        long current = pageBookInfo.getCurrent();
        long size = pageBookInfo.getPageSize();

        QueueScheduler queueScheduler = new QueueScheduler();
        MyHashSetDuplicateRemover duplicateRemover = new MyHashSetDuplicateRemover();
        queueScheduler.setDuplicateRemover(duplicateRemover);

        while (current <= pages) {

            // Build one Request per book, carrying the BookInfo as an extra for the pipeline.
            // Note: toArray(new Request[0]) — a pre-sized Request[1] would leave a null
            // element (and NPE below) when the page has no records.
            Request[] requests = records.stream()
                    .map(bookInfo -> {
                        Request request = new Request(bookInfo.getFetchUrl());
                        request.putExtra("bookInfo", bookInfo);
                        return request;
                    }).collect(Collectors.toList()).toArray(new Request[0]);

            for (Request request : requests) {
                // Fetch all chapter URLs already stored for this book.
                BiqugeChapterProcess biqugeProcess = new BiqugeChapterProcess();
                BookInfo bookInfo = (BookInfo) request.getExtra("bookInfo");
                ListResult<String> allByBookId = schedualDaoService.getAllByBookId(bookInfo.getId());
                // Clear the dedup set first...
                duplicateRemover.resetDuplicateCheck(null);
                // ...then seed it with this book's existing chapters so only new ones are crawled.
                duplicateRemover.addUrls(allByBookId.getContent());

                MySpider.create(biqugeProcess)
                        .setScheduler(queueScheduler)
                        .addPipeline(biqugePipeline)
                        .addRequest(request)
                        .thread(6)
                        .run();
            }

            if (current >= pages) {
                return;
            }
            current++;
            pageBookInfo = schedualDaoService.getPageBookInfo((int) current, (int) size);
            if (!pageBookInfo.getSuccess()) {
                // Failed page fetch: log and skip ahead rather than silently
                // re-processing the previous page's records.
                log.warn("getPageBookInfo failed for page {} (size {}), skipping", current, size);
                continue;
            }
            records = pageBookInfo.getRecords();
            // BUG FIX: was getPageSize(), which overwrote the total-page bound with the
            // page size and corrupted the loop condition.
            pages = pageBookInfo.getTotalPage();
        }
    }

}
