package top.saybook.task;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;
import top.saybook.CacheKey;
import top.saybook.crawlerutil.CrawlerUtil;
import top.saybook.entity.WebUrl;
import top.saybook.mapper.BookMapper;
import top.saybook.mapper.ChapterMapper;
import top.saybook.pojo.Book;
import top.saybook.pojo.Chapter;
import top.saybook.service.impl.BookServiceImpl;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;

@Component
public class TaskCommon {

    @Autowired(required = false)
    private BookMapper bookMapper;
    @Autowired(required = false)
    private ChapterMapper chapterMapper;
    @Autowired
    private RedisTemplate redisTemplate;

    /**
     * Nightly book sync (runs at 00:00): crawls the site's book list, diffs it
     * against the books already in the database, persists only the new ones and
     * pushes them into the {@code CacheKey.BOOK} redis hash keyed by book id.
     *
     * <p>The diff uses {@code List.removeAll}, which relies on
     * {@link Book#equals(Object)} and {@code hashCode()} being overridden.
     */
    @Scheduled(cron = "0 0 0 * * ?")
    public void taskBook() {
        List<Book> dbBookList = bookMapper.selectAll();
        List<Map> mapBookList = CrawlerUtil.crawlerBook(WebUrl.BIQUGE_URL, WebUrl.BIQUGEBOOKLIST);
        // NOTE(review): instantiating the service directly bypasses Spring injection;
        // this only works if mapListToBookList uses no autowired state — consider
        // injecting BookServiceImpl instead. Left as-is to preserve behavior.
        List<Book> crawlerBookList = new BookServiceImpl().mapListToBookList(mapBookList);

        // Set difference: keep only books not already present in the database.
        crawlerBookList.removeAll(dbBookList);

        // Guard: a batch insert with an empty list can generate invalid SQL.
        if (!crawlerBookList.isEmpty()) {
            bookMapper.insertList(crawlerBookList);

            crawlerBookList.forEach(book ->
                    redisTemplate.boundHashOps(CacheKey.BOOK).put(book.getId(), book));
        }
    }

    /**
     * Nightly chapter sync (runs at 00:20). For every book id that has a cached
     * chapter list (i.e. someone has read it):
     * <ol>
     *   <li>load the {@link Book} from the BOOK redis hash,</li>
     *   <li>load its cached chapter list,</li>
     *   <li>crawl the current chapter list from the site,</li>
     *   <li>diff (site is the source of truth) and persist only new chapters,</li>
     *   <li>replace the cached list with the full, freshly crawled list.</li>
     * </ol>
     */
    @Scheduled(cron = "0 20 0 * * ?")
    public void taskBookChapter() {
        Set bookIdS = redisTemplate.boundHashOps(CacheKey.CHAPTER).keys();
        if (bookIdS == null) {
            return; // nothing cached yet
        }

        bookIdS.forEach(bookId -> {
            Book book = (Book) redisTemplate.boundHashOps(CacheKey.BOOK).get(bookId);
            if (book == null) {
                return; // book evicted from the BOOK cache; skip this id
            }

            List<Chapter> cachedChapters =
                    (List<Chapter>) redisTemplate.boundHashOps(CacheKey.CHAPTER).get(book.getId());

            List<Map> mapChapterList =
                    CrawlerUtil.crawlerBookChater(book.getLinkUrl(), WebUrl.BIQUGEBOOKCHAPTERLIST);
            List<Chapter> crawledChapters =
                    new BookServiceImpl().mapChapterListToChapterList(mapChapterList, book.getId());

            // BUG FIX: the original aliased the "full" list to the same reference it
            // then mutated with removeAll, so the cache was rewritten with ONLY the
            // new chapters and all previously cached chapters were lost. Diff on a
            // copy instead, and write the untouched full list back below.
            List<Chapter> newChapters = new ArrayList<>(crawledChapters);
            if (cachedChapters != null) {
                // null-safe: first sync for this book has no cached list to subtract
                newChapters.removeAll(cachedChapters);
            }

            if (!newChapters.isEmpty()) {
                chapterMapper.insertList(newChapters);

                // Replace the cached entry with the complete crawled chapter list.
                redisTemplate.boundHashOps(CacheKey.CHAPTER).delete(book.getId());
                redisTemplate.boundHashOps(CacheKey.CHAPTER).put(book.getId(), crawledChapters);
            }
        });
    }


}
