package ltd.hxya.novel.crawl.controller;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.TypeReference;
import lombok.extern.slf4j.Slf4j;
import ltd.hxya.novel.common.bean.Result;
import ltd.hxya.novel.common.config.ThreadPoolConfig;
import ltd.hxya.novel.common.constant.redis.RedisConstant;
import ltd.hxya.novel.common.to.book.BookTo;
import ltd.hxya.novel.common.utils.RedisUtils;
import ltd.hxya.novel.common.vo.BookIndexVo;
import ltd.hxya.novel.common.vo.SearchParam;
import ltd.hxya.novel.crawl.bean.ChapterInfo;
import ltd.hxya.novel.crawl.bean.CrawlTask;
import ltd.hxya.novel.crawl.bean.CrawlVo;
import ltd.hxya.novel.crawl.service.CrawlTaskService;
import ltd.hxya.novel.crawl.service.ICrawlSourceService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.util.StringUtils;
import org.springframework.web.bind.annotation.*;

import java.awt.print.Book;
import java.io.IOException;
import java.util.*;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

@RequestMapping("/crawl/task")
@RestController
@Slf4j
public class CrawlTaskController {

    @Autowired
    private CrawlTaskService crawlTaskService;

    @Autowired
    private ThreadPoolConfig threadPoolConfig;

    @Autowired
    private ICrawlSourceService crawlSourceService;

    @Autowired
    private RedisUtils redisUtils;

    /**
     * Crawls a novel's basic information based on the user's search selection and
     * persists it via {@link CrawlTaskService#novelInfo}.
     *
     * @param crawlBookId the book id on the crawl source site (URI template variable)
     * @param crawlId     the id of the crawl source to use
     * @return an empty success result
     */
    @GetMapping("/crawlNovel/{crawlBookId}")
    // BUGFIX: the @PathVariable name must match the URI template variable exactly;
    // the original "/crawlBookId" (leading slash) never matches "crawlBookId" and
    // would make Spring fail to bind the path variable.
    public Result crawlNovel(@PathVariable("crawlBookId") String crawlBookId, Integer crawlId) throws IOException, ExecutionException, InterruptedException {
        // Fetch the novel's basic info from the source and insert it into the database.
        crawlTaskService.novelInfo(crawlBookId, crawlId);
        return Result.success();
    }

    /**
     * Crawls a list of novels matching a keyword from the given crawl source.
     *
     * @param crawlId the id of the crawl source to search
     * @param keyword the search keyword
     * @return the list of matching books
     */
    @GetMapping("/crawlNovelList")
    public Result<List<BookTo>> crawlNovelList(Integer crawlId, String keyword) throws IOException, ExecutionException, InterruptedException {
        List<BookTo> bookTos = crawlTaskService.crawlNovelList(crawlId, keyword);
        return Result.success(bookTos);
    }

    /**
     * Fetches the full chapter index of a novel, identified by the book's
     * crawlBookId and crawlSourceId carried inside {@code bookTo}.
     *
     * @param bookTo identifies the book and its crawl source
     * @return the list of chapter index entries
     */
    @GetMapping("/crawlIndexList")
    public Result<List<ChapterInfo>> crawlIndexList(BookTo bookTo) throws IOException {
        List<ChapterInfo> chapterInfos = crawlTaskService.crawlIndexList(bookTo);
        return Result.success(chapterInfos);
    }

    /**
     * Triggers a crawl of the given book using its default crawl source binding.
     *
     * @param bookId the local book id
     * @return an empty success result
     */
    @GetMapping("/crawlNovel")
    public Result crawlNovel(String bookId) throws IOException {
        crawlTaskService.crawlNovelByCrawlId(bookId);
        return Result.success();
    }

    /**
     * Triggers a crawl of the given book from a specific crawl source.
     *
     * @param bookId   the local book id
     * @param sourceId the crawl source id to crawl from
     * @return an empty success result
     */
    @GetMapping("/crawlNovelById")
    public Result crawlNovelById(String bookId, Integer sourceId) throws IOException {
        crawlTaskService.crawlNovelByCrawlId(bookId, sourceId);
        return Result.success();
    }

    /**
     * Adds a chapter index entry resolved from the chapter content href, index,
     * book id and crawlSourceId carried in the task payload.
     *
     * @param crawlTask the crawl task describing the chapter to add
     * @return an empty success result
     */
    @PutMapping("/addBookIndex")
    public Result addBookIndex(@RequestBody CrawlTask crawlTask) {
        crawlTaskService.addBookIndex(crawlTask);
        return Result.success();
    }

    /**
     * Crawls every available book url from all configured crawl sources in
     * parallel and updates the novel information.
     *
     * <p>Each crawl source is dispatched onto the shared thread pool; this method
     * blocks until all sources have been processed.
     *
     * @return a map of available urls, keyed per source (exact key semantics are
     *         defined by {@code CrawlTaskService.allAvailableUrl} — not visible here)
     */
    @GetMapping("/allAvailableBookId")
    public Result<Map<String, List<String>>> allAvailableBookId() throws IOException, InterruptedException {
        long startMillis = System.currentTimeMillis();
        List<CrawlVo> crawlVoList = crawlSourceService.crawlSourceList();
        // BUGFIX: the map is populated concurrently by the worker tasks below, so a
        // plain HashMap is a data race. ConcurrentHashMap is safe for concurrent
        // writes (NOTE: it rejects null keys/values — the service must not put nulls).
        Map<String, List<String>> crawlUrlMap = new ConcurrentHashMap<>();
        ThreadPoolExecutor threadPoolExecutor = threadPoolConfig.threadPoolExecutor();

        List<CompletableFuture<Void>> futures = new ArrayList<>(crawlVoList.size());
        for (CrawlVo crawlVo : crawlVoList) {
            futures.add(CompletableFuture.runAsync(() -> {
                try {
                    crawlTaskService.allAvailableUrl(crawlVo, crawlUrlMap);
                } catch (IOException e) {
                    // Log and continue: one failing source must not abort the others.
                    log.error("crawl source failed: {}", crawlVo, e);
                }
            }, threadPoolExecutor));
        }
        // BUGFIX: the original polled threadPoolExecutor.isTerminated() in a
        // Thread.sleep(10000) loop, but shutdown() was never called on the pool, so
        // isTerminated() could never become true and the request hung forever.
        // Waiting on the futures themselves completes exactly when all work is done.
        CompletableFuture.allOf(futures.toArray(new CompletableFuture[0])).join();

        log.info("共使用时间为：{}毫秒", System.currentTimeMillis() - startMillis);
        return Result.success(crawlUrlMap);
    }

    /**
     * Adds a keyword used to filter out unwanted chapters, merging it with the
     * keyword set already stored in Redis.
     *
     * @param keyword the chapter-filter keyword to add
     * @return an empty success result
     */
    @PutMapping("/addFilterWord")
    public Result addChapterFilterWord(String keyword) {
        // Read the currently stored keyword set (JSON) from Redis.
        String oldKeyword = redisUtils.get(RedisConstant.CHAPTER_FILTER_KEYWORD_KEY);
        Set<String> keywordSet = new HashSet<>();
        keywordSet.add(keyword);
        if (StringUtils.isEmpty(oldKeyword)) {
            // Nothing stored yet — persist the singleton set.
            redisUtils.save(RedisConstant.CHAPTER_FILTER_KEYWORD_KEY, keywordSet);
            return Result.success();
        }
        List<String> oldKeywordList = JSON.parseObject(oldKeyword, new TypeReference<List<String>>() {
        });
        // Merge with the existing keywords; the Set removes duplicates.
        keywordSet.addAll(oldKeywordList);
        redisUtils.save(RedisConstant.CHAPTER_FILTER_KEYWORD_KEY, keywordSet);
        return Result.success();
    }

    /**
     * Returns the chapter-filter keywords currently stored in Redis.
     *
     * @return the keyword set; empty if none are stored
     */
    @GetMapping("/filterWord")
    public Result<Set<String>> filterWords() {
        String json = redisUtils.get(RedisConstant.CHAPTER_FILTER_KEYWORD_KEY);
        // ROBUSTNESS: when nothing is stored, return an empty set instead of
        // letting JSON.parseObject produce null for the caller.
        if (StringUtils.isEmpty(json)) {
            return Result.success(Collections.emptySet());
        }
        Set<String> wordList = JSON.parseObject(json, new TypeReference<Set<String>>() {});
        return Result.success(wordList);
    }

    /**
     * Replaces the stored chapter-filter keyword list with the posted JSON array.
     *
     * @param body a JSON array of keywords
     * @return an empty success result
     */
    @PostMapping("/updateFilterWord")
    public Result updateFilterWord(@RequestBody String body) {
        List<String> wordList = JSON.parseObject(body, new TypeReference<List<String>>() {
        });
        redisUtils.save(RedisConstant.CHAPTER_FILTER_KEYWORD_KEY, wordList);
        return Result.success();
    }
}
