/*
 * Copyright (C) GSX Techedu Inc. All Rights Reserved
 * Unauthorized copying of this file, via any medium is strictly prohibited
 * Proprietary and confidential
 */

package com.nime.novel.crawl.core.detail.impl;

import com.nime.novel.crawl.constant.ChannelEnum;
import com.nime.novel.crawl.constant.StatusEnum;
import com.nime.novel.crawl.core.detail.DetailGetService;
import com.nime.novel.crawl.domain.BookEntity;
import com.nime.novel.crawl.domain.ChapterEntity;
import com.nime.novel.crawl.domain.ContentEntity;
import com.nime.novel.crawl.domain.CrawlUrl;
import com.nime.novel.crawl.mapper.CrawlChapterMapper;
import com.nime.novel.crawl.mapper.CrawlContentMapper;
import com.nime.novel.crawl.utils.HttpUtil;
import lombok.extern.slf4j.Slf4j;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.select.Elements;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

/**
 * @author liujialiang
 * @description Crawls a chapter page (including its follow-up content pages), parses the
 *              chapter text via jsoup, and persists the chapter and its content.
 * @team wuhan operational dev.
 * @date 2020/8/30 7:10 PM
 **/
@Slf4j
@Service
public class ContentDetailGetServiceImpl extends DetailGetService<ChapterEntity> {

    /** Safety cap on content pagination: a single chapter is assumed to span at most this many pages. */
    private static final int MAX_CONTENT_PAGES = 20;

    @Autowired
    private CrawlChapterMapper crawlChapterMapper;
    @Autowired
    private CrawlContentMapper crawlContentMapper;

    /**
     * Fetches the oldest pending (status TODO) chapter URL for the given crawl policy.
     *
     * @param policyId id of the crawl policy to pull URLs for
     * @return the next chapter URL to crawl, or {@code null} when the chapter channel has no pending URLs
     */
    @Override
    protected CrawlUrl getNewestUrl(Integer policyId) {
        CrawlUrl crawlUrl = crawlUrlService
            .getOldCrawlUrlByStatus(policyId, ChannelEnum.CHAPTER.getCode(), StatusEnum.TODO);
        if (null == crawlUrl) {
            // BUGFIX: previously logged ChannelEnum.BOOK here, although this service queries
            // the CHAPTER channel (see the getOldCrawlUrlByStatus call above).
            log.info("频道更新完毕: channel={}", ChannelEnum.CHAPTER.getCode());
            return null;
        }
        return crawlUrl;
    }

    /**
     * Parses a crawled chapter page into a {@link ChapterEntity}.
     *
     * <p>A chapter may be split across several pages on the source site
     * ({@code x.html}, {@code x_2.html}, {@code x_3.html}, ...). Follow-up pages are fetched
     * and appended until a page is missing or carries no {@code #chaptercontent} element
     * (previously only page 2 was fetched, truncating chapters with 3+ pages).
     *
     * @param crawlUrl the URL record being processed; {@code referId} points at the book's crawl URL
     * @param html     raw HTML of the chapter's first page
     * @return the assembled chapter, or {@code null} when the owning book has not been crawled yet
     */
    @Override
    protected ChapterEntity parseHtml(CrawlUrl crawlUrl, String html) {
        Document doc = Jsoup.parse(html);
        Elements contentElements = doc.select("#chaptercontent");
        for (int page = 2; page <= MAX_CONTENT_PAGES; page++) {
            String nextPageUrl = crawlUrl.getUrl().replace(".html", "_" + page + ".html");
            String htmlText = HttpUtil.getByHttpClient(nextPageUrl);
            if (null == htmlText || htmlText.isEmpty()) {
                // BUGFIX: a failed fetch used to be passed straight to Jsoup.parse -> NPE.
                break;
            }
            Elements pageElements = Jsoup.parse(htmlText).select("#chaptercontent");
            if (pageElements.isEmpty()) {
                break; // no content container on this page -> past the last page
            }
            contentElements.addAll(pageElements);
        }
        String content = contentElements.html();
        Long crawlBookId = crawlUrl.getReferId();
        CrawlUrl bookUrl = crawlUrlService.getById(crawlBookId);
        BookEntity bookEntity = crawlBookMapper.findByCrawlBookId(bookUrl.getUrlHash());
        if (null == bookEntity) {
            // Owning book not crawled yet; skip for now so this URL can be retried later.
            return null;
        }
        String chapterName = doc.select("#top > span").text();
        // Word count over the concatenated text of all fetched pages.
        Integer wordCount = contentElements.text().length();
        // Next chapter index = number of chapters already stored for this book.
        Integer indexNum = crawlChapterMapper.countByBookId(bookEntity.getId());

        return ChapterEntity.builder()
            .content(content)
            .bookId(bookEntity.getId())
            .indexName(chapterName)
            .indexNum(indexNum)
            .isVip(0)
            .wordCount(wordCount)
            .build();
    }

    /**
     * Persists a chapter and its content; idempotent on (bookId, indexName) — an already-stored
     * chapter is treated as success so the crawl keeps moving.
     *
     * <p>NOTE(review): Spring's proxy-based {@code @Transactional} by default only intercepts
     * public methods invoked through the proxy. This protected override is presumably called by
     * the superclass template method (a self-invocation), so the two inserts below likely run
     * WITHOUT a transaction — verify, e.g. by moving the persistence into a public method of a
     * separate bean or using a TransactionTemplate.
     *
     * @param entity the fully assembled chapter to store
     * @return always {@code true}
     */
    @Override
    @Transactional
    protected boolean insert(ChapterEntity entity) {
        log.info("正文入库: {}", entity.getIndexName());
        ChapterEntity old = crawlChapterMapper
            .selectByName(entity.getBookId(), entity.getIndexName());
        if (null != old) {
            // Duplicate chapter — already persisted in a previous run.
            return true;
        }
        crawlChapterMapper.create(entity);
        // Content row references the chapter id populated by the create() call above.
        crawlContentMapper.create(ContentEntity.builder()
            .chapterId(entity.getId())
            .content(entity.getContent())
            .build());
        return true;
    }
}
