package com.wgu.crawl2.service.impl;

import com.wgu.crawl.CrawlSite;
import com.wgu.crawl.service.CrawlService;
import com.wgu.crawl.service.CrawlSiteService;
import com.wgu.crawl.util.CrawlUtil;
import com.wgu.crawl2.service.*;
import com.wgu.xbqg.XBQGBook;
import com.wgu.xbqg.XBQGChapter;
import com.wgu.xbqg.XBQGChapterDetail;
import com.wgu.xbqg.XBQGType;
import lombok.extern.slf4j.Slf4j;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Service;

import javax.annotation.Resource;
import javax.transaction.Transactional;
import java.util.List;

/**
 * Created by w on 2019/6/27.
 */
@Service
@Slf4j
public class CrawlServiceImpl2 implements CrawlService2 {

    /** Boilerplate ad lines stripped from every chapter body before persisting. */
    private static final String AD_LINE_1 = "亲,点击进去,给个好评呗,分数越高更新越快,据说给新笔趣阁打满分的最后都找到了漂亮的老婆哦!";
    private static final String AD_LINE_2 = "手机站全新改版升级地址：http://m.xbiquge.la，数据和书签与电脑站同步，无广告清新阅读！";

    @Resource
    private CrawlSiteService crawlSiteService;
    @Resource
    private XBQGBookService xbqgBookService;
    @Resource
    private XBQGTypeService xbqgTypeService;
    @Resource
    private XBQGChapterService xbqgChapterService;
    @Resource
    private XBQGChapterDetailService xbqgChapterDetailService;

    /**
     * Demo: crawls a single qidian chapter page and returns its cleaned text.
     *
     * @return chapter text with entities/escaped newlines normalised, or {@code ""}
     *         when the selector matched nothing (previously threw IndexOutOfBoundsException)
     */
    @Override
    public Object demo() {
        String url = "https://vipreader.qidian.com/chapter/1013838884/451487540";
        // Fetch the chapter page, then extract the content node.
        String result = CrawlUtil.crawl(url, "UTF-8");
        List<Element> detailList = CrawlUtil.analyByRole(url, result, ".read-content");
        if (detailList.isEmpty()) {
            log.warn("demo: selector '.read-content' matched nothing at {}", url);
            return "";
        }
        String detailTxt = cleanText(detailList.get(0).text());
        log.info(detailTxt);
        return detailTxt;
    }

    /**
     * Asynchronously crawls and persists the full text of every chapter of a book.
     * Malformed chapter pages are skipped with a warning instead of aborting the
     * whole async task with an unchecked exception.
     *
     * @param xbqgBook book whose chapter list has already been crawled and saved
     */
    @Async
    @Override
    public void crawlChapterDetail(XBQGBook xbqgBook) {
        // All chapters previously saved for this book.
        List<XBQGChapter> chapterList = xbqgChapterService.list(xbqgBook.getId());
        for (XBQGChapter chapter : chapterList) {
            // Base URL for relative-link resolution must be the page we actually
            // crawled (the chapter URL), not the book's index page — otherwise
            // absUrl("href") resolves prev/next links against the wrong path.
            String chapterUrl = chapter.getUrl();
            String result = CrawlUtil.crawl(chapterUrl, "UTF-8");

            List<Element> detailList = CrawlUtil.analyByRole(chapterUrl, result, "#content");
            if (detailList.isEmpty()) {
                log.warn("crawlChapterDetail: no '#content' element at {}, skipping", chapterUrl);
                continue;
            }

            XBQGChapterDetail xbqgChapterDetail = new XBQGChapterDetail();
            xbqgChapterDetail.setChapterId(chapter.getId());
            // Chapter body, cleaned and with site boilerplate removed.
            String detailTxt = cleanText(detailList.get(0).text())
                    .replace(AD_LINE_1, "")
                    .replace(AD_LINE_2, "");
            xbqgChapterDetail.setContent(detailTxt);
            // Full chapter text at INFO floods the log; keep it at DEBUG.
            log.debug(detailTxt);

            // Footer nav block: expected layout is [index, prev, index, next].
            List<Element> pageList = CrawlUtil.analyByRole(chapterUrl, result, ".bottem2 a");
            if (pageList.size() > 3) {
                xbqgChapterDetail.setPrevUrl(pageList.get(1).absUrl("href"));
                xbqgChapterDetail.setNextUrl(pageList.get(3).absUrl("href"));
            } else {
                log.warn("crawlChapterDetail: unexpected nav layout ({} links) at {}",
                        pageList.size(), chapterUrl);
            }
            xbqgChapterDetailService.save(xbqgChapterDetail);
        }
    }

    /**
     * Crawls the site's "all books" index page and saves one {@link XBQGBook}
     * stub (name + URL) per listed novel.
     */
    @Override
    @Transactional
    public void crawlBook() {
        // Index page that lists every novel on the site.
        String allBookUrl = "http://www.xbiquge.la/xiaoshuodaquan/";
        // One anchor per book under the .novellist container.
        Elements es = CrawlUtil.analy(allBookUrl, "UTF-8", ".novellist a");
        es.forEach(element -> {
            XBQGBook book = new XBQGBook();
            book.setBookUrl(element.absUrl("href"));
            book.setName(element.text());
            xbqgBookService.save(book);
        });
    }

    /**
     * Asynchronously crawls a book's detail page: fills in type, author,
     * introduction and cover image, saves the book, then saves its chapter list.
     * Each metadata field is parsed defensively — a missing element logs a
     * warning instead of throwing and killing the async task.
     *
     * @param xbqgBook book stub (URL + name) produced by {@link #crawlBook()}
     */
    @Override
    @Async
    public void crawlChapterList(XBQGBook xbqgBook) {
        String bookUrl = xbqgBook.getBookUrl();
        String result = CrawlUtil.crawl(bookUrl, "UTF-8");

        applyType(xbqgBook, bookUrl, result);
        applyAuthor(xbqgBook, bookUrl, result);
        applyIntroduction(xbqgBook, bookUrl, result);
        applyCoverImage(xbqgBook, bookUrl, result);

        xbqgBookService.save(xbqgBook);

        // Chapter index: one anchor per chapter under #list.
        Elements es = CrawlUtil.analyByRole(bookUrl, result, "#list a");
        es.forEach(element -> {
            XBQGChapter chapter = new XBQGChapter();
            chapter.setBookId(xbqgBook.getId());
            chapter.setTitle(element.text());
            chapter.setUrl(element.absUrl("href"));
            xbqgChapterService.save(chapter);
        });
    }

    /**
     * Registers the xbiquge.la site record used by the crawler.
     *
     * @return the persisted {@link CrawlSite}
     */
    @Override
    @Transactional
    public CrawlSite saveXBQGSite() {
        // Site home page.
        String siteUrl = "http://www.xbiquge.la";
        CrawlSite site = new CrawlSite();
        site.setCharset("UTF-8");
        site.setName("新笔趣阁");
        site.setUrl(siteUrl);
        return crawlSiteService.save(site);
    }

    /** Normalises crawled text: literal &amp;nbsp; → space, escaped \n → real newline. */
    private static String cleanText(String raw) {
        return raw.replaceAll("&nbsp;", " ").replaceAll("\\\\n", "\n");
    }

    /** Resolves (creating if absent) the book's type from the breadcrumb and sets its id. */
    private void applyType(XBQGBook xbqgBook, String bookUrl, String result) {
        List<Element> typeList = CrawlUtil.analyByRole(bookUrl, result, ".con_top a");
        if (typeList.isEmpty()) {
            log.warn("crawlChapterList: no type breadcrumb at {}", bookUrl);
            return;
        }
        // Last breadcrumb anchor is the category, e.g. "玄幻小说".
        String typeName = typeList.get(typeList.size() - 1).text();
        XBQGType xbqgType = xbqgTypeService.findByName(typeName);
        if (xbqgType == null) {
            // Type not yet known — persist it first so we have an id.
            xbqgType = new XBQGType();
            xbqgType.setName(typeName);
            xbqgType = xbqgTypeService.save(xbqgType);
        }
        xbqgBook.setTypeId(xbqgType.getId());
    }

    /** Parses the author name out of "作...者：NAME" and sets it on the book. */
    private void applyAuthor(XBQGBook xbqgBook, String bookUrl, String result) {
        List<Element> authorList = CrawlUtil.analyByRole(bookUrl, result, "#info p");
        if (authorList.isEmpty()) {
            log.warn("crawlChapterList: no '#info p' element at {}", bookUrl);
            return;
        }
        // Limit 2 keeps author names that themselves contain a full-width colon.
        String[] parts = authorList.get(0).text().split("：", 2);
        if (parts.length < 2) {
            log.warn("crawlChapterList: unparseable author line '{}' at {}", parts[0], bookUrl);
            return;
        }
        xbqgBook.setAuthor(parts[1]);
    }

    /** Extracts the introduction (second #intro paragraph; the first is a mobile-site plug). */
    private void applyIntroduction(XBQGBook xbqgBook, String bookUrl, String result) {
        List<Element> introductionList = CrawlUtil.analyByRole(bookUrl, result, "#intro p");
        if (introductionList.size() < 2) {
            log.warn("crawlChapterList: expected >=2 '#intro p' paragraphs at {}", bookUrl);
            return;
        }
        xbqgBook.setIntroduction(introductionList.get(1).text());
    }

    /** Extracts the cover image URL from the #fmimg container. */
    private void applyCoverImage(XBQGBook xbqgBook, String bookUrl, String result) {
        List<Element> imgList = CrawlUtil.analyByRole(bookUrl, result, "#fmimg img");
        if (imgList.isEmpty()) {
            log.warn("crawlChapterList: no cover image at {}", bookUrl);
            return;
        }
        xbqgBook.setBookImgUrl(imgList.get(0).attr("src"));
    }
}
