package com.dongman.scrapy;

import cn.wanghaomiao.xpath.model.JXDocument;
import cn.wanghaomiao.xpath.model.JXNode;
import com.dongman.model.ChapterPic;
import com.dongman.model.Constants;
import com.dongman.model.Novel;
import org.jsoup.Connection;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;

import java.util.ArrayList;
import java.util.Date;
import java.util.List;

/**
 * Scrapes a single comic's detail page: basic metadata ({@link Novel}) and
 * per-chapter picture links ({@link ChapterPic}).
 */
public class CrawOneLink {

    /**
     * Fetches one comic's detail page and parses its basic metadata.
     *
     * @param url detail-page URL; the trailing path segment (minus {@code .html})
     *            is used as the novel id
     * @return a populated {@link Novel}, or {@code null} if the page could not be
     *         fetched or parsed
     */
    public static Novel getNovelDetailByUrl(String url) {
        String[] segments = url.split("/");
        String novelId = segments[segments.length - 1].replace(".html", "");
        Novel novel = null;
        Connection conn = Jsoup.connect(url);
        // Spoof a realistic browser User-Agent to avoid trivial bot blocking.
        conn.header("User-Agent", CrawlUtil.getUserAgent());
        try {
            // Fetch with up to 3 retries, then wrap in an XPath-capable document tree.
            Document document = CrawlUtil.connectOneMoreTimes(conn, 3);
            JXDocument jdoc = new JXDocument(document);

            String name = jdoc.selN("//p[@class='comic-title j-comic-title']/text()").get(0).toString();
            String desc = jdoc.selN("//div[@class='comic-intro']/p[@class='intro-total']/text()").get(0).getTextVal();
            String status = jdoc.selN("//span[@class='update-time']/text()").get(0).getTextVal();

            // The status bar holds four spans; each is parsed best-effort so that a
            // missing span yields "" instead of aborting the whole parse.
            List<JXNode> four = jdoc.selN("//div[@class='comic-status']/span");
            String ticai = joinLinkTexts(four, 0);      // genre tags, space-separated
            String shoucang = firstBoldText(four, 1);   // favourites count
            String renqi = firstBoldText(four, 2);      // popularity
            String yeMianDate = firstBoldText(four, 3); // page update date

            novel = new Novel(
                    novelId,
                    new Date(), new Date(), name, desc,
                    // Strip the "第...话" wrapper to keep only the chapter number.
                    status.replace("第", "").replace("话", ""),
                    0,
                    status,
                    url, ticai, shoucang, renqi, yeMianDate
            );
            List<JXNode> chapterNodes = jdoc.selN("//ul[@class='chapter__list-box clearfix']/li/a");
            novel.setTotalChapters(chapterNodes.size());
            // Throttle between requests to stay polite to the target site.
            Thread.sleep(Constants.sleepSeconds);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt(); // restore interrupt status for callers
        } catch (Exception e) {
            e.printStackTrace();
        }
        return novel;
    }

    /**
     * Joins the text of every {@code <a>} under {@code nodes.get(index)}, each
     * followed by a space. Returns "" when the node or its links are absent.
     * (The old inline version seeded the result with the first tag and then
     * re-appended all tags, duplicating the first genre — fixed here.)
     */
    private static String joinLinkTexts(List<JXNode> nodes, int index) {
        StringBuilder sb = new StringBuilder();
        try {
            for (JXNode node : nodes.get(index).sel("//a/text()")) {
                sb.append(node.getTextVal()).append(' ');
            }
        } catch (Exception ignored) {
            // Best-effort: a missing span simply yields whatever was collected.
        }
        return sb.toString();
    }

    /** Returns the first {@code <b>} text under {@code nodes.get(index)}, or "" if absent. */
    private static String firstBoldText(List<JXNode> nodes, int index) {
        try {
            return nodes.get(index).sel("//b/text()").get(0).getTextVal();
        } catch (Exception ignored) {
            return "";
        }
    }

    /**
     * Collects all picture links of one chapter, including VIP-only pictures
     * fetched through the authenticated helper.
     *
     * @param url      chapter page URL; trailing segment (minus {@code .html}) is the chapter id
     * @param cookie   session cookie replayed with the request (needed for VIP content)
     * @param username account used by the VIP helper
     * @param password account password used by the VIP helper
     * @return pictures parsed from the page plus any VIP pictures; possibly empty, never null
     */
    public static List<ChapterPic> getAllPicsByLink(String url, String cookie, String username, String password) {
        String suffix = ".html";
        String[] segments = url.split("/");
        String chapterId = segments[segments.length - 1].replace(suffix, "");

        List<ChapterPic> chapterPics = new ArrayList<>();

        Connection conn = Jsoup.connect(url);
        // Spoof a browser User-Agent and replay the session cookie.
        conn.header("User-Agent", CrawlUtil.getUserAgent());
        conn.header("cookie", cookie);
        try {
            // Fetch inside the try so a transient network failure degrades to an
            // empty/partial list instead of crashing the caller.
            Document document = CrawlUtil.connectOneMoreTimes(conn, 3);
            JXDocument jdoc = new JXDocument(document);
            for (JXNode node : jdoc.selN("//div[@class='rd-article-wr clearfix']/div")) {
                String id = node.getElement().attr("data-pid");
                // Images are lazy-loaded: the real URL lives in data-src, not src.
                String picLink = node.getElement().child(0).attr("data-src");
                String basePicName = node.getElement().child(0).attr("alt");
                chapterPics.add(new ChapterPic(
                        id, new Date(), new Date(), picLink, basePicName, chapterId, Constants.NoDownToLocal
                ));
            }
            // Throttle between requests to stay polite to the target site.
            Thread.sleep(Constants.sleepSeconds);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt(); // restore interrupt status for callers
        } catch (Exception e) {
            e.printStackTrace();
        }

        // VIP chapters require an authenticated request; guard against a null
        // result from the helper so we never NPE here.
        List<ChapterPic> vipChapterPics = CrawlUtil.getChapterPicsByLink(url, cookie, username, password);
        if (vipChapterPics != null) {
            chapterPics.addAll(vipChapterPics);
        }

        return chapterPics;
    }

    public static void main(String[] args) {
        // NOTE(review): this URL points at an unrelated real-estate site, not a
        // comic detail page — it looks like a leftover manual test; confirm or replace.
        getNovelDetailByUrl("https://yc.nxzhzj.cn/estateTradeR01/portals/query!listSalePermitProject");
    }

}
