package com.lry.crawlers.fiction.processor;

import com.lry.crawlers.fiction.entity.FictionVariable;
import com.lry.crawlers.util.ProcessVariable;
import us.codecraft.webmagic.Page;
import us.codecraft.webmagic.Site;
import us.codecraft.webmagic.processor.PageProcessor;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class FictionUpdateProcessor implements PageProcessor {

    /** Maps a fiction's chapter-list URL to the chapter count seen on the previous crawl. */
    public Map<String, Integer> map;

    private Site site = Site.me()
            .setSleepTime(1)         // delay between requests (ms)
            .setTimeOut(1000 * 10)   // request timeout (ms)
            .setRetrySleepTime(3000) // delay before a retry (ms)
            .setRetryTimes(3);       // max retry attempts

    public FictionUpdateProcessor() {
    }

    public FictionUpdateProcessor(Map<String, Integer> map) {
        this.map = map;
    }

    /**
     * Dispatches each fetched page.
     *
     * <p>The seed page (any URL containing {@code www.baidu.com}) enqueues all watched
     * chapter-list URLs. Otherwise the page is matched against the extraction rules in
     * {@link ProcessVariable#fVariable}: a non-empty title extraction marks a chapter-list
     * page, a non-empty content extraction marks a chapter-body page.
     *
     * @param page the fetched page supplied by the WebMagic spider
     */
    @Override
    public void process(Page page) {
        String url = page.getUrl().toString();

        if (url.contains("www.baidu.com")) {
            // Seed page: schedule every watched fiction chapter-list URL.
            page.addTargetRequests(new ArrayList<>(map.keySet()));
            return;
        }

        // Pick the rule set whose base URL matches this page.
        // NOTE(review): when nothing matches, the empty fallback FictionVariable
        // will NPE on getTitle() below — presumably every crawled URL has a rule;
        // confirm against the scheduler.
        FictionVariable fictionVariable = new FictionVariable();
        for (FictionVariable fv : ProcessVariable.fVariable.values()) {
            if (url.contains(fv.getUrl())) {
                fictionVariable = fv;
            }
        }

        // Rule strings have the shape "xpath-!-regex"; the regex part is optional.
        List<String> titles = extract(page, fictionVariable.getTitle());
        List<String> contents = extract(page, fictionVariable.getContent());

        if (!titles.isEmpty()) {
            processChapterListPage(page, url, fictionVariable);
        } else if (!contents.isEmpty()) {
            // Reuse the already-extracted content list; the original code
            // re-ran the identical xpath/regex extraction here.
            processContentPage(page, url, fictionVariable, contents);
        }
    }

    /** Applies one "xpath-!-regex" rule to the page and returns all matches. */
    private static List<String> extract(Page page, String rule) {
        String[] parts = rule.split("-!-");
        return page.getHtml().xpath(parts[0])
                .regex(parts.length != 1 ? parts[1] : ".+").all();
    }

    /**
     * Applies an "xpath-!-regex-!-from[,to]" rule: extracts all matches and, when the
     * optional third segment is present, keeps only the given index range ("from" alone
     * means "from index to end of list").
     */
    private static List<String> extractSliced(Page page, String rule) {
        String[] parts = rule.split("-!-");
        List<String> values = page.getHtml().xpath(parts[0])
                .regex(parts.length != 1 ? parts[1] : ".+").all();
        if (parts.length == 3) {
            String[] range = parts[2].split(",");
            int from = Integer.parseInt(range[0]);
            int to = range.length == 1 ? values.size() : Integer.parseInt(range[1]);
            values = values.subList(from, to);
        }
        return values;
    }

    /**
     * Handles a chapter-list page: computes the chapters that appeared since the last
     * crawl, emits them to the pipeline, and schedules their body pages for crawling.
     */
    private void processChapterListPage(Page page, String url, FictionVariable fictionVariable) {
        List<String> chapterTitle = extractSliced(page, fictionVariable.getChapterTitle());
        List<String> chapterUrl = extractSliced(page, fictionVariable.getChapterUrl());
        List<String> updateTime = extract(page, fictionVariable.getUpdateTime());

        if (chapterUrl.isEmpty()) {
            // Nothing extractable — the original code would have thrown
            // IndexOutOfBoundsException on chapterUrl.get(0) here.
            return;
        }

        // The fiction id is the second-to-last path segment of a chapter URL.
        String[] firstUrlParts = chapterUrl.get(0).split("/");
        String fictionId = firstUrlParts[firstUrlParts.length - 2];

        // Derive a chapterId from each URL's file name; relative URLs are first
        // made absolute with the site's protocol and host.
        List<String> chapterId = new ArrayList<String>();
        if (!chapterUrl.get(0).contains("http")) {
            ArrayList<String> absoluteUrls = new ArrayList<String>();
            String protocol = fictionVariable.getProtocol();
            String host = fictionVariable.getUrl();
            for (String relative : chapterUrl) {
                absoluteUrls.add(protocol + "://" + host + relative);
                String[] segments = relative.split("/");
                chapterId.add(segments[segments.length - 1].replace(".html", ""));
            }
            chapterUrl = absoluteUrls;
        } else {
            for (String absolute : chapterUrl) {
                String[] segments = absolute.split("/");
                chapterId.add(segments[segments.length - 1].replace(".html", ""));
            }
        }

        // Current chapter count on the site vs. the count from the previous crawl.
        int currentCount = chapterUrl.size();
        Integer previous = map.get(url);
        // Unknown URLs are treated as all-new; the original unboxed map.get(url)
        // directly and would NPE on a miss.
        int previousCount = previous != null ? previous : 0;

        if (currentCount > previousCount) {
            // Hand only the newly appeared chapters to the pipeline.
            // (Local renamed from "map" — it shadowed the public field.)
            Map<String, Object> data = new HashMap<>();
            data.put("key", "chapter");
            data.put("chapterTitle", chapterTitle.subList(previousCount, currentCount));
            data.put("chapterId", chapterId.subList(previousCount, currentCount));
            data.put("fictionId", fictionId);
            data.put("updateTime", updateTime.get(0));
            data.put("chapterNum", currentCount);
            data.put("updateUrl", url);
            data.put("id", fictionVariable.getId());
            page.putField("data", data);
            // Schedule the new chapter-body pages for content crawling.
            page.addTargetRequests(chapterUrl.subList(previousCount, currentCount));
        }
    }

    /**
     * Handles a chapter-body page: emits the extracted content together with the
     * chapter/fiction ids parsed from the URL path (shape: .../fictionId/chapterId.html).
     */
    private void processContentPage(Page page, String url, FictionVariable fictionVariable,
                                    List<String> content) {
        String[] segments = url.split("/");
        String chapterId = segments[segments.length - 1].replace(".html", "");
        String fictionId = segments[segments.length - 2];

        Map<String, Object> data = new HashMap<>();
        data.put("key", "content");
        data.put("content", content);
        data.put("chapterId", chapterId);
        data.put("fictionId", fictionId);
        data.put("id", fictionVariable.getId());
        data.put("fictionVariable", fictionVariable);
        page.putField("data", data);
    }

    @Override
    public Site getSite() {
        return site;
    }

}
