package com.miyu.client1.service.magic.impl;


import com.alibaba.fastjson.JSON;

import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import us.codecraft.webmagic.*;
import us.codecraft.webmagic.pipeline.JsonFilePipeline;
import us.codecraft.webmagic.pipeline.Pipeline;
import us.codecraft.webmagic.processor.PageProcessor;
import us.codecraft.webmagic.selector.Selectable;

import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLConnection;
import java.util.List;

public class tuspider {

    /**
     * Entry point: seeds the crawler with the first list page and starts it.
     * The "info" extra travels with every request and tells the processor
     * what kind of page it is ("type") and how to download ("downType",
     * "m" = per-chapter mode).
     */
    public static void main(String[] args) {
        JSONObject info = new JSONObject();
        info.put("type", "1");
        info.put("downType", "m");

        Request seed = new Request();
        seed.setUrl("https://xn--wgv69rba1382b.xyz/list-1-0-1.html");
        seed.setCharset("utf-8");
        seed.putExtra("info", info);

        Spider.create(new Tspider())
                .addRequest(seed)
                .addPipeline(new downLoadFile())
                .thread(50)
                .run();
    }
}
class Tspider implements PageProcessor {

    /**
     * Routes each fetched page by the "type" value stored in the request's
     * "info" extra:
     *   "1" = list page    -> enqueue detail pages plus the next list page,
     *   "2" = detail page  -> enqueue chapters ("m" mode) or collect images,
     *   "3" = chapter page -> collect images under "&lt;series&gt;\&lt;chapter&gt;".
     */
    @Override
    public void process(Page page) {
        JSONObject ext = page.getRequest().getExtra("info");
        // Constant-first equals avoids an NPE when "type" is missing.
        String type = ext.getString("type");
        if ("2".equals(type)) {
            processDetailPage(page, ext);
        } else if ("1".equals(type)) {
            processListPage(page, ext);
        } else if ("3".equals(type)) {
            processChapterPage(page, ext);
        }
    }

    /** Detail page: in "m" mode enqueue every chapter; otherwise collect image URLs directly. */
    private void processDetailPage(Page page, JSONObject ext) {
        String name = page.getHtml().xpath("//div[@class='services-desc']/h2/text()").get();
        if ("m".equals(ext.getString("downType"))) {
            // Each <li> holds the first page of one chapter; enqueue it as a type-3 request.
            List<Selectable> nodes = page.getHtml().xpath("//ul[@class='listmh']/li").nodes();
            for (Selectable li : nodes) {
                List<String> links = li.xpath("//a").links().all();
                if (links.isEmpty()) {
                    continue; // defensive: skip entries without a link (previously threw IOOBE)
                }
                JSONObject data = new JSONObject();
                data.put("type", "3");
                data.put("downType", ext.getString("downType"));
                data.put("name", name);
                Request request = new Request();
                request.setUrl(links.get(0));
                request.setCharset("utf-8");
                request.putExtra("info", data);
                page.addTargetRequest(request);
            }
            // This page itself yields no result fields for the pipeline.
            page.setSkip(true);
        } else {
            // Collect every image URL on this page into the "imgSrc" field.
            JSONArray ja = new JSONArray();
            for (Selectable li : page.getHtml().xpath("//ul[@class='contentmh']/li").nodes()) {
                JSONObject jo = new JSONObject();
                jo.put("srcUrl", li.xpath("//img/@data-original").get());
                jo.put("name", name);
                ja.add(jo);
            }
            page.putField("name", name);
            page.putField("imgSrc", ja);
        }
    }

    /** List page: enqueue every detail page via getLikes, then the next list page if one exists. */
    private void processListPage(Page page, JSONObject ext) {
        getLikes(page, ext);
        // The third pager <li> holds the "next page" link; a javascript: href marks the last page.
        List<String> pager = page.getHtml()
                .xpath("//ul[@class='stui-page text-center cleafix']/li[3]/a").links().all();
        if (!pager.isEmpty() && !pager.get(0).contains("javascript")) {
            JSONObject data = new JSONObject();
            data.put("type", "1");
            data.put("downType", ext.getString("downType"));
            Request request = new Request();
            request.setUrl(pager.get(0));
            request.setCharset("utf-8");
            request.putExtra("info", data);
            page.addTargetRequest(request);
            page.setSkip(true);
        }
    }

    /** Chapter page: collect image URLs; the result folder is "&lt;series&gt;\&lt;chapter&gt;". */
    private void processChapterPage(Page page, JSONObject ext) {
        String name = ext.getString("name"); // parent (series) folder name from the detail page
        String lastName = page.getHtml().xpath("//div[@class='services-desc']/h2/text()").get();
        JSONArray ja = new JSONArray();
        for (Selectable li : page.getHtml().xpath("//ul[@class='contentmh']/li").nodes()) {
            JSONObject jo = new JSONObject();
            jo.put("srcUrl", li.xpath("//img/@data-original").get());
            jo.put("name", lastName);
            ja.add(jo);
        }
        page.putField("name", name + '\\' + lastName);
        page.putField("imgSrc", ja);
    }

    /** Enqueues a type-2 (detail-page) request for every comic linked from the list grid. */
    private void getLikes(Page page, JSONObject ext) {
        List<String> likes = page.getHtml()
                .xpath("//div[@class='content']/div[@class='top-grids']").links().all();
        JSONObject data = new JSONObject();
        data.put("type", "2");
        data.put("downType", ext.getString("downType"));
        for (String like : likes) {
            Request request = new Request();
            request.setUrl(like);
            request.putExtra("info", data);
            page.addTargetRequest(request);
        }
        if (!likes.isEmpty()) {
            page.setSkip(true); // matches original: only skipped when at least one link was found
        }
    }

    // Crawler configuration: charset, politeness delay, timeout and retry policy.
    private Site site = new Site()
            .setCharset("utf-8")        // page encoding
            .setSleepTime(3)            // delay between fetches (ms)
            .setTimeOut(10000)          // request timeout (ms)
            .setRetrySleepTime(300)     // wait between retries (ms)
            .setRetryTimes(3);          // retry attempts per failed request

    @Override
    public Site getSite() {
        return this.site;
    }
}

class downLoadFile implements Pipeline {

    // Root directory for downloaded images.
    private static final String BASE_PATH = "H:\\爬虫数据\\图片\\han\\";

    /**
     * Saves every image listed in the "imgSrc" result field into
     * BASE_PATH\&lt;name&gt;\&lt;index&gt;&lt;ext&gt;. Failures on individual images are
     * reported and skipped so one bad URL does not abort the whole batch.
     */
    @Override
    public void process(ResultItems resultItems, Task task) {
        Object nameField = resultItems.get("name");
        JSONArray ja = resultItems.get("imgSrc");
        if (nameField == null || ja == null) {
            return; // page produced no downloadable result (previously an NPE)
        }
        String fileName = nameField.toString().trim();
        File dir = new File(BASE_PATH + fileName);
        if (!dir.exists()) {
            dir.mkdirs(); // creates intermediate directories as needed
        }
        for (int i = 0; i < ja.size(); i++) {
            JSONObject j = (JSONObject) ja.get(i);
            String link = j.getString("srcUrl");
            if (link == null || link.isEmpty()) {
                continue;
            }
            try {
                saveImage(link, dir, i);
            } catch (Exception e) {
                // Best effort: report and move on to the next image.
                e.printStackTrace();
            }
        }
    }

    /** Downloads one image URL into {@code dir}, naming it "&lt;index&gt;&lt;original extension&gt;". */
    private void saveImage(String link, File dir, int index) throws Exception {
        URLConnection con = new URL(link).openConnection();
        // Timeouts so a stalled server cannot hang a worker thread forever.
        con.setConnectTimeout(10000);
        con.setReadTimeout(10000);
        // Keep the source file's extension; default to ".jpg" when the URL has none
        // (the old substring(lastIndexOf(".")) threw when no dot was present).
        int dot = link.lastIndexOf('.');
        String ext = dot >= 0 ? link.substring(dot) : ".jpg";
        File target = new File(dir, index + ext);
        // try-with-resources closes both streams even on failure, fixing the
        // leak in the previous version; streaming straight to the file also
        // drops the invalid available()==0 "empty" check (available() may be 0
        // while data is still in flight) and the intermediate byte buffer.
        try (InputStream in = con.getInputStream();
             FileOutputStream out = new FileOutputStream(target)) {
            byte[] buf = new byte[8192];
            int len;
            while ((len = in.read(buf)) != -1) {
                out.write(buf, 0, len);
            }
        }
    }
}


