package org.flyer.toolFrame.service;

import cn.edu.hfut.dmic.webcollector.model.CrawlDatum;
import cn.edu.hfut.dmic.webcollector.model.CrawlDatums;
import cn.edu.hfut.dmic.webcollector.model.Page;
import cn.edu.hfut.dmic.webcollector.plugin.berkeley.BreadthCrawler;
import cn.edu.hfut.dmic.webcollector.plugin.net.OkHttpRequester;
import cn.edu.hfut.dmic.webcollector.util.ExceptionUtils;
import com.alibaba.fastjson.JSON;
import okhttp3.MultipartBody;
import okhttp3.Request;
import okhttp3.RequestBody;
import org.flyer.toolFrame.pojo.JsonData;
import org.flyer.toolFrame.util.FileUtil;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.core.io.Resource;
import org.springframework.stereotype.Service;

import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

@Service
public class GitHubToolServiceImpl implements GitHubToolService {

    /** Bundled classpath resource holding the GitHub host configuration. */
    @Value("classpath:/data.json")
    private Resource data;

    /**
     * Reads the bundled {@code data.json} and returns the configured GitHub host names.
     *
     * @return the host list from {@code data.json}, or an empty list when the
     *         resource cannot be read (never {@code null})
     */
    @Override
    public List<String> getGitHubHost() {
        try {
            // Read the raw JSON text from the classpath resource.
            String s = FileUtil.jsonRead(data.getInputStream());
            JsonData jsonData = JSON.parseObject(s, JsonData.class);
            // Extract the host list from the parsed structure.
            return jsonData.getGitHubTool().getGitHubHostList();
        } catch (IOException e) {
            e.printStackTrace();
        }
        // Fix: return an empty list instead of null so callers can iterate safely.
        return Collections.emptyList();
    }

    /**
     * Resolves each host name to an IP address by crawling a public IP-lookup site.
     *
     * @param gitHubHostList host names to resolve
     * @return mapping of host name to resolved IP; hosts whose crawl failed or
     *         produced no address are omitted
     */
    @Override
    public Map<String, String> getHostMapping(List<String> gitHubHostList) {
        Map<String, String> hostMapping = new HashMap<>();
        // Fix: use an int counter; the percentage is computed with 100.0 below.
        int done = 0;
        // One crawler per host; they run strictly sequentially, so reusing the
        // same Berkeley-DB crawlPath is safe here (parallel reuse would not be).
        for (String hostName : gitHubHostList) {
            IpIpCrawler ipIpCrawler = new IpIpCrawler("crawler", true);
            // ip.cn lookup page for this host
            ipIpCrawler.addSeed(new CrawlDatum("https://www.ip.cn/ip/" + hostName + ".html").meta("method", "GET"));
            // Alternative source (ipip.net), kept for reference:
            // ipIpCrawler.addSeed(new CrawlDatum("https://tools.ipip.net/domain.php").meta("method", "POST").meta("query",hostName));
            try {
                ipIpCrawler.start(1);
                String address = ipIpCrawler.getHostAddress();
                // Fix: hostAddress stays null when visit() never ran or failed;
                // only record hosts that actually resolved.
                if (address != null) {
                    hostMapping.put(hostName, address);
                }
                done++;
                System.out.println("已完成:" + String.format("%.2f", done * 100.0 / gitHubHostList.size()) + "%");
            } catch (Exception e) {
                e.printStackTrace();
            }

        }
        return hostMapping;
    }


    /**
     * Berkeley-DB backed crawler that scrapes one IP address from an ip.cn
     * lookup page and exposes it via {@link #getHostAddress()}.
     */
    public class IpIpCrawler extends BreadthCrawler {
        // IP address text captured by the last visit; null until a page was parsed.
        private String hostAddress;

        /** @return the scraped IP address, or {@code null} if no page was parsed yet */
        public String getHostAddress() {
            return hostAddress;
        }

        /**
         * Builds a Berkeley-DB based crawler.
         * The DB folder is {@code crawlPath}; it keeps history (visited URLs etc.).
         * Different tasks must not share a crawlPath: two crawlers running in
         * parallel on the same path will corrupt each other.
         *
         * @param crawlPath folder used by the Berkeley DB
         * @param autoParse whether to auto-detect new URLs via the configured regexes
         */
        public IpIpCrawler(String crawlPath, boolean autoParse) {
            super(crawlPath, autoParse);
            /**
             * Install a requester that honors the per-seed "method" meta.
             */
            setRequester(new OkHttpRequester() {
                @Override
                public Request.Builder createRequestBuilder(CrawlDatum crawlDatum) {
                    Request.Builder requestBuilder = super.createRequestBuilder(crawlDatum);
                    String method = crawlDatum.meta("method");
                    // Fix: constant-first equals avoids an NPE when a seed carries
                    // no "method" meta. GET is the builder's default, so it can be
                    // returned unchanged.
                    if ("GET".equals(method)) {
                        return requestBuilder;
                    }

                    if ("POST".equals(method)) {
                        RequestBody requestBody;
                        String query = crawlDatum.meta("query");
                        // Without form data the POST payload is carried in the URL,
                        // so send an empty body.
                        if (query == null) {
                            requestBody = RequestBody.create(null, new byte[]{});
                        } else {
                            // Build a multipart form body from the "query" meta.
                            requestBody = new MultipartBody.Builder()
                                    .setType(MultipartBody.FORM)
                                    .addFormDataPart("query", query)
                                    .build();
                        }
                        return requestBuilder.post(requestBody);
                    }

                    // Any other method is rejected; ExceptionUtils.fail throws,
                    // so the return below is unreachable.
                    ExceptionUtils.fail("wrong method: " + method);
                    return null;
                }
            });

        }

        /**
         * Extracts the IP address text from a fetched ip.cn lookup page and
         * stores it in {@link #hostAddress}.
         */
        @Override
        public void visit(Page page, CrawlDatums next) {
            String first="";
            // ip.cn: the address sits in a <div> inside a specially styled <th>.
            // NOTE(review): .first() returns null if the page layout changes and
            // this chain would then NPE inside the crawler — TODO add a guard
            // (needs a jsoup Elements/Element reference not imported here).
            first = page.select("th[style='font-weight: bolder;']").first().select("div").first().text();
            // Alternative extraction for ipip.net, kept for reference:
            // first = page.select("div[class='panel-body']").first().select("pre").last().text();
            // first = first.split("-")[0].trim();

            this.hostAddress = first;
        }
    }


}
