package wox.lonice.utis;

import com.alibaba.fastjson.JSONObject;
import com.google.common.collect.Lists;
import org.apache.commons.lang3.StringUtils;
import org.apache.http.HttpHost;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.util.EntityUtils;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
import wox.lonice.entity.ProxyIp;

import java.io.IOException;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;

public class IpProxyUtil {

    public static void main(String[] args) {
//        List<ProxyIp> ipMessages = getIpMessage();
//        System.out.println(ipMessages);
        String ip = "103.111.56.190";
        String port = "54126";
        String cookie = "CXID=B655D4151DF3B7980CD4BD8EBEEACE14; SUID=AD54E2DD4C238B0A5CCABAE6000EF114; ad=hM1X9lllll2t@ZZslllllV8@RjDlllllTWJUryllll9llllljZlll5@@@@@@@@@@; IPLOC=CN3201; SUV=1560504342116209; ABTEST=8|1560504351|v1; SNUID=CA3C8ABA676DE3225CE07773683066B6; weixinIndexVisited=1; usid=FEKUmPf_426pMDxK; ld=$yllllllll2NlLPMlllllV1RzG1lllllTWJUryllllwlllllRklll5@@@@@@@@@@; LSTMV=282%2C629; LCLKINT=15593; JSESSIONID=aaa4fjDlt0Viz7wsSOnRw; sct=4";
        String url = "https://weixin.sogou.com/weixin";
        String param = "type=1&s_from=input&query=%E7%BE%8E%E5%AE%B9&ie=utf8&_sug_=n&_sug_type_=";
        String result = HttpUtil.sendProxyCookieGet(ip, port, cookie, url, param, HttpUtil.iPhoneUserAgent);

        System.out.println("返回值 ----------- " + result);


//        WebClient webClient = new WebClient(BrowserVersion.FIREFOX_45,"103.111.56.190",54126);
//        try {
//            HtmlPage page=webClient.getPage("https://www.ailonice.com"); // 解析获取页面
//            System.out.println("网页html:"+page.asXml()); // 获取Html
//            System.out.println("====================");
//            System.out.println("网页文本："+page.asText()); // 获取文本
//        } catch (FailingHttpStatusCodeException e) {
//            // TODO Auto-generated catch block
//            e.printStackTrace();
//        } catch (MalformedURLException e) {
//            // TODO Auto-generated catch block
//            e.printStackTrace();
//        } catch (IOException e) {
//            // TODO Auto-generated catch block
//            e.printStackTrace();
//        }finally{
//            webClient.close(); // 关闭客户端，释放内存
//        }
////        String  url="http://news.cnblogs.com/";//想采集的网址
//        String refer="http://www.cnblogs.com/";
//        URL link=new URL(url);
//        WebClient wc=new WebClient();
//        WebRequest request=new WebRequest(link);
//        request.setCharset("UTF-8");
//        request.setProxyHost("120.120.120.x");
//        request.setProxyPort(8080);
//        request.setAdditionalHeader("Referer", refer);//设置请求报文头里的refer字段
//        ////设置请求报文头里的User-Agent字段
//        request.setAdditionalHeader("User-Agent", "Mozilla/5.0 (Windows NT 5.1; rv:6.0.2) Gecko/20100101 Firefox/6.0.2");
//        //wc.addRequestHeader("User-Agent", "Mozilla/5.0 (Windows NT 5.1; rv:6.0.2) Gecko/20100101 Firefox/6.0.2");
//        //wc.addRequestHeader和request.setAdditionalHeader功能应该是一样的。选择一个即可。
//        //其他报文头字段可以根据需要添加
//        wc.getCookieManager().setCookiesEnabled(true);//开启cookie管理
//        wc.getOptions().setJavaScriptEnabled(true);//开启js解析。对于变态网页，这个是必须的
//        wc.getOptions().setCssEnabled(true);//开启css解析。对于变态网页，这个是必须的。
//        wc.getOptions().setThrowExceptionOnFailingStatusCode(false);
//        wc.getOptions().setThrowExceptionOnScriptError(false);
//        wc.getOptions().setTimeout(10000);
//        //设置cookie。如果你有cookie，可以在这里设置
//        Set<Cookie> cookies=null;
//        Iterator<Cookie> i = cookies.iterator();


    }

    public static List<ProxyIp> getIpMessage() {
//        http://www.66ip.cn/
        List<String> urls = Lists.newArrayList();
        //构造种子url(4000条ip)
        Stream.iterate(1, rowNum -> rowNum + 1).limit(40)
                .forEach(r -> urls.add("http://www.xicidaili.com/nn/" + r));
        //urlParse存放爬取下来的ip信息     IPIsable对拿到的ip进行质量检测，将质量不合格的ip在List里进行删除
        return IPIsable(urlParse(urls));
    }

    //使用代理进行爬取
    private static List<ProxyIp> urlParse(List<String> urls) {
        List<ProxyIp> proxyIps = Lists.newArrayList();
        urls.parallelStream().forEach(url -> {
            String html = getHtml(url);
            System.out.println("---------------- " + url + " -------------- " + html);
            if (StringUtils.isNotEmpty(html)) {
                //将html解析成DOM结构
                Document document = Jsoup.parse(html);
                //提取所需要的数据
                Elements trs = document.select("table[id=ip_list]").select("tbody").select("tr");
                for (Element element : trs) {
                    ProxyIp proxyIp = new ProxyIp();
                    System.out.println("-----------------" + JSONObject.toJSONString(element));
                    String ipAddress = element.select("td").get(1).text();
                    String ipPort = element.select("td").get(2).text();
                    String ipType = element.select("td").get(5).text();
                    String ipSpeed = element.select("td").get(6).select("div[class=bar]").
                            attr("title");

                    proxyIp.setAddress(ipAddress);
                    proxyIp.setPort(ipPort);
                    proxyIp.setType(ipType);
                    proxyIp.setSpeed(ipSpeed);

                    proxyIps.add(proxyIp);
                }
            }
        });
        return proxyIps;
    }

    private static List<ProxyIp> IPIsable(List<ProxyIp> proxyIps) {
        CloseableHttpClient httpClient = HttpClients.createDefault();
        List<ProxyIp> returnProxyIps = Lists.newArrayList();

        HttpGet httpGet = new HttpGet("https://www.baidu.com");
        httpGet.setHeader("Accept", "text/html,application/xhtml+xml,application/xml;" +
                "q=0.9,image/webp,*/*;q=0.8");
        httpGet.setHeader("Accept-Encoding", "gzip, deflate, sdch");
        httpGet.setHeader("Accept-Language", "zh-CN,zh;q=0.8");
        httpGet.setHeader("User-Agent", "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit" +
                "/537.36 (KHTML, like Gecko) Chrome/55.0.2883.87 Safari/537.36");

        proxyIps.parallelStream().forEach(proxyIp -> {
            String ip = proxyIp.getAddress();
            String port = proxyIp.getPort();

            HttpHost proxy = new HttpHost(ip, Integer.parseInt(port));
            RequestConfig config = RequestConfig.custom().setProxy(proxy).setConnectTimeout(3000).
                    setSocketTimeout(3000).build();
            httpGet.setConfig(config);

            CloseableHttpResponse response = null;
            try {
                response = httpClient.execute(httpGet);
                returnProxyIps.add(proxyIp);
            } catch (IOException e) {
                System.out.println("不可用代理已删除" + ip + ": " + port);
            } finally {
                if (ObjectUtil.isNotEmpty(response)) {
                    try {
                        response.close();
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
                }
            }
        });
        if(ObjectUtil.isNotEmpty(httpClient)){
            try {
                httpClient.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
        return returnProxyIps;
    }

    //对上一个方法的重载，使用本机ip进行网站爬取
    private static String getHtml(String url) {
        String entity = null;
        CloseableHttpClient httpClient = HttpClients.createDefault();

        //设置超时处理
        RequestConfig config = RequestConfig.custom().setConnectTimeout(3000).
                setSocketTimeout(3000).build();
        HttpGet httpGet = new HttpGet(url);
        httpGet.setConfig(config);

        httpGet.setHeader("Accept", "text/html,application/xhtml+xml,application/xml;" +
                "q=0.9,image/webp,*/*;q=0.8");
        httpGet.setHeader("Accept-Encoding", "gzip, deflate, sdch");
        httpGet.setHeader("Accept-Language", "zh-CN,zh;q=0.8");
        httpGet.setHeader("Cache-Control", "no-cache");
        httpGet.setHeader("Connection", "keep-alive");
        httpGet.setHeader("Host", "www.xicidaili.com");
        httpGet.setHeader("Pragma", "no-cache");
        httpGet.setHeader("Upgrade-Insecure-Requests", "1");
        httpGet.setHeader("User-Agent", "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 " +
                "(KHTML, like Gecko) Chrome/55.0.2883.87 Safari/537.36");

        try {
            //客户端执行httpGet方法，返回响应
            CloseableHttpResponse httpResponse = httpClient.execute(httpGet);

            //得到服务响应状态码
            if (httpResponse.getStatusLine().getStatusCode() == 200) {
                entity = EntityUtils.toString(httpResponse.getEntity(), "utf-8");
            }

            httpResponse.close();
            httpClient.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
        return entity;
    }
}
