package com.rent.spider;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.*;
import java.util.concurrent.locks.ReentrantLock;

import org.jsoup.Connection;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;

/*
 * Author: Li Zining (李子宁)
 * Happy, happy every day! Charge ahead! (冲鸭！)
 * 2020/8/8
 */
/**
 * Entry point for crawling rental listings from nj.5i5j.com.
 *
 * <p>{@code main} walks every listing page, fetches its HTML through a local
 * proxy, and hands each fetched {@link Document} to a {@code Thread5i5j}
 * worker task running on a single-threaded executor.
 */
public class Houses {
    // Shared lock, exposed for the worker tasks (e.g. Thread5i5j) to
    // serialize access to shared resources. NOTE(review): nothing in this
    // file acquires it — confirm the workers actually use it.
    public static ReentrantLock lock = new ReentrantLock();

    /**
     * Fetches the page at {@code link} through a local HTTP/HTTPS proxy and
     * returns its full HTML markup as a string.
     *
     * <p>NOTE(review): this method routes through proxy port 19856 while
     * {@link #main} uses 11356 — confirm which local proxy port is correct
     * and consider extracting the proxy setup into one shared helper.
     *
     * @param link absolute URL of the page to fetch
     * @return the page's HTML serialized via {@link Document#toString()}
     * @throws IOException if the connection fails or times out
     */
    public static String House(String link) throws IOException {
        String proxyHost = "127.0.0.1";
        String proxyPort = "19856";
        System.setProperty("http.proxyHost", proxyHost);
        System.setProperty("http.proxyPort", proxyPort);
        // Route HTTPS traffic through the proxy as well.
        System.setProperty("https.proxyHost", proxyHost);
        System.setProperty("https.proxyPort", proxyPort);
        Connection connect = Jsoup.connect(link);
        connect.header("User-Agent", "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/75.0.3770.100 Safari/537.36");
        Document document = connect.get();
        return document.toString();
    }

    /**
     * Crawls listing pages 1..695 of nj.5i5j.com/zufang and submits each
     * fetched page to a {@code Thread5i5j} worker for processing.
     *
     * @param args unused
     * @throws IOException declared for backward compatibility; individual
     *         fetch failures are now caught and logged instead of aborting
     */
    public static void main(String[] args) throws IOException {
        // Route both HTTP and HTTPS traffic through a local proxy.
        String proxyHost = "127.0.0.1";
        String proxyPort = "11356";
        System.setProperty("http.proxyHost", proxyHost);
        System.setProperty("http.proxyPort", proxyPort);
        System.setProperty("https.proxyHost", proxyHost);
        System.setProperty("https.proxyPort", proxyPort);

        // Base URL of the listing index; alternative sites previously used:
        //   https://nanjing.baixing.com/zhengzu/?page=
        //   https://nj.lianjia.com/zufang/pg
        //   https://nj.zu.ke.com/zufang/pg
        //   http://nj.rent.house365.com/district_rent/p
        String s1 = "https://nj.5i5j.com/zufang/n";

        // Build the full list of page URLs up front.
        List<String> pages = new ArrayList<>(695);
        for (int i = 1; i <= 695; i++) {
            pages.add(s1 + i + "/");
        }

        ExecutorService service = Executors.newSingleThreadExecutor();
        // Fetch every page and hand the parsed document to a worker task.
        for (String url : pages) {
            try {
                Connection conn = Jsoup.connect(url).timeout(5000);
                conn.header("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8");
                conn.header("Accept-Encoding", "gzip, deflate, sdch");
                conn.header("Accept-Language", "zh-CN,zh;q=0.8");
                conn.header("User-Agent", "Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.0.8) Gecko Fedora/1.9.0.8-1.fc10 Kazehakase/0.5.6");
                Document doc = conn.get();
                service.execute(new Thread5i5j(url, doc));
            } catch (IOException e) {
                // One failed page must not abort the remaining ~694 pages.
                System.err.println("Failed to fetch " + url + ": " + e);
            }
        }

        // BUG FIX: the executor was never shut down, so its non-daemon worker
        // thread kept the JVM alive forever after the crawl finished.
        service.shutdown();
        try {
            if (!service.awaitTermination(30, TimeUnit.MINUTES)) {
                service.shutdownNow();
            }
        } catch (InterruptedException e) {
            // Re-interrupt per convention and stop the remaining work.
            Thread.currentThread().interrupt();
            service.shutdownNow();
        }
    }

}
