package com.hao.proxy.crawler;

import java.net.InetSocketAddress;
import java.util.HashSet;
import java.util.Set;

import org.jsoup.Connection;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.select.Elements;

import com.hao.proxy.context.Constants;
import com.hao.proxy.model.Proxy;
import com.hao.proxy.utils.Sleep;


/**
 * Crawler for the free proxy listing at {@code ip.jiangxianli.com}.
 *
 * <p>Fetches the first {@link #PAGE_COUNT} listing pages, parses each table row
 * ({@code <tr>}) as whitespace-separated text where token[1] is the host and
 * token[2] is the port, and returns the collected proxies. An already-known
 * proxy from {@code Constants.PROXY_QUEUE} (if any) is used to route the
 * requests themselves.
 */
public class JXLCrawler extends AbstractCrawler {

	/** Number of listing pages to crawl. */
	private static final int PAGE_COUNT = 4;

	/** Desktop Chrome user-agent, so the site serves the normal HTML listing. */
	private static final String USER_AGENT =
			"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36";

	/**
	 * Crawls pages 1..{@value #PAGE_COUNT} and collects the advertised proxies.
	 *
	 * @return the set of proxies found; possibly empty, never {@code null}.
	 *         Failures on individual pages are logged and skipped, so a partial
	 *         result is returned on network errors.
	 */
	@Override
	protected Set<Proxy> getProxy() {
		Set<Proxy> set = new HashSet<>();
		for (int i = 1; i <= PAGE_COUNT; i++) {
			try {
				Proxy proxy = Constants.PROXY_QUEUE.peek();
				// BUG FIX: the original never appended the page number, so page 1
				// was fetched four times and pages 2-4 were never crawled.
				Connection con = Jsoup.connect("http://ip.jiangxianli.com/?page=" + i)
						.userAgent(USER_AGENT)
						.timeout(3000);
				if (proxy != null) {
					// Route the request through a known-good proxy when one is available.
					con.proxy(new java.net.Proxy(java.net.Proxy.Type.HTTP,
							new InetSocketAddress(proxy.getHost(), proxy.getPort())));
				}
				Document doc = con.get();
				Elements rows = doc.select("tr");
				for (String row : rows.eachText()) {
					String[] item = row.split(" ");
					// Skip the header row (token[1] is the literal "IP") and any
					// row too short to carry host + port (the original relied on
					// the blanket catch to absorb the resulting AIOOBE).
					if (item.length < 3 || "IP".equals(item[1])) {
						continue;
					}
					try {
						set.add(new Proxy(item[1], Integer.parseInt(item[2])));
					} catch (NumberFormatException ignored) {
						// Non-numeric port means the row is not a proxy entry; skip it.
					}
				}
			} catch (Exception e) {
				// Best-effort crawl: a failed page must not abort the remaining
				// pages, and the failure should not be swallowed silently.
				System.err.println("JXLCrawler: failed to crawl page " + i + ": " + e);
			}
			// Be polite to the site between page fetches.
			Sleep.sleep(1000);
		}
		return set;
	}
}
