package com.hao.proxy.crawler;

import java.net.InetSocketAddress;
import java.util.HashSet;
import java.util.Set;

import org.jsoup.Connection;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.select.Elements;

import com.hao.proxy.context.Constants;
import com.hao.proxy.model.Proxy;


public class WuyouCrawler extends AbstractCrawler {

	/**
	 * Fetches the free-proxy listing page from data5u.com and parses each
	 * "host port ..." row into a {@link Proxy}.
	 *
	 * @return the proxies parsed from the page; empty on any failure
	 *         (crawling is best-effort — errors are logged, not thrown)
	 */
	@Override
	protected Set<Proxy> getProxy() {
		Set<Proxy> set = new HashSet<>();
		try {
			// Reuse an already-known working proxy (if any) to fetch the
			// page, so crawling can continue when direct access is blocked.
			Proxy proxy = Constants.PROXY_QUEUE.peek();
			Connection con = Jsoup.connect("http://www.data5u.com/free/gngn/index.shtml")
					.userAgent("Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36")
					.timeout(3000);
			if (proxy != null) {
				con.proxy(new java.net.Proxy(java.net.Proxy.Type.HTTP,
						new InetSocketAddress(proxy.getHost(), proxy.getPort())));
			}
			Document doc = con.get();
			Elements links = doc.select("li ul");
			for (String row : links.eachText()) {
				String[] item = row.split(" ");
				// Skip the table header ("IP ...") and any row that lacks
				// a port column — the original indexed item[1] blindly.
				if (item.length < 2 || "IP".equals(item[0])) {
					continue;
				}
				try {
					// parseInt avoids the needless boxing of Integer.valueOf.
					set.add(new Proxy(item[0], Integer.parseInt(item[1])));
				} catch (NumberFormatException ignored) {
					// Malformed port column — drop this row only, keep the rest.
				}
			}
		} catch (Exception e) {
			// Was an empty catch (Throwable): at least record the failure
			// instead of silently swallowing everything. Best-effort — we
			// still return whatever was collected before the error.
			System.err.println("WuyouCrawler failed to fetch proxy list: " + e);
		}
		return set;
	}

	public static void main(String[] args) {
		WuyouCrawler crawler = new WuyouCrawler();
		// Print the result so an ad-hoc manual run actually shows output
		// (the original discarded the returned set).
		System.out.println(crawler.getProxy());
	}

}
