package mylibrary.spider.framework;

import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.ByteArrayOutputStream;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.net.URL;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.apache.http.HttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.DefaultHttpClient;

public class Crawler {

	// URLs that have been handed to crawl(); used as a "seen" marker.
	// NOTE(review): HashSet is not thread-safe — if crawl() is invoked from
	// multiple worker threads (the BlockingQueue below suggests it may be),
	// this should become a concurrent set. Left as-is to preserve the interface.
	public static Set<String> handlingUrl = new HashSet<String>();

	final static int URL_QUEUE_SIZE = 10000;

	// Links discovered while crawling, presumably consumed by worker threads elsewhere.
	public static BlockingQueue<String> detectedUrl = new ArrayBlockingQueue<String>(URL_QUEUE_SIZE);

	// Matches the href attribute value of an anchor tag, up to a closing
	// double quote or '>'. Compiled once instead of per call.
	// Fix: the original class [\"|>] also matched a literal '|' ('|' is not
	// alternation inside a character class), truncating hrefs at any pipe.
	private static final Pattern LINK_PATTERN =
			Pattern.compile("<a\\s+href\\s*=\\s*\"?(.*?)[\">]", Pattern.CASE_INSENSITIVE);

	/**
	 * Downloads the page at {@code page.getUrl()}, stores the body via
	 * {@code page.setContent(...)} on HTTP 200, and enqueues every link
	 * found in the body onto {@link #detectedUrl}.
	 *
	 * @param page the page to crawl; its URL is read and its content is written
	 * @throws Exception on HTTP failure or if interrupted while the queue is full
	 */
	public void crawl(Page page) throws Exception {

		handlingUrl.add(page.getUrl());

		HttpGet request = new HttpGet(page.getUrl());
		HttpResponse response = new DefaultHttpClient().execute(request);
		int statusCode = response.getStatusLine().getStatusCode();
		if (statusCode == 200) {
			ByteArrayOutputStream bos = new ByteArrayOutputStream();
			try {
				response.getEntity().writeTo(bos);
			} finally {
				bos.close();
			}
			page.setContent(bos.toString());
		}

		// Fix: on a non-200 response the content was never set, yet the
		// original matched against it unconditionally — a likely NPE.
		String content = page.getContent();
		if (content == null) {
			return;
		}

		Matcher m = LINK_PATTERN.matcher(content);
		while (m.find()) {
			String link = m.group(1).trim();
			// contains()+put() is not atomic; acceptable as best-effort dedup.
			// put() blocks if the queue is full rather than dropping links.
			if (!detectedUrl.contains(link)) {
				detectedUrl.put(link);
			}
		}
	}
}
