package us.codecraft.webmagic.utils;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.apache.commons.httpclient.HttpClient;
import org.apache.commons.httpclient.methods.GetMethod;

/**
 * Fetches a site's robots.txt file and compiles its "Disallow" rules into
 * regular expressions that can be matched against candidate URLs.
 *
 * <p>NOTE(review): only Disallow lines are honored; User-agent sections,
 * Allow lines and other directives are ignored — confirm this conservative
 * behavior is intended.
 */
public class RobotsFileHandler {
	/** URL of the robots.txt file to download. */
	String url;
	/** Regex strings derived from each non-empty Disallow rule. */
	List<String> Rules = new ArrayList<String>();
	/** Compiled form of {@link #Rules}, ready for matching. */
	List<Pattern> PatternRules = new ArrayList<Pattern>();

	/**
	 * Downloads the robots.txt at {@code url} and compiles its rules.
	 *
	 * @param url full URL of a robots.txt file
	 * @throws IOException if the file cannot be fetched or read
	 */
	public RobotsFileHandler(String url) throws IOException {
		this.url = url;
		getRules();
		genRulesPatterns();
	}

	/**
	 * Fetches the robots.txt and stores a regex string in {@link #Rules} for
	 * every non-empty Disallow value.
	 *
	 * <p>Fixes over the original: the HTTP stream is closed (no resource
	 * leak), the charset is pinned to UTF-8, the "Disallow:" prefix is
	 * matched case-insensitively at the start of the line (so comment lines
	 * merely containing the word are ignored), and empty Disallow values —
	 * which mean "allow everything" — are skipped instead of becoming a
	 * match-all pattern.
	 *
	 * @throws IOException if the HTTP request or read fails
	 */
	public void getRules() throws IOException {
		HttpURLConnection connection = (HttpURLConnection) new URL(url).openConnection();
		try (BufferedReader br = new BufferedReader(
				new InputStreamReader(connection.getInputStream(), StandardCharsets.UTF_8))) {
			String line;
			while ((line = br.readLine()) != null) {
				String trimmed = line.trim();
				// Match the directive prefix, not just the word anywhere in the line.
				if (trimmed.toLowerCase().startsWith("disallow:")) {
					String rule = trimmed.substring("disallow:".length()).trim();
					// "Disallow:" with no value means everything is allowed — skip it.
					if (!rule.isEmpty()) {
						Rules.add(ruleToRegex(rule));
					}
				}
			}
		} finally {
			connection.disconnect();
		}
	}

	/**
	 * Converts a single robots.txt path rule into a regex string. Literal
	 * segments are quoted so regex metacharacters in the path (".", "?",
	 * "+", ...) cannot corrupt the pattern; each robots "*" wildcard becomes
	 * ".*". The result is wrapped in leading/trailing ".*" so it matches
	 * anywhere inside a full URL, as the original implementation did.
	 *
	 * @param rule the raw Disallow value, e.g. "/private/*.html"
	 * @return a regex string suitable for {@link Pattern#compile(String)}
	 */
	static String ruleToRegex(String rule) {
		StringBuilder regex = new StringBuilder(".*");
		StringBuilder literal = new StringBuilder();
		for (char c : rule.toCharArray()) {
			if (c == '*') {
				if (literal.length() > 0) {
					regex.append(Pattern.quote(literal.toString()));
					literal.setLength(0);
				}
				regex.append(".*");
			} else {
				literal.append(c);
			}
		}
		if (literal.length() > 0) {
			regex.append(Pattern.quote(literal.toString()));
		}
		regex.append(".*");
		return regex.toString();
	}

	/** Compiles every rule string in {@link #Rules} into {@link #PatternRules}. */
	public void genRulesPatterns() {
		for (String s : Rules) {
			PatternRules.add(Pattern.compile(s));
		}
	}

	/**
	 * Small demo: checks one URL against the rules of a live robots.txt.
	 * Requires network access.
	 */
	public static void main(String[] args) throws IOException {
		String url = "http://www.eelly.com/abc.html";

		RobotsFileHandler robots = new RobotsFileHandler("http://www.eelly.com/robots.txt");
		for (Pattern p : robots.PatternRules) {
			Matcher m = p.matcher(url);
			if (m.find()) {
				// Report the URL and offending rule instead of Matcher#toString().
				System.out.println("I can not download this url: " + url + " (rule: " + p.pattern() + ")");
			}
		}
	}
}
