package edu.hit.crawler.http;

import java.io.Serializable;
import java.io.UnsupportedEncodingException;
import java.net.URL;
import java.net.URLDecoder;
import java.util.ArrayList;
import java.util.List;

/**
 * Ordered list of robots.txt allow/disallow prefix rules.
 * Matching is first-match-wins: the earliest rule whose prefix matches the
 * path decides the result. A path with no matching rule is allowed.
 */
public class RuleSet implements Serializable {

	private static final long serialVersionUID = -1798644176087616627L;

	/** Charset name used to percent-decode paths before prefix matching. */
	private static final String UTF8_ENCODING = "UTF-8";

	// Never null: initialized here, and setRules() ignores null arguments.
	private List<RobotsEntry> rules = new ArrayList<RobotsEntry>();

	/**
	 * Checks whether the given URL's path is allowed by these rules.
	 * An absent or empty path is treated as "/".
	 *
	 * @param url the URL whose path is matched against the rules
	 * @return true if the path is allowed (or no rule matches), false otherwise
	 */
	public boolean isAllowed(URL url) {
		String path = url.getPath();
		if ((path == null) || path.isEmpty()) {
			path = "/";
		}
		return isAllowed(path);
	}

	/**
	 * Checks whether the given path is allowed by these rules.
	 * The path is percent-decoded (UTF-8) before matching so rules are
	 * compared against the literal path; if decoding fails, the raw path is
	 * matched instead (best effort). The first rule whose prefix matches the
	 * path decides; with no match the path is allowed.
	 *
	 * NOTE(review): URLDecoder.decode also turns '+' into a space, which is
	 * form-encoding semantics, not path semantics — confirm this is intended
	 * for robots.txt paths.
	 *
	 * @param path the URL path to test
	 * @return true if allowed, false if the first matching rule disallows
	 */
	public boolean isAllowed(String path) {
		try {
			path = URLDecoder.decode(path, UTF8_ENCODING);
		} catch (UnsupportedEncodingException e) {
			// UTF-8 is guaranteed to be supported on every JVM; cannot happen.
		} catch (IllegalArgumentException e) {
			// Malformed percent-escape: fall back to matching the raw path.
		}

		for (RobotsEntry rule : rules) {
			if (path.startsWith(rule.getPrefix())) {
				return rule.getAllowed();
			}
		}

		// No rule matched: allowed by default.
		return true;
	}

	/**
	 * Appends a rule. Order matters, since matching is first-match-wins.
	 *
	 * @param prefix path prefix to match
	 * @param allow  true to allow matching paths, false to disallow them
	 */
	public void addRule(String prefix, boolean allow) {
		rules.add(new RobotsEntry(prefix, allow));
	}

	/** Removes all rules, so every path is allowed again. */
	public void clearRules() {
		rules.clear();
	}

	/** Dumps each rule ("allowed prefix") to stdout for debugging. */
	public void print() {
		for (RobotsEntry e : rules) {
			System.out.println(e.getAllowed() + " " + e.getPrefix());
		}
	}

	/**
	 * Returns the backing rule list; never null. Note the list is live:
	 * mutations by the caller are visible to this rule set.
	 *
	 * @return the current rule list, never null
	 */
	public List<RobotsEntry> getRules() {
		return rules;
	}

	/**
	 * Replaces the rule list. A null argument is ignored so the invariant
	 * that {@code rules} is never null is preserved.
	 *
	 * @param rules the new rule list, or null to keep the existing rules
	 */
	public void setRules(List<RobotsEntry> rules) {
		if (rules != null) {
			this.rules = rules;
		}
	}
}