package cs421;

import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.Collection;
import java.util.HashSet;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * A simple recursive web crawler: fetches a seed URL, saves each document
 * to a local file, and follows {@code <img>} and {@code <a>} links found in
 * HTML responses up to a configurable maximum depth.
 *
 * <p>Not thread-safe: {@code history} is an unsynchronized {@link HashSet}.
 */
public class Crawler {

	/**
	 * Creates a crawler that follows links up to {@code maxDepth} levels
	 * deep; the seed URL itself is depth 0.
	 *
	 * @param maxDepth maximum recursion depth; pages deeper than this are skipped
	 */
	public Crawler(int maxDepth) {
		this.maxDepth = maxDepth;
	}

	/**
	 * Crawls starting from the given seed URL.
	 *
	 * @param url the seed URL to start from
	 * @throws IOException if saving a fetched document to disk fails
	 */
	public void crawl(Url url) throws IOException {
		recursiveCrawl(url, 0, null);
	}

	/**
	 * Fetches {@code url}, saves its contents to a file named by
	 * {@code url.getFile()}, and — when the response is HTML — recurses
	 * into every {@code <img src>} and {@code <a href>} target found.
	 *
	 * @param url   the URL to fetch
	 * @param depth current recursion depth (0 for the seed)
	 * @param root  working path of the referring page; reserved for
	 *              relative-URL resolution, currently unused
	 * @throws IOException if writing the saved document fails
	 */
	private void recursiveCrawl(Url url, int depth, String root) throws IOException {
		if(depth > maxDepth)
			return;

		// Skip anything already attempted — prevents cycles and re-fetches.
		if(history.contains(url))
			return;

		System.out.printf("[%2d] %-60s", depth, url);
		HttpRequest r = new HttpRequest(url);
		try {
			r.send();
		}
		catch(Exception e) {
			e.printStackTrace();
			System.err.println("    error!");
			return;
		}
		finally {
			// Record the attempt even on failure so a bad URL is never retried.
			history.add(url);
		}

		// Save the fetched document; close the stream even if write() throws.
		OutputStream os = new BufferedOutputStream(new FileOutputStream(url.getFile()));
		try {
			os.write(r.getContents());
		}
		finally {
			os.close();
		}
		System.out.println(" -> " + url.getFile());

		// Only parse for outgoing links when the response claims to be HTML.
		if(r.isHtml()) {
			String body = new String(r.getContents(), r.charset());

			Matcher m = IMAGE_PATTERN.matcher(body);
			while(m.find())
				recursiveCrawl(new Url(m.group(2)), depth + 1, r.workingPath());

			m = ANCHOR_PATTERN.matcher(body);
			while(m.find())
				recursiveCrawl(new Url(m.group(2)), depth + 1, r.workingPath());
		}
	}

	/**
	 * Command-line entry point: {@code java Crawler <URL> <maximum depth>}.
	 *
	 * @param args seed URL and maximum depth
	 * @throws IOException if saving a fetched document fails
	 */
	public static void main(String[] args) throws IOException {
		if(args.length < 2) {
			System.err.println("usage: java Crawler <URL> <maximum depth>");
			return;
		}

		Url u = new Url(args[0]);
		int maxDepth;
		try {
			// parseInt avoids needless boxing and lets us report bad input cleanly.
			maxDepth = Integer.parseInt(args[1]);
		}
		catch(NumberFormatException e) {
			System.err.println("maximum depth must be an integer: " + args[1]);
			return;
		}

		Crawler c = new Crawler(maxDepth);
		c.crawl(u);
	}

	/** Maximum recursion depth; the seed URL is depth 0. */
	private final int maxDepth;

	/** Every URL already attempted, successful or not. */
	private final Collection<Url> history = new HashSet<Url>();

	/** Matches {@code <a ... href="...">} (quotes optional); target is group 2. */
	private static final Pattern ANCHOR_PATTERN = Pattern.compile(
			"<a.*?\\bhref=(\")?([^\"]*)\\1", Pattern.CASE_INSENSITIVE);

	/** Matches {@code <img ... src="...">} (quotes optional); target is group 2. */
	private static final Pattern IMAGE_PATTERN = Pattern.compile(
			"<img.*?\\bsrc=(\")?([^\"]*)\\1", Pattern.CASE_INSENSITIVE);
}

