package crawler;

import org.htmlparser.Node;
import org.htmlparser.Parser;
import org.htmlparser.tags.LinkTag;
import org.htmlparser.util.ParserException;
import org.htmlparser.visitors.ObjectFindingVisitor;

/**
 * A single-URL crawler thread.
 *
 * <p>Protocol: after construction, {@link #start()} the thread. It parses the
 * current URL, prints every link found, then {@code wait()}s on this object.
 * A controlling thread hands it the next URL by calling
 * {@link #setCurrentURL(String)} and then {@code notify()}-ing this object
 * (while holding its monitor). The loop exits if the thread is interrupted.
 */
public class Crawler extends Thread {

	String _currentURL;

	/**
	 * @param firstURL the first URL to fetch and scan for links
	 */
	public Crawler(String firstURL){
		_currentURL=firstURL;
	}

	/**
	 * Sets the next URL to crawl. Synchronized so the write is visible to
	 * the crawler thread, which reads the field under the same monitor.
	 *
	 * @param currentURL the URL to parse on the next iteration
	 */
	public synchronized void setCurrentURL(String currentURL ){
		_currentURL=currentURL;
	}

	/**
	 * Fetches {@code _currentURL}, collects all {@code LinkTag} nodes and
	 * prints each link's text and target to stdout. Parse failures are
	 * reported and the method returns without output.
	 */
	private void parseURL(){
		Parser parser;
		try {
			parser = new Parser (_currentURL);
		} catch (ParserException e) {
			e.printStackTrace();
			// Bug fix: previously execution fell through with parser == null
			// and threw a NullPointerException below. Bail out instead.
			return;
		}

		ObjectFindingVisitor visitor = new ObjectFindingVisitor (LinkTag.class);

		try {
			parser.visitAllNodesWith (visitor);
		} catch (ParserException e) {
			e.printStackTrace();
			return;
		}

		Node[] links = visitor.getTags ();
		for (int i = 0; i < links.length; i++)
		{
			LinkTag linkTag = (LinkTag)links[i];
			System.out.print ("\"" + linkTag.getLinkText () + "\" => ");
			System.out.println (linkTag.getLink ());
		}
	}

	/**
	 * Parses the current URL, then waits until another thread supplies the
	 * next one (via {@link #setCurrentURL(String)} + {@code notify()}).
	 * Terminates cleanly when interrupted, preserving the interrupt status.
	 */
	public void run(){
		while(true){
			synchronized(this){
				parseURL();
				try {
					this.wait();
				} catch (InterruptedException e) {
					// Bug fix: swallowing the interrupt made this thread
					// unstoppable. Restore the flag and exit the loop.
					Thread.currentThread().interrupt();
					return;
				}
			}
		}
	}
}
