import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.HashMap;
import org.htmlparser.util.ParserException;
import websphinx.Crawler;
import websphinx.Link;
import websphinx.Page;

/**
 * A websphinx {@link Crawler} that collects same-host HTML links starting
 * from a base URL, bounded by a maximum depth, queue size, and total page
 * count. Progress and decisions are written to a log file ("crawlLog.txt").
 */
public class UrlCrawler extends Crawler {

	private static final long serialVersionUID = 1L;

	// Filename suffixes that mark a link as non-HTML content. Each entry
	// includes the dot: the original code compared bare suffixes ("png"),
	// which wrongly rejected any filename that merely ended in those letters.
	private static final String[] NON_HTML_EXTENSIONS = {
			".png", ".jpg", ".pdf", ".ico", ".css", ".gif", ".js"
	};

	// Collected HTML links, keyed by the hash code of their URL string.
	private HashMap<Integer, ExtendedLink> webLinks = new HashMap<Integer, ExtendedLink>();
	// NOTE(review): queryLinks/formLinks are exposed via getters but never
	// populated in this class — presumably filled by a collaborator; verify.
	private HashMap<Link, Query> queryLinks = new HashMap<Link, Query>();
	private HashMap<Link, Query> formLinks = new HashMap<Link, Query>();
	private int maxPageInQ = 0, maxPageToCollect = 0;
	private Log logFile = null;
	private String root;
	// Host of the crawl root, computed once in the constructor instead of
	// re-parsing the root URL for every link encountered in expand().
	private String rootHost;

	/**
	 * Creates a crawler restricted to the server of {@code baseURL}.
	 *
	 * @param maxDepth         maximum link depth to follow
	 * @param maxPageInQ       upper bound on pages waiting in the queue
	 * @param maxPageToCollect upper bound on pages to visit in total
	 * @param crawlerDomain    NOTE(review): currently unused — the domain is
	 *                         hard-coded to {@code SERVER}; parameter kept
	 *                         for backward compatibility. Confirm intent.
	 * @param baseURL          the root URL the crawl starts from
	 * @throws IOException if {@code baseURL} is malformed or the log file
	 *                     cannot be opened
	 */
	public UrlCrawler(int maxDepth, int maxPageInQ,
			int maxPageToCollect, String crawlerDomain, String baseURL)
	throws IOException {
		super.clear();
		super.setMaxDepth(maxDepth);
		super.setIgnoreVisitedLinks(true);
		super.setDomain(SERVER);
		super.setRoot(new Link(new URL(baseURL)));
		this.root = baseURL;
		// MalformedURLException is a subclass of IOException, so this fits
		// the declared throws clause.
		this.rootHost = new Link(baseURL).getHost();
		this.maxPageInQ = maxPageInQ;
		this.maxPageToCollect = maxPageToCollect;
		this.logFile = Log.getInstance("crawlLog.txt");
	}

	/**
	 * Reports crawl progress to stdout and the log file; the page content
	 * itself is not processed here.
	 */
	@Override
	public void visit(Page page) {
		// Fixed missing space before "pages" in the original message.
		String progress = "Till now visited " + this.getPagesVisited()
				+ " pages, and " + this.getPagesLeft() + " Pages left";
		System.out.println(progress);
		try {
			logFile.WriteToLog(progress);
		} catch (IOException e) {
			System.out.println(e.getMessage());
		}
	}

	/**
	 * Examines every unvisited link on {@code page} and submits those that
	 * pass {@link #shouldVisit(Link)}, point to HTML, and share the root's
	 * host. Accepted links are also recorded in {@link #getWebLinks()}.
	 */
	@Override
	public void expand(Page page) {
		Link[] links = page.getLinks();
		for (Link link : links) {
			if (this.visited(link))
				continue;
			try {
				ExtendedLink exLink = new ExtendedLink(link);
				String visiting = "Now visiting: " + exLink.getLink().toURL()
						+ " which is in depth " + page.getDepth();
				System.out.println(visiting);
				logFile.WriteToLog(visiting);
				if (shouldVisit(exLink.getLink())) {
					// TODO: compare registered domains rather than exact
					// hosts (search.cnn.com --> cnn.com).
					// rootHost is precomputed in the constructor; the
					// original re-built new Link(root) on every iteration.
					if (exLink.isLinkHTML() && exLink.getLink().getHost().equals(rootHost)) {
						webLinks.put(exLink.getLink().getURL().hashCode(), exLink);
						this.submit(link);
						logFile.WriteToLog(exLink.getLink().toURL()
								+ " was successfully added to URL's database");
					} else {
						logFile.WriteToLog("This URL doesn't point to an HTML page or it is not at the same domain");
					}
				}
			} catch (IOException e) {
				System.out.println(e.getMessage());
			} catch (ParserException e) {
				System.out.println(e.getMessage());
			}
		}
	}

	/**
	 * Rejects a link when the queue or total-page budget is exhausted, when
	 * it is deeper than the configured maximum, or when its filename suffix
	 * marks it as non-HTML content.
	 *
	 * @param l the candidate link
	 * @return true if the crawler should follow {@code l}
	 */
	@Override
	public boolean shouldVisit(Link l) {
		try {
			if (this.getPagesLeft() >= this.maxPageInQ
					|| this.getPagesVisited() >= this.maxPageToCollect) {
				logFile.WriteToLog("Reached max number of pages. " + l.toURL()
						+ " wasn't added to URL's DB.");
				return false;
			}
			if (l.getDepth() > this.getMaxDepth()) {
				logFile.WriteToLog("Link's depth is  " + l.getDepth()
						+ " which is bigger than " + this.getMaxDepth()
						+ ". link wasn't added to queue");
				return false;
			}
		} catch (IOException e) {
			// A logging failure must not abort the crawl; fall through to
			// the extension check below.
			System.out.println(e.getMessage());
		}
		return isLinkURL(l);
	}

	/**
	 * Returns true unless the link's filename ends with a known non-HTML
	 * extension (image, stylesheet, script, PDF, icon).
	 */
	private boolean isLinkURL(Link link) {
		String name = link.getFilename();
		if (name == null) {
			return true;
		}
		for (String ext : NON_HTML_EXTENSIONS) {
			if (name.endsWith(ext)) {
				try {
					logFile.WriteToLog("This link doesn't point to an HTML page");
				} catch (IOException e) {
					System.out.println(e.getMessage());
				}
				return false;
			}
		}
		return true;
	}

	/** @return collected HTML links keyed by URL-string hash code */
	public HashMap<Integer, ExtendedLink> getWebLinks() {
		return webLinks;
	}

	/** @return query links (not populated by this class — see field note) */
	public HashMap<Link, Query> getQueryLinks() {
		return queryLinks;
	}

	/** @return form links (not populated by this class — see field note) */
	public HashMap<Link, Query> getFormLinks() {
		return formLinks;
	}
}
