package com.crawler.base;

import java.io.IOException;
import java.net.MalformedURLException;
import java.net.SocketTimeoutException;
import java.net.URL;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;

/**
 * Worker thread that crawls breadth-first from a seed URL, restricted to the
 * seed's host, until either the pending frontier is empty or {@code mMaxPages}
 * pages have been accepted. Accepted pages are handed to {@link CrawlerBase}
 * and reported through the optional {@link ICrawlerEventHandler} callback.
 * The entire crawl runs while holding the externally supplied lock object, so
 * at most one worker sharing that lock crawls at a time.
 */
public class CrawlerWorker extends Thread {

	private URL seedURL;
	private String mSeedURL;
	private String mHost;          // host of the seed URL; crawl never leaves it
	private int mPoliteness;       // delay between successful fetches, in seconds
	private int mMaxPages;         // stop once this many pages are accepted

	private ICrawlerEventHandler mCrawlerEvent; // optional progress callback, may be null
	private IOutputInterface mOutput;           // sink for user-visible log lines
	private Map<String, Page> mPageMap;         // accepted pages, keyed by URL
	private List<String> mPendingList;          // FIFO frontier of URLs still to fetch
	private Set<String> mVisitedSet;            // URLs already fetched (O(1) membership)

	private boolean mIsSmartCrawler; // true: filter pages through Classifier; false: restrict to seed path
	private boolean mIsRel;
	private String mLock;            // external mutex shared with other workers

	private CrawlerBase mCrawlerBase;

	/**
	 * Creates a worker bound to the given seed URL and settings.
	 *
	 * @param crawlerBase  owner that receives accepted pages and supplies the feature list
	 * @param Output       output sink for progress messages (must not be null)
	 * @param nPoliteness  seconds to sleep between page fetches
	 * @param nMaxPages    maximum number of pages to accept before stopping
	 * @param strURL       seed URL the crawl starts from
	 * @param crawlerEvent optional event callback; may be null
	 * @param bIsRel       relevance flag forwarded unchanged to callbacks and the base
	 * @param strLock      shared lock object serializing workers
	 * @param bSmartClawler true to accept only pages that pass {@code Classifier.IsMatch}
	 * @throws IllegalArgumentException if {@code strURL} is not a valid URL
	 *         (failing fast here replaces the former deferred NullPointerException)
	 */
	public CrawlerWorker(CrawlerBase crawlerBase, IOutputInterface Output, int nPoliteness, int nMaxPages, String strURL, ICrawlerEventHandler crawlerEvent, boolean bIsRel, String strLock, boolean bSmartClawler)
	{
		mCrawlerBase = crawlerBase;
		mOutput = Output;
		mIsSmartCrawler = bSmartClawler;
		mPageMap = new HashMap<String, Page>();
		mPendingList = new ArrayList<String>();
		mVisitedSet = new HashSet<String>();
		mOutput.OutputPrint("Worker Init....");
		mOutput.OutputPrint(String.format("Settings:\nPoliteness[%d]\nMax Pages[%d]", nPoliteness, nMaxPages));
		mOutput.OutputPrint(String.format("Seed URL [%s]", strURL));
		mPoliteness = nPoliteness;
		mMaxPages = nMaxPages;
		mIsRel = bIsRel;
		mSeedURL = strURL;
		mCrawlerEvent = crawlerEvent;
		mLock = strLock;
		try {
			seedURL = new URL(strURL);
			mHost = seedURL.getHost();
		} catch (MalformedURLException e) {
			// A bad seed makes the worker unusable; fail fast instead of
			// NPE-ing later inside FillPendingList.
			throw new IllegalArgumentException(String.format("Invalid seed URL <%s>", strURL), e);
		}
	}

	/**
	 * Runs the crawl loop: fetch the seed, then repeatedly pop the oldest
	 * pending URL, fetch it, harvest its links, and accept the page unless
	 * smart mode's classifier rejects it. Per-page fetch failures are logged
	 * and skipped; only a failure on the seed itself propagates.
	 *
	 * @throws IOException if the seed URL cannot be fetched
	 */
	private void StartCrawler() throws IOException
	{
		synchronized(mLock)
		{
			Document seedDoc = Jsoup.connect(mSeedURL).get();
			FillPendingList(seedDoc);
			while(!mPendingList.isEmpty())
			{
				String url = mPendingList.remove(0);

				// Skip duplicates BEFORE connecting — the old order fetched
				// the page over the network and only then discarded it.
				if(mPageMap.containsKey(url) || mVisitedSet.contains(url))
					continue;

				try{
					Document doc = Jsoup.connect(url).get();
					mVisitedSet.add(url);
					FillPendingList(doc);

					String strTitle = GetPageTitle(doc);
					String strContent = GetPageTextContent(doc);
					Page page = new Page(url, strTitle, strContent, mCrawlerBase.GetFeatureList());
					page.ProcessPage();

					// Smart mode keeps only pages the classifier accepts.
					if(mIsSmartCrawler && !Classifier.IsMatch(page))
						continue;

					mPageMap.put(url, page);
					mCrawlerBase.AddPageToMap(page, mIsRel);
					mOutput.OutputPrint(String.format("Page[%d]-Title[%s]", mPageMap.size(), strTitle));

					if(mCrawlerEvent != null)
						mCrawlerEvent.OnProcessLink(mPageMap.size(), url, page, mIsRel);

					if(mPageMap.size() >= mMaxPages)
					{
						mOutput.OutputPrint("[STOPPED]...Max page reached!");
						break;
					}

					// Politeness delay between fetches (long math avoids int overflow).
					Thread.sleep(mPoliteness * 1000L);
				}
				catch(SocketTimeoutException e)
				{
					System.err.println("Socket Timeout....Sleep for another 10 seconds");
					try {
						Thread.sleep(10000);
					} catch (InterruptedException e1) {
						Thread.currentThread().interrupt(); // preserve interrupt status
						return;                             // stop crawling when interrupted
					}
				}
				catch (IOException e1) {
					// Covers MalformedURLException too; skip this page, keep crawling.
					System.err.println(String.format("Failed to fetch <%s>: %s", url, e1));
				}
				catch (InterruptedException e) {
					Thread.currentThread().interrupt(); // preserve interrupt status
					return;                             // stop crawling when interrupted
				}
			}

			if(mPendingList.isEmpty() && mPageMap.size() < mMaxPages)
			{
				mOutput.OutputPrint("[STOPPED]...No more page to read!");
				// Guard added: the callback is optional everywhere else and a
				// missing handler used to NPE on this one path.
				if(mCrawlerEvent != null)
					mCrawlerEvent.OnProcessLink(mMaxPages, "", null, mIsRel);
			}
		}
	}

	/**
	 * Extracts every {@code <a href>} from the document and queues the links
	 * that are on the seed host, not in-page anchors, and not already pending
	 * or visited. In non-smart mode a link must additionally stay under the
	 * seed URL's path.
	 *
	 * @param doc page whose links are harvested into the pending frontier
	 */
	private void FillPendingList(Document doc)
	{
		Elements links = doc.select("a[href]");
		for (Element link : links) {
			String strLink = link.attr("abs:href");
			// Skip empty hrefs, in-page anchors, and links already queued.
			if(strLink.isEmpty()
					|| strLink.indexOf("#") != -1
					|| mPendingList.contains(strLink))
				continue;

			try {
				URL url = new URL(strLink);
				// mHost.equals(...) is null-safe, unlike host.compareTo(mHost).
				if(mHost.equals(url.getHost())
						&& !mVisitedSet.contains(strLink))
				{
					// Non-smart mode only follows links under the seed path.
					if(!mIsSmartCrawler && !strLink.contains(seedURL.getPath()))
						continue;
					mOutput.OutputPrint(String.format("Add link to pending list: <%s>", strLink));
					mPendingList.add(strLink);
				}
			} catch (MalformedURLException e) {
				System.err.println(String.format("Invalid URL<%s>", strLink));
			}
		}
	}

	/**
	 * Returns the visible text of the page body, or "" when the document has
	 * no body element (the old unconditional doc.body().text() NPE'd then).
	 */
	private String GetPageTextContent(Document doc)
	{
		Element body = doc.body();
		return body != null ? body.text() : "";
	}

	/**
	 * Returns the page's &lt;title&gt; text, or "" when the page has no title
	 * element (the old unconditional first().text() NPE'd then).
	 */
	private String GetPageTitle(Document doc)
	{
		Element title = doc.select("title").first();
		return title != null ? title.text() : "";
	}

	/**
	 * Thread entry point: fires the pre-process callback, runs the crawl, and
	 * fires the post-process callback on normal completion. A seed-fetch
	 * failure is reported to the output sink instead of only stderr.
	 */
	@Override
	public void run() {
		super.run();
		if(mCrawlerEvent != null)
			mCrawlerEvent.OnPreProcessLink(0, mSeedURL, null, mIsRel);
		try {
			StartCrawler();
			if(mCrawlerEvent != null)
				mCrawlerEvent.OnPostProcessLink(0, mSeedURL, null, mIsRel);
		} catch (IOException e) {
			mOutput.OutputPrint(String.format("[STOPPED]...Failed to fetch seed <%s>: %s", mSeedURL, e));
			e.printStackTrace();
		}
	}

}
