package com.crawler.application;

import java.net.MalformedURLException;
import java.net.URL;
import java.util.List;
import java.util.Map;

import com.crawler.base.CrawlerBase;
import com.crawler.base.ICrawlerEventHandler;
import com.crawler.base.Page;

/**
 * Mediator between the UI windows ({@code Crawler} main window, {@code ResultWnd})
 * and the crawl engine ({@code CrawlerBase}). Receives crawl progress callbacks
 * via {@code ICrawlerEventHandler} and forwards them to the result window.
 */
public class MainWndController implements ICrawlerEventHandler{

	// Main application window; set once via AddMainWnd.
	private Crawler appWnd;
	// Result window created each time a crawl is started.
	private ResultWnd resultWnd;
	// Crawl engine shared by every operation of this controller.
	private CrawlerBase crawlerbase;
	private Logger logger;
	// Crawler mode selected at StartCrawler time: 0 = basic two-seed crawl,
	// 1 = smart crawl. Also drives the ARFF output file name.
	private int mCrawlerType;
	
	public MainWndController()
	{
		logger = new Logger();
		crawlerbase = new CrawlerBase(logger, this);
	}
	
	/**
	 * Registers the main window and pushes the engine's default settings
	 * (politeness delay, max pages, seed URL) into its controls.
	 */
	public void AddMainWnd(Crawler wnd)
	{
		appWnd = wnd;
		appWnd.UpdateSettings(CrawlerBase.DefaultPoliteness, CrawlerBase.DefaultMaxPages, CrawlerBase.DefaultSeedPage);
	}
	
	/**
	 * Resets the engine, opens a fresh result window populated with the current
	 * feature list, and launches the crawl.
	 *
	 * @param nType       0 = basic crawl (strURL relevant seed, optional strURL2
	 *                    irrelevant seed), 1 = smart crawl (strURL2 ignored)
	 * @param nPoliteness delay between requests, forwarded to the engine
	 * @param nMaxPages   page budget per crawl
	 * @param strURL      primary seed URL
	 * @param strURL2     optional second seed URL; may be null or empty
	 */
	public void StartCrawler(int nType, int nPoliteness, int nMaxPages, String strURL, String strURL2)
	{
		mCrawlerType = nType;
		crawlerbase.Init();
		resultWnd = new ResultWnd(mCrawlerType, nMaxPages, this);
		
		for(String strFeature : crawlerbase.GetFeatureList())
		{
			resultWnd.InsertFeature(strFeature);
		}
		resultWnd.setVisible(true);
		
		if(mCrawlerType == 0)
		{
			crawlerbase.StartCrawler(nPoliteness, nMaxPages, strURL, this, true);
			// Null-safe: callers may legitimately omit the second seed.
			if(strURL2 != null && !strURL2.isEmpty())
				crawlerbase.StartCrawler(nPoliteness, nMaxPages, strURL2, this, false);
		}
		else if(mCrawlerType == 1)
		{
			crawlerbase.StartSmartCrawler(nPoliteness, nMaxPages, strURL, this);
		}
	}
	
	/**
	 * Builds a human-readable report for one crawled page: feature counts,
	 * URL, title, and full content.
	 *
	 * @param strTitle page title used to look the page up in the engine
	 * @param bIsRel   whether to search the relevant (true) or irrelevant group
	 * @return the formatted report, or "" when the page is not found
	 */
	public String GetPageContent(String strTitle, boolean bIsRel)
	{
		Page page = crawlerbase.GetPage(strTitle, bIsRel);
		if(page == null)
			return "";
		// StringBuilder avoids O(n^2) repeated String concatenation for large pages.
		StringBuilder sb = new StringBuilder();
		sb.append("=== Feature Stats ===\n");
		// entrySet() iteration: one map lookup per feature instead of two.
		for (Map.Entry<String, Integer> entry : page.GetFeatureSet().entrySet())
		{
			sb.append(String.format("%s : %d\n", entry.getKey(), entry.getValue()));
		}
		
		sb.append("\n");
		sb.append("=== Page URL ===\n");
		sb.append(page.GetPageURL());
		sb.append("\n\n");
		sb.append("=== Page Title ===\n");
		sb.append(page.GetTitle());
		sb.append("\n\n");
		sb.append("=== Page Content ===\n");
		sb.append(page.GetContent());
		return sb.toString();
	}
	
	/** Asks the engine to (re)extract features from every stored page. */
	public void ProcessAllPages()
	{
		crawlerbase.ProcessAllPages();
	}
	
	/** Adds a feature to the engine; mirrors it in the result window on success. */
	public void AddFeature(String strFeature)
	{
		if(crawlerbase.AddFeature(strFeature))
		{
			resultWnd.InsertFeature(strFeature);
		}
	}
	
	/** Removes a feature from the engine; mirrors it in the result window on success. */
	public void RemoveFeature(String strFeature)
	{
		if(crawlerbase.RemoveFeature(strFeature))
		{
			resultWnd.RemoveFeature(strFeature);
		}
	}
	
	/** @return the engine's current feature list. */
	public List<String> GetFeature()
	{
		return crawlerbase.GetFeatureList();
	}
	
	/**
	 * Re-processes all pages and writes them out as an ARFF dataset:
	 * "training.arff" for the basic crawl, "test.arff" otherwise.
	 */
	public void GenerateARFF()
	{
		String strPath = "";
		if(mCrawlerType == 0)
			strPath = "training.arff";
		else
			strPath = "test.arff";
		
		ProcessAllPages();
		crawlerbase.GenerateARFF(strPath);
	}
	
	/**
	 * Moves a page between the relevant and irrelevant groups, in both the
	 * engine and the result window.
	 *
	 * @param strURL    identifier of the page to move
	 * @param bSrcIsRel group the page currently belongs to
	 */
	public void SwapGroup(String strURL, boolean bSrcIsRel)
	{
		crawlerbase.MovePageToNextGroup(strURL, bSrcIsRel);
		resultWnd.InsertTitle(strURL, strURL, !bSrcIsRel);
		resultWnd.RemoveTitle(strURL, bSrcIsRel);
	}
	
	@Override
	public void OnPreProcessLink(int nPageIndex, String strURL, Page page, boolean bIsRel) {
		// Intentionally empty: this controller only reacts to OnProcessLink.
	}

	@Override
	public void OnProcessLink(int nPageIndex, String strURL, Page page, boolean bIsRel) {
		// An empty URL with no page is the engine's "frontier exhausted" signal.
		// BUG FIX: this check must run BEFORE constructing java.net.URL —
		// new URL("") throws MalformedURLException, which previously made the
		// "No More Page To Read!" branch unreachable.
		if((strURL == null || strURL.isEmpty()) && page == null)
		{
			resultWnd.UpdateProgress(nPageIndex, "No More Page To Read!", bIsRel);
			return;
		}
		try {
			URL url = new URL(strURL);
			resultWnd.UpdateProgress(nPageIndex, url.getPath(), bIsRel);
			resultWnd.InsertTitle(page.GetTitle(), strURL, bIsRel);
		} catch (MalformedURLException e) {
			// Malformed URL from the engine: log and keep the crawl UI running.
			e.printStackTrace();
		}
	}

	@Override
	public void OnPostProcessLink(int nPageIndex, String strURL, Page page, boolean bIsRel) {
		// Intentionally empty: this controller only reacts to OnProcessLink.
	}
}
