/**
 * crawler
 *
 * outlook@China software studio
 * Copyright (c) 2008-2010. All Rights Reserved.
 * http://www.outlook.com
 */

package org.outlook.crawler.util;

import java.io.File;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;

import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.outlook.common.config.Configurations;
import org.outlook.common.log.LogUtil;
import org.outlook.common.net.URLUtil;
import org.outlook.common.queue.ThreadPoolManager;
import org.outlook.crawler.client.domain.Source;
import org.outlook.crawler.client.pojo.HttpStatus;
import org.outlook.crawler.client.pojo.Webinfo;
import org.outlook.crawler.task.ImpostorTask;
import org.outlook.crawler.util.parser.WebinfoProcessor;

/**
 * Crawls a single website: initializes the repository directory, fetches the
 * site's index page, parses it, and hands discovered pages to the shared task
 * queue for processing.
 * @author Watson email: watson.wu@hotmail.com
 * @date 2011
 */
@SuppressWarnings("unchecked")
public class WebsiteCrawler {
	
	private static final String CRAWLER_REPOSITORY = "crawler.repository";
	private final Log logger = LogFactory.getLog(WebsiteCrawler.class);

	
	public final static String FILTERS = ".*(\\.(css|bmp|gif|jpe?g|png|tiff?|mid|mp2|mp3|mp4|wav|avi|mov|mpeg|ram|m4v|pdf|rm|smil|wmv|swf|wma|zip|rar|gz|asp|aspx|php|jsp))$";
	public final static String[] CONVERS = {".asp",".php",".jsp",".aspx"};
	public static File target;
	public static int updateCycle = 43200000;
	public static ThreadPoolManager<ImpostorTask> tpm = ThreadPoolManager.newInstance();
	public Source source;
	
	
	public void crawl() throws Exception {
		crawl(source);
	}

	public static void crawl(String domain) throws Exception {
		if(StringUtils.isEmpty(domain)) return ;
		
		Source source = new Source();
		source.setUrl(domain);
		source.setUpdateCycle(updateCycle);
		new WebsiteCrawler(source).crawl(source);
	}
	
	private void crawl(Source source) throws Exception {
		Fetcher fetcher = new Fetcher();
		source.setUpdateCycle(updateCycle);
		Webinfo webinfo = fetcher.fetch(source.getDomain(), source);
		if(webinfo != null && webinfo.getStatus() == HttpStatus.SC_OK) {
			WebinfoProcessor webinfoProcessor = new WebinfoProcessor();
			webinfoProcessor.parseAndSave(webinfo, source);
			
			//TODO start app to monitor task handle status
			while(tpm.getQueueSize() == 0) {
				sleep(1000);
			}
			
			//TODO report
		}
	}
	
	private static void sleep(int seconds) {
		try {
			Thread.sleep(seconds * 1000);
		} catch (Exception e) {
		}
	}
	
	private void init() throws IOException {
		try {
			String index = source.getUrl();
			String repoDir = Configurations.getStringProperty(CRAWLER_REPOSITORY, null);
			if(StringUtils.isEmpty(index) || StringUtils.isEmpty(repoDir))
				LogUtil.error(logger, "error argurments");
			
			if(!repoDir.endsWith("/"))
				repoDir += "/";
				
			target = new File(repoDir);
			if(target.exists() && !target.isDirectory())
				LogUtil.error(logger, "error argurments");
			if(!target.exists())
				target.createNewFile();
			
			URL url = new URL(index);
			if(StringUtils.isNotBlank(url.getFile())) {
				index = index.substring(0, index.lastIndexOf("/"));
			}
			source.setDomain(URLUtil.getDomainName(index));
			
		} catch (MalformedURLException e) {
			throw e;
		} catch (IOException e) {
			throw e;
		}
	}
	
	public WebsiteCrawler(String indexUrl) throws IOException {
		this(new Source(indexUrl));
		source.setUpdateCycle(updateCycle);
	}
	
	public WebsiteCrawler(Source source) throws IOException {
		this.source = source;
		init();
	}

}
