/**
 * crawler
 *
 * outlook@China software studio
 * Copyright (c) 2008-2011. All Rights Reserved.
 * http://www.outlook.com
 */

package org.outlook.crawler.api.impl;

import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.outlook.common.cache.CacheService;
import org.outlook.common.log.LogUtil;
import org.outlook.crawler.client.api.Crawler;
import org.outlook.crawler.client.domain.Source;
import org.outlook.crawler.client.pojo.Response;
import org.outlook.crawler.client.pojo.ResponseType;
import org.outlook.crawler.client.pojo.Status;
import org.outlook.crawler.client.pojo.Webinfo;
import org.outlook.crawler.client.pojo.ResponseType.Type;
import org.outlook.crawler.database.CrawlerDataSource;
import org.outlook.crawler.service.CrawlerService;
import org.outlook.crawler.task.TaskFactory;
import org.outlook.crawler.util.CrawlerCacheUtil;
import org.outlook.crawler.util.ResponseUtil;
import org.outlook.crawler.util.parser.ELParser;

/**
 * function:
 * @author watson  email: watson.wu@hotmail.com
 * @date 2011-4-24
 */
public class CrawlerImpl implements Crawler {

	// One logger per class, not per instance.
	private static final Log logger = LogFactory.getLog(CrawlerImpl.class);

	private TaskFactory taskFactory;
	private CacheService cacheService;
	private CrawlerService crawlerService;
	private CrawlerDataSource crawlerDatasource;
	// When true, results are served from / written to the cache before crawling.
	private boolean cacheEnable = false;

	public TaskFactory getTaskFactory() {
		return taskFactory;
	}

	public void setTaskFactory(TaskFactory taskFactory) {
		this.taskFactory = taskFactory;
	}

	public CacheService getCacheService() {
		return cacheService;
	}

	public void setCacheService(CacheService cacheService) {
		this.cacheService = cacheService;
	}

	public CrawlerService getCrawlerService() {
		return crawlerService;
	}

	public void setCrawlerService(CrawlerService crawlerService) {
		this.crawlerService = crawlerService;
	}

	public CrawlerDataSource getCrawlerDatasource() {
		return crawlerDatasource;
	}

	public void setCrawlerDatasource(CrawlerDataSource crawlerDatasource) {
		this.crawlerDatasource = crawlerDatasource;
	}

	public boolean isCacheEnable() {
		return cacheEnable;
	}

	public void setCacheEnable(boolean cacheEnable) {
		this.cacheEnable = cacheEnable;
	}

	/**
	 * Crawls by keyword: resolves the source's API URL template for the keyword
	 * and delegates to {@link #crawlByUrl(String, Long, ResponseType)}.
	 * Serves a cached {@code Webinfo} first when caching is enabled.
	 *
	 * @param keyword      search keyword; must not be blank
	 * @param sourceId     id of the configured source; must not be null
	 * @param responseType synchronous/asynchronous response contract; must not be null
	 * @return crawl result, or an invalid-instance response for bad arguments
	 */
	@Override
	public Response crawlByKeyword(String keyword, Long sourceId, ResponseType responseType) {
		if(StringUtils.isBlank(keyword)) {
			LogUtil.debug(logger, "Keyword is invalid");
			return Response.newInvalidInstance();
		}
		if(null == sourceId) {
			LogUtil.debug(logger, "Source Id is invalid");
			return Response.newInvalidInstance();
		}
		// Guard before the cache-hit path below dereferences responseType (NPE otherwise).
		if(null == responseType) {
			LogUtil.debug(logger, "Response type is invalid");
			return Response.newInvalidInstance();
		}
		
		if(isCacheEnable()) {
			final String cacheKey = CrawlerCacheUtil.getCacheKey(keyword, sourceId);
			Object object = getCacheService().get(cacheKey);
			if(null != object) {
				if(responseType.isSynchr()) {
					// Synchronous caller: adapt the cached Webinfo to the requested type.
					Type type = responseType.getType();
					return new Response(ResponseUtil.handle((Webinfo)object, type), Status.RESULT);
				} else {
					// Asynchronous caller only needs the status, not the payload.
					return new Response(null, Status.RESULT);
				}
			}
		}
		
		String url = getRequestURL(keyword, sourceId);
		if(StringUtils.isEmpty(url)) {
			LogUtil.debug(logger, "Invalid parameters");
			return Response.newInvalidInstance();
		}
		
		return crawlByUrl(url, sourceId, responseType);
	}

	/**
	 * Builds the request URL for a keyword by expanding the source's API
	 * template. The {@link Source} is looked up from cache first and cached on miss.
	 *
	 * @return the expanded URL, or {@code null} when the source id is unknown
	 */
	private String getRequestURL(String keyword, Long sourceId) {
		Source source;
		Object cachedObject = getCacheService().get(sourceId.toString());
		if(cachedObject == null) {
			source = getCrawlerDatasource().getSourceById(sourceId);
			if(source == null) {
				LogUtil.debug(logger, "Invalid source id");
				return null;
			}
			// Cache the source so later lookups skip the datasource.
			getCacheService().put(sourceId.toString(), source);
		} else {
			source = (Source) cachedObject;
		}
		
		return ELParser.parserURL(source.getApi(), keyword);
	}

	/**
	 * Crawls a single URL for the given source. Returns a cached or already
	 * persisted {@code Webinfo} when available, otherwise triggers a new crawl.
	 *
	 * @param url          target URL; must not be blank
	 * @param sourceId     id of the configured source; must not be null
	 * @param responseType synchronous/asynchronous response contract; must not be null
	 * @return crawl result, or an invalid-instance response for bad arguments
	 */
	@Override
	public Response crawlByUrl(String url, Long sourceId, ResponseType responseType) {
		if(StringUtils.isBlank(url)) {
			LogUtil.debug(logger, "URL is invalid");
			return Response.newInvalidInstance();
		}
		if(null == sourceId) {
			LogUtil.debug(logger, "Source Id is invalid");
			return Response.newInvalidInstance();
		}
		if(null == responseType) {
			LogUtil.debug(logger, "Response type is invalid");
			return Response.newInvalidInstance();
		}
		
		if(isCacheEnable()) {
			final String cacheKey = CrawlerCacheUtil.getCacheKey(url, sourceId);
			Object object = getCacheService().get(cacheKey);
			if(null != object) {
				//TODO judge responseType 
				return new Response(object, Status.RESULT);
			}
		}
		Webinfo webinfo = getCrawlerService().getWebinfo(url, sourceId);
		if(null != webinfo) {
			//TODO judge responseType 
			return new Response(webinfo, Status.RESULT);
		}
		
		return getCrawlerService().crawl(url, sourceId, responseType);
	}

	/**
	 * Convenience overload of {@link #crawlByUrl(String, Long, ResponseType)}
	 * using the default source id {@code 0L}.
	 */
	@Override
	public Response crawlByUrl(String url, ResponseType responseType) {
		return crawlByUrl(url, 0L, responseType);
	}

	/**
	 * Crawls a from/to URL pair for the given source, skipping the crawl when
	 * the pair is already known to the crawler service.
	 *
	 * @param from         origin URL; must not be blank
	 * @param to           destination URL; must not be blank
	 * @param sourceId     id of the configured source; must not be null
	 * @param responseType synchronous/asynchronous response contract; must not be null
	 * @return crawl result, or an invalid-instance response for bad arguments
	 */
	@Override
	public Response crawlByUrl(String from, String to, Long sourceId, ResponseType responseType) {
		if(StringUtils.isBlank(from)) {
			LogUtil.debug(logger, "From is invalid");
			return Response.newInvalidInstance();
		}
		if(StringUtils.isBlank(to)) {
			LogUtil.debug(logger, "To is invalid");
			return Response.newInvalidInstance();
		}
		if(null == sourceId) {
			LogUtil.debug(logger, "Source Id is invalid");
			return Response.newInvalidInstance();
		}
		if(null == responseType) {
			LogUtil.debug(logger, "Response type is invalid");
			return Response.newInvalidInstance();
		}
		
		if(crawlerService.isExisted(from, to, sourceId)) {
			//TODO judge responseType
			Object object = null;
			return new Response(object, Status.RESULT);
		}
		
		return crawlerService.crawl(from, to, sourceId, responseType);
	}

	/**
	 * Convenience overload of
	 * {@link #crawlByUrl(String, String, Long, ResponseType)} using the
	 * default source id {@code 0L}.
	 */
	@Override
	public Response crawlByUrl(String from, String to, ResponseType responseType) {
		return crawlByUrl(from, to, 0L, responseType);
	}

	/**
	 * Crawls an entire website by source id.
	 * NOTE(review): not implemented yet — intentionally a no-op for now.
	 */
	@Override
	public void crawlWebsite(Long sourceId) {
		// TODO Auto-generated method stub
		
	}

}
