package com.yufei.infoExtractor.task;

import it.sauronsoftware.cron4j.TaskExecutionContext;


import java.lang.ref.WeakReference;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.CountDownLatch;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.core.task.TaskExecutor;



import com.yufei.infoExtractor.cache.CacheFactory;
import com.yufei.infoExtractor.cache.InfoExtractorCache;
import com.yufei.infoExtractor.core.LinkAction;
import com.yufei.infoExtractor.extractor.ContentExtractor;
import com.yufei.infoExtractor.extractor.UrlExtractor;
import com.yufei.infoExtractor.io.HttpDataRetriever;
import com.yufei.infoExtractor.io.DataRetrieverFactory;
import com.yufei.infoExtractor.io.DataRetrieverFeatures;
import com.yufei.infoExtractor.pfw.InfoExtractorDao;
import com.yufei.infoExtractor.pfw.entity.Pattern;
import com.yufei.infoExtractor.pfw.entity.Relatedlink;
import com.yufei.infoExtractor.pfw.entity.Seedsite;
import com.yufei.infoExtractor.pfw.entity.Task;
import com.yufei.infoExtractor.pfw.impl.InfoExtractorDaoMongodImpl;
import com.yufei.infoExtractor.util.AppUtil;
import com.yufei.infoExtractor.util.CommonUtil;
import com.yufei.infoExtractor.util.EncryptUtil;
import com.yufei.infoExtractor.util.ExceptionUtil;
import com.yufei.infoExtractor.util.PatternUtils;

/**
 * Created by @author jasstion at 2012-9-17.
 *
 * A crawler task scheduled by cron4j: this class extends cron4j's Task, and the
 * actual scheduling is delegated to the cron4j Scheduler. On execution it builds
 * one context map per seed site, marks the task as running in the DAO, extracts
 * URLs/content for each seed, and finally records the task end time.
 */
public class InfoExtractorCommonTask extends it.sauronsoftware.cron4j.Task {

	private static Log mLog = LogFactory.getLog(InfoExtractorCommonTask.class);

	/** The persistent task definition (seeds, entity class name, schedule) this cron4j task runs. */
	private Task task;
	/** DAO used to read fingerprints and persist crawl status; resolved from the bean container in init(). */
	private InfoExtractorDao crawlerDao;
	/** One execution-context map per seed site, built by init() and consumed by execute(). */
	private List<Map> taskContext = new ArrayList<Map>();

	public Task getTask() {
		return task;
	}

	public void setTask(Task task) {
		this.task = task;
	}

	public InfoExtractorCommonTask(Task task) {
		super();
		this.task = task;
	}

	/**
	 * Resolves the DAO from the bean container and builds one execution context
	 * per seed site of this task (so that per-seed information can be observed
	 * externally, e.g. by a monitoring process). Seeds whose context cannot be
	 * built are logged and skipped; the remaining seeds are still processed.
	 */
	private void init() {
		crawlerDao = (InfoExtractorDao) AppUtil.getBeanFromBeanContainer(InfoExtractorDao.class);

		List<Seedsite> seeds = task.getSeedsites();
		for (Seedsite seed : seeds) {
			Map contextForSeedsite;
			try {
				contextForSeedsite = this.buildContextForSeedsite(seed);
			} catch (Exception e) {
				mLog.info("初始化种子上下文失败,种子信息是："+seed.toString()+"\n"+"具体错误信息是："+ExceptionUtil.getExceptionDetailsMessage(e));
				continue;
			}
			this.taskContext.add(contextForSeedsite);
		}
	}

	/**
	 * Builds the per-seed execution context map shared by the extractors.
	 *
	 * The map carries: the URL cache, the page-content cache, the extraction
	 * pattern's property matches, the entity class, crawl depth bookkeeping,
	 * the per-depth link sets, previously-crawled link fingerprints, the
	 * ready-URL queue and the configured data retriever.
	 *
	 * @param seed the seed site to build a context for
	 * @return the populated context map
	 * @throws Exception if the entity class cannot be loaded/instantiated or a
	 *         collaborator fails
	 */
	private Map buildContextForSeedsite(Seedsite seed) throws Exception {
		String classFullName = task.getEntityFullName();
		Map context = new HashMap();

		CacheFactory<String, WeakReference<String>> cacheFactory = CacheFactory.getInstance();
		InfoExtractorCache<String, WeakReference<String>> cache =
				cacheFactory.createCache(CacheFactory.MemoryCacheType);
		context.put("urlCache", cache);

		// Extraction pattern for this seed; the regex map must be synchronized
		// before the property matches are read below.
		Pattern pattern = (Pattern) seed.getPattern();
		pattern.synchronizeRegexMap();

		// links holds one Set per crawl depth level: links[i] collects every
		// related link discovered at depth i. Array length equals seed.getDepth().
		// (A dead enhanced-for loop that only reassigned its loop variable was
		// removed here — it never initialized the array slots.)
		Set[] links = new HashSet[seed.getDepth()];
		for (int i = 0; i < links.length; i++) {
			links[i] = new HashSet();
		}

		// Load the entity class once and reuse it for both instantiation and the
		// context entry (the original resolved it twice via Class.forName).
		Class entityClass = Class.forName(classFullName);
		Relatedlink l = (Relatedlink) entityClass.newInstance();
		l.setLink(seed.getSiteName());
		l.setHashCode(EncryptUtil.md5(l.getLink()));
		// NOTE(review): the raw link String — not the Relatedlink entity — is
		// seeded at depth 0; confirm downstream consumers expect Strings here.
		links[0].add(l.getLink());

		// Cache of fetched page content keyed by link.
		Map<String, String> linkContentCache = new HashMap<String, String>();
		context.put("linkContentCache", linkContentCache);

		context.put("propertyMatches", pattern.getPropertyMatches());
		context.put("entityClass", entityClass);
		context.put("depth", seed.getDepth());
		// currentDepth tracks how deep the crawl currently is; starts at 0.
		context.put("currentDepth", 0);
		context.put("seed", seed);
		context.put("linkSet", links);
		context.put("protocolType", seed.getProtocolType());
		context.put("requestExecuteJs", seed.isRequestExecuteJs());

		// Fingerprints of links already crawled for this site; empty set on the
		// first crawl of a site.
		Set<String> linkFingerPrints =
				crawlerDao.getLinkFingerprints(seed.getTaskName(), (Class) context.get("entityClass"));
		context.put("linkFingerPrints", linkFingerPrints);

		List<String> readyUrlList = new ArrayList<String>();
		context.put("readyUrlList", readyUrlList);

		HttpDataRetriever dataRetriever = DataRetrieverFactory.createDataRetriever(
				new DataRetrieverFeatures(seed.isRequestExecuteJs(), seed.getProtocolType()));
		context.put("dataRetriever", dataRetriever);
		return context;
	}

	/**
	 * cron4j entry point. Builds the per-seed contexts, records the start time
	 * and running status, crawls each seed site in turn (a failure on one seed
	 * is logged and does not stop the others), then records the end time.
	 *
	 * @param arg0 cron4j execution context (unused)
	 */
	@Override
	public void execute(TaskExecutionContext arg0) throws RuntimeException {
		init();
		mLog.info("the task "+task.getTaskName()+" starts running!");
		Timestamp startedTime = new Timestamp(System.currentTimeMillis());
		crawlerDao.setTaskTime(task.getId(), startedTime, null);
		crawlerDao.updateTaskStatus(Task.IS_RUNNING, task.getId());
		crawlerDao.setTaskStartedTime(new Date(), task.getId());

		for (Map context : taskContext) {
			Seedsite seedsite = (Seedsite) context.get("seed");
			crawlerDao.updateSdStatus(Seedsite.IS_CRAWLERING, seedsite.getId());
			try {
				Set[] linkSet = (Set[]) context.get("linkSet");
				Integer depth = linkSet.length;
				mLog.info("开始对种子网站进行信息采集.............");
				mLog.debug(seedsite.toString());

				new UrlExtractor(this).extract(context);
				traceUrlFetchList(context, linkSet, depth, seedsite);
			} catch (Exception e) {
				// FIX: was e.printStackTrace() — log through the task logger so
				// the failure is visible, and keep crawling the remaining seeds.
				mLog.error("crawl failed for seed site: " + seedsite.toString() + "\n"
						+ ExceptionUtil.getExceptionDetailsMessage(e), e);
			}
		}
		mLog.info("the task "+task.getTaskName()+" finish running!");
		crawlerDao.setTaskTime(task.getId(), startedTime, new Timestamp(System.currentTimeMillis()));
		crawlerDao.setTaskEndTime(new Date(), task.getId());
	}

	/**
	 * Trace-logs every URL collected at each depth level. No-op unless trace
	 * logging is enabled.
	 *
	 * @param context per-seed context (unused here; kept for signature stability)
	 * @param linkSet one Set of URLs per depth level
	 * @param depth   number of depth levels to dump
	 * @param seed    the seed site being crawled (unused here)
	 */
	private void traceUrlFetchList(Map context, Set[] linkSet, Integer depth,
			Seedsite seed) {
		if (mLog.isTraceEnabled()) {
			String str = "此次抽取的url为一下列表：";
			mLog.trace(str+"\n\n\n");
			for (int i = 0; i < depth.intValue(); i++) {
				mLog.trace("********************************************************************************\n");
				mLog.trace("当前深度为："+i+"\n");
				Iterator iterator = linkSet[i].iterator();
				while (iterator.hasNext()) {
					mLog.trace(iterator.next()+"\n");
				}
			}
		}
	}

	// cron4j capability hooks: this task may be paused, stopped, and tracked.

	public boolean canBePaused() {
		return true;
	}

	public boolean canBeStopped() {
		return true;
	}

	public boolean supportsCompletenessTracking() {
		return true;
	}

	public boolean supportsStatusTracking() {
		return true;
	}

}
 
