package com.foreveross.crawl;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import javax.servlet.ServletContextEvent;

import org.apache.commons.lang.StringUtils;
import org.apache.log4j.PropertyConfigurator;
import org.dayatang.domain.InstanceFactory;
import org.dayatang.ioc.spring.factory.SpringInstanceProvider;
import org.quartz.CronExpression;
import org.quartz.JobDetail;
import org.quartz.Scheduler;
import org.quartz.impl.JobDetailImpl;
import org.quartz.impl.StdSchedulerFactory;
import org.quartz.impl.triggers.CronTriggerImpl;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.web.context.ContextLoaderListener;
import org.springframework.web.context.WebApplicationContext;
import org.springframework.web.context.support.WebApplicationContextUtils;

import com.foreveross.GlobalMapOperatorBeta1;
import com.foreveross.JsonStringOperator;
import com.foreveross.crawl.application.impl.computer.ComputerInstanceByStrategy;
import com.foreveross.crawl.common.application.ITaskModelEntityApplication;
import com.foreveross.crawl.common.cfg.PropertyUtils;
import com.foreveross.crawl.common.cfg.system.EnginePropertiesLoader;
import com.foreveross.crawl.common.cfg.system.SystemPropertiesLoader;
import com.foreveross.crawl.common.util.SpringHelper;
import com.foreveross.crawl.config.ConfigLoader;
import com.foreveross.crawl.corn.SimpleCronTriggerBuilder;
import com.foreveross.crawl.dto.TaskStatisticsDto;
import com.foreveross.crawl.vo.ChannelInfo;
import com.foreveross.crawl.vo.MachineInfo;
import com.foreveross.crawl.vo.Message;
import com.foreveross.crawl.vo.TaskStatisticsBeta1;
import com.foreveross.crawl.vo.TaskStatisticsVO;
import com.foreveross.crawl.workers.CrawlBaseTaskClearWorker;
import com.foreveross.crawl.workers.CrawlBaseTaskWorkerBeta2;
import com.foreveross.crawl.workers.FilePollWorker;
import com.foreveross.crawl.workers.SendHeartbeatWorker;
import com.foreveross.util.ClientAdapterPropertiesLoader;
import com.foreveross.util.HttpClientUtil;
import com.foreveross.util.NodeInfoPropertyUtil;
import com.foreveross.util.PropertesCustom;

/**
 * Startup entry point of the crawl engine.
 *
 * <p>Registered as a servlet context listener. On container start it
 * configures log4j, registers all property/xml loaders, brings up the
 * Spring container, fetches group/channel metadata from the control
 * center, registers this node, and finally starts the Quartz scheduler
 * with all recurring jobs.</p>
 */
public class CrawlEngineStartUp extends ContextLoaderListener {

	private static final Logger logger = LoggerFactory
			.getLogger(CrawlEngineStartUp.class);

	/**
	 * Boots the whole engine in order: logging, configuration, Spring,
	 * node registration, counters, then the Quartz job queue.
	 *
	 * @param event servlet context event supplied by the container
	 * @throws Exception on any unrecoverable startup failure (caller
	 *         {@link #contextInitialized(ServletContextEvent)} exits the JVM)
	 */
	public void startup(ServletContextEvent event) throws Exception {
		// 1. log4j configuration
		logger.info("加载log配置文件......");
		PropertyConfigurator.configure(CrawlEngineStartUp.class
				.getClassLoader().getResource("log4j.properties"));
		// 2. register the unified loaders for system property/xml files
		logger.info("抓取引擎系统配置文件统一注册......");
		ConfigLoader.initLoader();
		// 3. bring up the Spring container via the parent listener
		logger.info("加载spring容器支持......");
		super.contextInitialized(event);
		WebApplicationContext applicationContext = WebApplicationContextUtils
				.getRequiredWebApplicationContext(event.getServletContext());
		// expose the Spring context through the koala InstanceFactory
		SpringInstanceProvider springProvider = new SpringInstanceProvider(
				applicationContext);
		InstanceFactory.setInstanceProvider(springProvider);

		SpringHelper.setApplicationContext(applicationContext);
		// collect this machine's hardware/network info at startup
		Message message = ComputerInstanceByStrategy.getInstance().getComputerMessage();

		// fetch group and channel metadata from the service center
		this.buildGroupAndChannel(message);

		sendRegisterInfo(message);

		// 4. initialize the engine counters
		logger.info("引擎计数器实例化......");
		CrawlEngineMonitor.getInstance().show();
		// 5. start the task-group manager / job queue
		logger.info("启动队列......");
		startWork();

	}

	/**
	 * Initializes task statistics at startup. Currently only purges
	 * invalid entries from the persisted task model; the in-memory
	 * statistics rebuilding has been disabled (see VCS history).
	 *
	 * @param channelIdList channels handled by this node; kept for the
	 *        disabled statistics rebuilding, currently unused
	 */
	private void initStatisticsTask(List<Long> channelIdList) {
		ITaskModelEntityApplication iTaskModelEntityApplication =
				(ITaskModelEntityApplication) SpringHelper.getBean(ITaskModelEntityApplication.class);
		// remove invalid rows from the task-model store
		iTaskModelEntityApplication.deleteValidTaskModel();
	}

	/**
	 * Starts the Quartz scheduler and registers all recurring jobs:
	 * per-channel crawl jobs, the daily task-cache clear job, the
	 * heartbeat job and (when tasks are file-based) the folder scanner.
	 *
	 * @throws Exception if the scheduler cannot be created or started
	 */
	public void startWork() throws Exception {
		Scheduler scheduler = new StdSchedulerFactory().getScheduler();
		scheduler.getListenerManager().addSchedulerListener(new TaskMonitorSchedulerListener());
		scheduler.start();
		logger.info("加入常规任务执行器");
		startBaseJob(scheduler);
		logger.info("加入定时清除任务缓存任务");
		clearAllTaskJob(scheduler);
		logger.info("加入心跳包发送任务");
		sendHeartbeatJob(scheduler);
		// the folder-scan job is only needed when tasks are NOT stored in the DB
		if (!PropertyUtils.getBooleanProperty(PropertesCustom.SYSTEM_DB_ISSTORAGE, SystemPropertiesLoader.FILE_NAME)) {
			logger.info("加入文件夹扫描任务");
			scanTaskJob(scheduler);
		}
	}

	/**
	 * Schedules one crawl job per configured channel. The channel set is
	 * taken either from the explicit {@code engine.channel.id.contains}
	 * list or, as a fallback, from the begin/end id range; each id is
	 * intersected with the channels known to the control center.
	 *
	 * <p>Exits the JVM when no valid channel remains or scheduling fails —
	 * the engine is useless without its base jobs.</p>
	 *
	 * @param scheduler started Quartz scheduler to register jobs with
	 * @return always {@code true} (failure paths call {@code System.exit})
	 */
	public boolean startBaseJob(Scheduler scheduler) {
		JobDetail channelBaseJob = null;
		try {
			// per-channel strategy: global defaults, overridable per channel below
			long interval = PropertyUtils.getIntProperty(
					EnginePropertiesLoader.ENGINE_CHANNEL_GETTASK_INTERVAL,
					EnginePropertiesLoader.FILE_NAME, 120000);
			String start = PropertyUtils.getProperty(
					EnginePropertiesLoader.ENGINE_TASK_CRAWL_TIME_BEGINE,
					EnginePropertiesLoader.FILE_NAME, "00:00");
			String end = PropertyUtils.getProperty(
					EnginePropertiesLoader.ENGINE_TASK_CRAWL_TIME_END,
					EnginePropertiesLoader.FILE_NAME, "20:00");

			List<Long> channelDoSet = new ArrayList<Long>();
			Set<Long> channelSet = GlobalMapOperatorBeta1.getAllChannel();
			Integer[] channels = PropertyUtils.getIntPropertys("engine.channel.id.contains",
					EnginePropertiesLoader.FILE_NAME, ",");
			if (channels != null && channels.length > 0) {
				// explicit channel list wins
				for (Integer c : channels) {
					long t = c.longValue();
					if (channelSet.contains(t)) {
						channelDoSet.add(t);
					}
				}
			} else {
				// fall back to an inclusive [begin, end] id range
				long channelIdBegine = PropertyUtils.getIntProperty(
						EnginePropertiesLoader.ENGINE_CHANNEL_ID_BEGINE,
						EnginePropertiesLoader.FILE_NAME);
				long channelIdEnd = PropertyUtils.getIntProperty(
						EnginePropertiesLoader.ENGINE_CHANNEL_ID_END,
						EnginePropertiesLoader.FILE_NAME);
				for (long i = channelIdBegine; i <= channelIdEnd; i++) {
					if (channelSet.contains(i)) {
						channelDoSet.add(i);
					}
				}
			}
			if (channelDoSet.isEmpty()) {
				logger.error("渠道参数不正确,系统退出!");
				System.exit(0);
			}
			// rebuild task statistics — finished/failed tasks were persisted locally
			initStatisticsTask(channelDoSet);
			for (Long channel : channelDoSet) {
				// per-channel interval override, default is the global interval
				interval = PropertyUtils.getIntProperty(
						"engine.channel.gettask.interval.channel" + channel,
						EnginePropertiesLoader.FILE_NAME, (int) interval);
				// timestamp suffix keeps job names unique across restarts
				channelBaseJob = new JobDetailImpl(
						CrawlContext.CRAWL_TASK_CHANNEL_BASE + channel
								+ System.currentTimeMillis(),
						CrawlContext.CRAWL_TASK_CHANNEL_BASE,
						CrawlBaseTaskWorkerBeta2.class);
				channelBaseJob.getJobDataMap().put(
						CrawlContext.CRAWL_CHANNEL_ID, channel);
				scheduler.scheduleJob(channelBaseJob, SimpleCronTriggerBuilder
						.buildSimpleTriggerAddDailyCalendar(scheduler,
								interval, start, end));
			}
		} catch (Exception e) {
			logger.error("常规任务执行启动异常，系统退出!", e);
			System.exit(0);
		}
		return true;
	}

	@Override
	public void contextDestroyed(ServletContextEvent arg0) {
		logger.info("容器监听器己销毁!");
	}

	@Override
	public void contextInitialized(ServletContextEvent event) {
		logger.info("提示: 抓取引擎开始运行......");

		try {
			startup(event);
		} catch (Exception e) {
			// NOTE(review): exit code 0 on failure hides the error from
			// wrapper scripts — kept for backward compatibility.
			logger.error("启动时出现未知异常，系统已停止运行....", e);
			System.exit(0);
		}
	}

	/**
	 * Fetches channel, channel-group and group data from the control
	 * center (with up to 3 retries on connect timeout), publishes the
	 * parsed channel details into {@link GlobalMapOperatorBeta1} and
	 * caches the raw JSON under the {@code "channelGoup"} key.
	 *
	 * @param message machine info used to derive the {@code nodeSn} parameter
	 * @throws Exception when all retries time out or any response is blank
	 */
	private void buildGroupAndChannel(Message message) throws Exception {
		String channelUrl = PropertyUtils.getProperty(PropertesCustom.SYSTEM_REMOTE_IP_URL, SystemPropertiesLoader.FILE_NAME, "") + PropertyUtils.getProperty(PropertesCustom.SYSTEM_CHANNEL_URL, SystemPropertiesLoader.FILE_NAME, "");
		String channelGroupUrl = PropertyUtils.getProperty(PropertesCustom.SYSTEM_REMOTE_IP_URL, SystemPropertiesLoader.FILE_NAME, "") + PropertyUtils.getProperty(PropertesCustom.SYSTEM_CHANNELGROUP_URL, SystemPropertiesLoader.FILE_NAME, "");
		String groupsUrl = PropertyUtils.getProperty(PropertesCustom.SYSTEM_REMOTE_IP_URL, SystemPropertiesLoader.FILE_NAME, "") + PropertyUtils.getProperty(PropertesCustom.SYSTEM_CHANNELGROUP_URL, SystemPropertiesLoader.FILE_NAME, "");

		Map<String, String> params = new HashMap<String, String>();
		Map<String, String> heads = new HashMap<String, String>();
		Map<String, Object> result = new HashMap<String, Object>();
		// nodeSn identifies this node in the control center's logs
		params.put("nodeSn", this.builderParameter(message).get("nodeSn"));
		String channel = "";
		String groupRoot = "";
		String groups = "";
		for (int i = 0; i < 3; i++) { // retry on connect timeout
			try {
				channel = HttpClientUtil.getInstance().execute(
						channelUrl, params, heads, "UTF-8");
				groupRoot = HttpClientUtil.getInstance().execute(
						channelGroupUrl, params, heads, "UTF-8");
				groups = HttpClientUtil.getInstance().execute(groupsUrl,
						params, heads, "UTF-8");
				break;
			} catch (org.apache.http.conn.ConnectTimeoutException timeoutException) {
				// i + 1: log the 1-based attempt number
				logger.error(String.format("连接控制服务台第%s次重试", i + 1));
				Thread.sleep(8000);
				if (i == 2) {
					throw timeoutException;
				}
			}
		}
		if (!JsonStringOperator.containsErrorInfo(channel, new String[] { "error" })) {
			result.put("channel", channel);
			List<Long> channelIdList = new ArrayList<Long>();
			Map<Long, ChannelInfo> channelMap = JsonStringOperator.analyzeAllChannelDetailByJson(channel);
			for (Long channelId : channelMap.keySet()) {
				channelIdList.add(channelId);
				GlobalMapOperatorBeta1.putChannelDetail(channelId, channelMap.get(channelId));
			}
			GlobalMapOperatorBeta1.putChannelToChanelAndTask(channelIdList);
		}
		if (!JsonStringOperator.containsErrorInfo(groupRoot, new String[] { "error" })) {
			result.put("groupRoot", groupRoot);
		}
		if (!JsonStringOperator.containsErrorInfo(groups, new String[] { "error" })) {
			result.put("groups", groups);
		}

		if (StringUtils.isBlank(channel) || StringUtils.isBlank(groupRoot) || StringUtils.isBlank(groups)) {
			throw new RuntimeException("组或渠道为空!");
		}

		GlobalMapOperatorBeta1.setCache("channelGoup", result);
	}

	/**
	 * Adapts a legacy channel-range configuration to an explicit id list.
	 * NOTE(review): currently unreferenced; kept for the legacy code path.
	 *
	 * @param startChannel first channel id, inclusive
	 * @param endChannel last channel id, inclusive
	 * @return all ids in [startChannel, endChannel]
	 */
	private List<Long> adapterOldChannel(long startChannel, long endChannel) {
		List<Long> retList = new ArrayList<Long>();
		for (long id = startChannel; id <= endChannel; id++) {
			retList.add(id);
		}
		return retList;
	}

	/**
	 * Schedules the daily job that clears cached task information.
	 * A failure here is logged but does not stop the engine.
	 *
	 * @param scheduler started Quartz scheduler
	 */
	private void clearAllTaskJob(Scheduler scheduler) {
		try {
			String cronStr = PropertyUtils.getProperty(
					PropertesCustom.ENGINE_TASK_CRAWL_TIME_CLEAR_TASK,
					EnginePropertiesLoader.FILE_NAME, "0 55 23 * * ?");

			JobDetail clearJob = new JobDetailImpl("CLEAR_TASK_JOB", "CLEAR_TASK_GROUP", CrawlBaseTaskClearWorker.class);
			CronTriggerImpl cronTrigger = new CronTriggerImpl("CLEAR_TRIGGER" + UUID.randomUUID(), "TG_CLEAR");
			CronExpression cronExp = new CronExpression(cronStr);
			cronTrigger.setCronExpression(cronExp);
			scheduler.scheduleJob(clearJob, cronTrigger);
		} catch (Exception e) {
			logger.error("MQ任务清空任务执行启动异常!", e);
		}
	}

	/**
	 * Schedules the folder-scan job used when tasks are file-based.
	 * A failure here is logged but does not stop the engine.
	 *
	 * @param scheduler started Quartz scheduler
	 */
	private void scanTaskJob(Scheduler scheduler) {
		try {
			// BUGFIX: the old default "0 0/4 * * *" had only 5 fields; Quartz
			// cron expressions need at least 6 (sec min hour dom month dow),
			// so CronExpression threw and the job was never scheduled when
			// the property was absent.
			String cronStr = PropertyUtils.getProperty(
					PropertesCustom.SYSTEM_SCAN_PARSE_TIME,
					SystemPropertiesLoader.FILE_NAME, "0 0/4 * * * ?");

			JobDetail scanJob = new JobDetailImpl("SCAN_TASK_JOB", "SCAN_TASK_GROUP", FilePollWorker.class);
			CronTriggerImpl cronTrigger = new CronTriggerImpl("SCAN_TRIGGER" + UUID.randomUUID(), "TG_CLEAR");
			CronExpression cronExp = new CronExpression(cronStr);
			cronTrigger.setCronExpression(cronExp);
			scheduler.scheduleJob(scanJob, cronTrigger);
		} catch (Exception e) {
			logger.error("文件扫描任务执行启动异常!", e);
		}
	}

	/**
	 * Sends the one-shot registration request for this node to the
	 * control center. Failures are logged and swallowed on purpose —
	 * registration is best-effort at startup.
	 *
	 * @param message machine info used to build the request parameters
	 */
	private void sendRegisterInfo(Message message) {
		String ipRemoteUrl = PropertyUtils.getProperty(PropertesCustom.SYSTEM_REMOTE_IP_URL, SystemPropertiesLoader.FILE_NAME, "");
		String registerUrl = PropertyUtils.getProperty(PropertesCustom.SYSTEM_REGISTER_URL, SystemPropertiesLoader.FILE_NAME, "");
		try {
			HttpClientUtil httpClient = HttpClientUtil.getInstance();
			Map<String, String> headMap = new HashMap<String, String>();
			// TODO: add headMap entries once the server requires them

			String responseMsg = httpClient.execute(ipRemoteUrl + registerUrl, builderParameter(message), headMap, "utf-8");
			logger.info("注册反馈信息: " + responseMsg);
		} catch (Exception e1) {
			// keep the stack trace, not just the message
			logger.error(e1.getMessage(), e1);
		}
	}

	/**
	 * Builds the registration request parameters (ip, port, node name,
	 * nodeSn, task callback url, adapter version) and persists the
	 * computed nodeSn to the node property file.
	 *
	 * @param message machine info whose first body entry supplies the local IP
	 * @return parameter map for the registration request
	 */
	private Map<String, String> builderParameter(Message message) {
		Map<String, String> retMap = new HashMap<String, String>();
		String port = PropertyUtils.getProperty(
				PropertesCustom.SYSTEM_PORT,
				SystemPropertiesLoader.FILE_NAME, "8080");

		String nodeName = NodeInfoPropertyUtil.getValueByProperty(NodeInfoPropertyUtil.NODE_NAME, "node1");
		String contextName = PropertyUtils.getProperty(
				PropertesCustom.SYSTEM_CONTEXT_NAME,
				SystemPropertiesLoader.FILE_NAME, "webcrawl-web");
		String clientAdapterVersion = PropertyUtils.getProperty(
				ClientAdapterPropertiesLoader.CLIENT_ADAPTER_VERSION,
				ClientAdapterPropertiesLoader.FILE_NAME, "1.0");
		@SuppressWarnings("unchecked")
		MachineInfo machineInfo = ((List<MachineInfo>) message.getBody()).get(0);

		String ip = "";
		boolean obtainIp = PropertyUtils.getBooleanProperty(PropertesCustom.SYSTEM_NET_OBTAIN_IP, SystemPropertiesLoader.FILE_NAME);
		if (obtainIp) { // optionally resolve the public IP over the network
			try {
				ip = obtainNetIp();
			} catch (Exception e) {
				logger.error(e.getMessage(), e);
			}
		}
		// BUGFIX: was `ip == ""`, a reference comparison that is always false
		// for the freshly-built string from obtainNetIp(), so the machine IP
		// fallback never triggered.
		if (StringUtils.isBlank(ip)) {
			ip = machineInfo.getIp();
		}
		String nodeSn = ip + ":" + port + "-" + nodeName;
		// persist nodeSn so it survives restarts
		NodeInfoPropertyUtil.writeProperty(NodeInfoPropertyUtil.NODE_SN, nodeSn);
		retMap.put("clientAdapterVersion", clientAdapterVersion);
		retMap.put("ip", ip);
		retMap.put("name", nodeName);
		retMap.put("nodeSn", nodeSn);
		retMap.put("taskUrl", "http://" + ip + ":" + port + "/" + contextName);
		retMap.put("port", port);
		return retMap;
	}

	/**
	 * Returns the node serial number from the node property file,
	 * generating and persisting a random UUID when none exists.
	 * NOTE(review): currently unreferenced (superseded by the ip:port-name
	 * scheme in builderParameter); kept in case the old scheme is restored.
	 *
	 * @return the existing or newly generated node serial number
	 */
	private String obtainNodeSn() {
		String nodeSn = NodeInfoPropertyUtil.getValueByProperty(NodeInfoPropertyUtil.NODE_SN);
		if (StringUtils.isBlank(nodeSn)) {
			nodeSn = UUID.randomUUID().toString();
			NodeInfoPropertyUtil.writeProperty(NodeInfoPropertyUtil.NODE_SN, nodeSn);
		}
		return nodeSn;
	}

	/**
	 * Schedules the recurring heartbeat job.
	 * A failure here is logged but does not stop the engine.
	 *
	 * @param scheduler started Quartz scheduler
	 */
	private void sendHeartbeatJob(Scheduler scheduler) {
		try {
			String cronStr = PropertyUtils.getProperty(
					PropertesCustom.SYSTEM_HEART_TIME,
					SystemPropertiesLoader.FILE_NAME, "0 0/7 * * * ?");

			JobDetail clearJob = new JobDetailImpl("SEND_HEARTBEAT_TASK_JOB", "SEND_HEARTBEAT_TASK_GROUP", SendHeartbeatWorker.class);
			CronTriggerImpl cronTrigger = new CronTriggerImpl("SEND_HEARTBEAT" + UUID.randomUUID(), "TG_CLEAR");
			CronExpression cronExp = new CronExpression(cronStr);
			cronTrigger.setCronExpression(cronExp);
			scheduler.scheduleJob(clearJob, cronTrigger);
		} catch (Exception e) {
			logger.error("常规任务执行启动异常，系统退出!", e);
		}
	}

	/**
	 * Resolves this node's public IP by querying an external web service.
	 *
	 * @return the IP extracted from the service response (may be empty)
	 * @throws Exception when the HTTP request fails
	 */
	private String obtainNetIp() throws Exception {
		String url = "http://20140507.ip138.com/ic.asp";
		HttpClientUtil httpClient = HttpClientUtil.getInstance();
		String responseMsg = httpClient.executeByGet(url);
		// the service wraps the IP in square brackets
		String regEx = "\\[(.+?)\\]";
		return getPatternString(responseMsg, regEx);
	}

	/**
	 * Returns the first capture group of {@code regEx} in {@code content},
	 * trimmed and with {@code &nbsp;} entities removed; empty string when
	 * there is no match.
	 *
	 * @param content text to search
	 * @param regEx pattern with at least one capture group
	 * @return cleaned first group, or "" if no match
	 */
	private String getPatternString(String content, String regEx) {
		Pattern p = Pattern.compile(regEx, Pattern.DOTALL | Pattern.MULTILINE);
		Matcher m = p.matcher(content);
		String retStr = "";
		if (m.find()) {
			retStr = m.group(1);
		}
		return retStr.trim().replaceAll("&nbsp;", "");
	}
}
