package com.foreveross.crawl.application.impl.task;

import java.io.FileInputStream;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;

import javax.inject.Inject;

import org.apache.commons.codec.digest.DigestUtils;
import org.apache.log4j.Logger;
import org.quartz.Scheduler;
import org.springframework.stereotype.Service;

import com.foreveross.GlobalMapOperator;
import com.foreveross.crawl.application.task.ITaskOperateApplication;
import com.foreveross.crawl.common.cfg.PropertyUtils;
import com.foreveross.crawl.common.cfg.system.SystemPropertiesLoader;
import com.foreveross.crawl.common.domain.BaseHBaseEntity;
import com.foreveross.crawl.common.em.TaskState;
import com.foreveross.crawl.common.exception.BusinessException;
import com.foreveross.crawl.dto.TaskModelDto;
import com.foreveross.crawl.vo.DataBody;
import com.foreveross.crawl.vo.TaskExcuteDetail;
import com.foreveross.enu.EnumTaskType;
import com.foreveross.fastdfs.FastDFSManager;
import com.foreveross.taskservice.common.TaskStatusEnum;
import com.foreveross.taskservice.common.application.crawlnode.ICrawlNodeBaseTaskApplication;
import com.foreveross.taskservice.common.bean.TaskModel;
import com.foreveross.util.ConcurrentTotalFileSizeWQueue;
import com.foreveross.util.DateUtil;
import com.foreveross.util.FileUtil;
import com.foreveross.util.GlobalVariable;
import com.foreveross.util.HttpClientUtil;
import com.foreveross.util.JsonUtil;
import com.foreveross.util.NodeInfoPropertyUtil;
import com.foreveross.util.PropertesCustom;

/**
 * @ClassName: TaskOperateApplicationImpl
 * @Description: 任务操作实现类
 * @author luofangyi
 * @date 2014-7-2 下午4:55:38
 * 
 */
@Service
//@Scope("prototype")
public class TaskOperateApplicationImpl implements ITaskOperateApplication {

	private static final Logger logger = Logger.getLogger(TaskOperateApplicationImpl.class);

	/** Record separator appended after each serialized {@link TaskExcuteDetail} in the parse file. */
	private static final String RECORD_SEPARATOR = "###foreveross_webcrawl_separator###";

	// NOTE(review): no longer referenced since the disabled body of
	// updateTaskStatusOfSingle was removed; kept for injection compatibility.
	@Inject
	private ICrawlNodeBaseTaskApplication iCrawlNodeBaseTaskApplication;

	/**
	 * Updates the scheduling state of a single task.
	 * <p>
	 * NOTE(review): the original implementation (quartz pause/delete/resume plus
	 * an MQ rollback notification) was commented out wholesale, leaving this
	 * method an intentional no-op. The dead code has been deleted; recover it
	 * from version control if the behavior is ever needed again.
	 *
	 * @param taskDto   task whose state should be applied
	 * @param scheduler quartz scheduler owning the task's job
	 * @throws Exception never thrown by the current no-op implementation
	 */
	@Override
	public void updateTaskStatusOfSingle(TaskModelDto taskDto,
			Scheduler scheduler) throws Exception {
		// Intentionally empty: task state transitions are currently disabled.
	}

	/**
	 * Applies {@link #updateTaskStatusOfSingle} to each task in the batch.
	 *
	 * @param taskDtos  tasks to update
	 * @param scheduler quartz scheduler owning the tasks' jobs
	 * @throws Exception propagated from the per-task update
	 */
	@Override
	public void updateTaskStatusOfBatch(List<TaskModelDto> taskDtos,
			Scheduler scheduler) throws Exception {

		for (TaskModelDto taskDto : taskDtos) {
			this.updateTaskStatusOfSingle(taskDto, scheduler);
		}
	}

	/**
	 * Serializes the crawled entities as a {@link TaskExcuteDetail} JSON record
	 * and writes it to the local parse file, guarded by a class-level lock so
	 * concurrent tasks do not interleave records in the file.
	 *
	 * @param obj      crawled entities (each a {@link BaseHBaseEntity}); may be null/empty
	 * @param fileName target parse file name
	 * @param model    task whose channel/id are recorded with the data
	 * @param isAppend whether to append to the file or overwrite it
	 */
	@Override
	public void persistenceListByJson(List<?> obj,
			String fileName, TaskModel model, boolean isAppend) {

		synchronized (TaskOperateApplicationImpl.class) {
			Long channelId = model.getChannel().getId();
			String taskId = model.getId();
			int flightTotal = obj != null ? obj.size() : 0;
			String taskType = null;
			List<DataBody> bodyList = new ArrayList<DataBody>();
			// Resolve the entity classes once per call instead of on every
			// iteration (the original ran Class.forName inside the loop).
			Class<?> doubleClass = loadClassOrNull(
					"com.foreveross.crawl.domain.airfreight.doub.DoublePlaneInfoEntity");
			Class<?> singleClass = loadClassOrNull(
					"com.foreveross.crawl.domain.airfreight.single.SinglePlaneInfoEntity");
			if (obj != null) {
				for (Object o : obj) {
					if (doubleClass != null && doubleClass.isInstance(o)) {
						taskType = EnumTaskType.DOUBLE.getType();
					} else if (singleClass != null && singleClass.isInstance(o)) {
						taskType = EnumTaskType.SINGLE.getType();
					}
					// TODO add further task types here as new requirements arrive

					BaseHBaseEntity baseInfo = (BaseHBaseEntity) o;
					String rowKey = baseInfo.generateRowKey();
					// Excluded properties are not serialized into the payload.
					Object data = JsonUtil.toJson(baseInfo, new String[] {
							"id", "baseEntityRepository", "planeInfoEntity" });
					bodyList.add(new DataBody(rowKey, data));
				}
			}
			TaskExcuteDetail taskDetail = new TaskExcuteDetail(channelId,
					taskId, flightTotal, taskType, bodyList);
			// Write the parse result to the local file, one record per separator.
			FileUtil.writeParseFile(fileName,
					JsonUtil.toJson(taskDetail, new String[] {}) + RECORD_SEPARATOR,
					isAppend);
			logger.info(String.format("成功写入%s渠道信息到文件", model.getChannel().getName()));
		}
	}

	/** Loads a class by name, returning {@code null} (and logging) when it is absent. */
	private static Class<?> loadClassOrNull(String className) {
		try {
			return Class.forName(className);
		} catch (ClassNotFoundException e) {
			logger.error(e);
			return null;
		}
	}

	/**
	 * Persists the parse result, first shipping the accumulated parse directory
	 * off as a zip when it has grown past the configured upload threshold.
	 *
	 * @param obj   crawled entities to persist
	 * @param model task whose data is being persisted
	 */
	@Override
	public void dealParseJson(List<?> obj, TaskModel model) {

		// Persistence-center URL used to announce the downloadable zip.
		final String persistenceUrl = PropertyUtils.getProperty(PropertesCustom.SYSTEM_DATAPERSISTENCE_DOWNLOAD_URL, SystemPropertiesLoader.FILE_NAME, "");
		// Upload retry count.
		final int reTry = Integer.parseInt(PropertyUtils.getProperty(PropertesCustom.SYSTEM_DATAPERSISTENCE_DOWNLOAD_RETRY_COUNT, SystemPropertiesLoader.FILE_NAME, "3"));
		// Upload threshold in bytes (defaults to 10 MiB).
		final long uploadThreshold = Long.parseLong(NodeInfoPropertyUtil.getValueByProperty(NodeInfoPropertyUtil.NODE_UPLOAD_MAX_SIZE, "10485760"));
		synchronized (TaskOperateApplicationImpl.class) {
			long fileSize = getDirectorySize();
			// Once the directory exceeds the threshold, ship it and start a
			// fresh file (isAppend == false truncates on the next write).
			boolean isAppend = fileSize < uploadThreshold;
			logger.info("获取当前文件大小为:" + fileSize + "  是否追加文件：" + isAppend);
			if (!isAppend) {
				logger.info("准备对文件进行压缩处理");
				zipFileAndDeal(persistenceUrl, reTry, fileSize);
			}
			logger.info("准备写入文件");
			this.persistenceListByJson(obj, GlobalVariable.parse_filename,
					model, isAppend);
		}
	}

	/**
	 * Returns the total size in bytes of the parse-file directory, or 0 when
	 * the computation is interrupted (interrupt status is restored).
	 */
	private synchronized long getDirectorySize() {
		long fileSize = 0;
		try {
			fileSize = new ConcurrentTotalFileSizeWQueue()
					.getTotalSizeOfFile(FileUtil.getParseFilePath());
		} catch (InterruptedException e) {
			// Restore the interrupt flag so callers can still observe it.
			Thread.currentThread().interrupt();
			logger.error(e.getMessage(), e);
		}
		return fileSize;
	}

	/**
	 * Zips the parse directory under a unique name (node SN + timestamp +
	 * nanoTime) and hands the archive to {@link #dealZip}; failures are logged
	 * but not propagated.
	 *
	 * @param persistenceUrl persistence-center notification URL
	 * @param reTry          upload retry count
	 * @param fileSize       size of the data being shipped, reported downstream
	 */
	private synchronized void zipFileAndDeal(String persistenceUrl, int reTry,
			long fileSize) {
		// Zip name: node serial number + current time + nanoseconds, to be unique.
		String zipName = NodeInfoPropertyUtil
				.getValueByProperty(NodeInfoPropertyUtil.NODE_SN)
				+ "-" + DateUtil.date2String(new Date(), "yyyyMMddHHmmss") + "-"
				+ Long.toString(System.nanoTime());
		String zipPath = FileUtil.zipOriginalFile(zipName);
		logger.info(String.format("成功将解析文件压缩成%s", zipPath));
		try {
			dealZip(zipPath, zipName, fileSize);
		} catch (Exception e) {
			logger.error(e.getMessage(), e);
		}
	}

	/**
	 * Computes the zip's MD5 digest and uploads it, announcing the download to
	 * the persistence center.
	 *
	 * @param zipPath  absolute path of the zip archive
	 * @param zipName  archive base name (used for rename-on-failure)
	 * @param fileSize size of the original data, reported to the center
	 * @throws Exception on I/O or upload failure
	 */
	public synchronized void dealZip(String zipPath, String zipName,
			long fileSize) throws Exception {
		// Close the stream even when md5Hex throws (the original leaked it).
		String md5Str;
		InputStream inputStream = new FileInputStream(zipPath);
		try {
			md5Str = DigestUtils.md5Hex(inputStream);
		} finally {
			inputStream.close();
		}
		// Persistence-center URL used to announce the downloadable zip.
		String persistenceUrl = PropertyUtils.getProperty(
				PropertesCustom.SYSTEM_DATAPERSISTENCE_DOWNLOAD_URL,
				SystemPropertiesLoader.FILE_NAME, "");
		// Upload retry count.
		int reTry = Integer.parseInt(PropertyUtils.getProperty(
				PropertesCustom.SYSTEM_DATAPERSISTENCE_DOWNLOAD_RETRY_COUNT,
				SystemPropertiesLoader.FILE_NAME, "3"));
		logger.info(String.format("压缩文件md5码：%s", md5Str));
		uploadFile(zipPath, zipName, persistenceUrl, md5Str, reTry, fileSize);
	}

	/**
	 * Uploads the zip to fastdfs and notifies the persistence center, retrying
	 * the whole upload/notify cycle up to {@code reTry} times when the center
	 * answers "false". On final failure the archive is renamed with
	 * {@code FileUtil.exceptionSuffix} so it is not shipped again.
	 * <p>
	 * NOTE(review): the original retried by recursing inside a
	 * {@code while (reTry-- > 0)} loop, multiplying the retries exponentially
	 * and renaming the file even when a retry had succeeded; this version
	 * retries iteratively and only renames when every attempt failed.
	 *
	 * @return the persistence center's last response message
	 */
	private synchronized String uploadFile(String zipPath, String zipName,
			String persistenceUrl, String md5Str, int reTry, long fileSize)
			throws Exception {
		logger.info("开始上传文件到fastdfs...");
		Map<String, String> uploadInfo = FastDFSManager.getInstance().upload(
				zipPath);
		logger.info(String.format("fastdfs反馈信息:%s", uploadInfo));
		String msg = sendDownloadInfo(persistenceUrl, md5Str, uploadInfo,
				zipName, fileSize);
		logger.info("数据持久化中心反馈消息:" + msg);
		if (msg != null && msg.contains("true")) {
			logger.info("任务抓取到验证过程已完成");
		} else if (msg != null && msg.contains("false")) {
			boolean succeeded = false;
			while (reTry-- > 0 && !succeeded) {
				logger.info(String.format("倒数第%s次上传重试!", reTry));
				// Re-upload and re-notify, mirroring the original recursive call.
				uploadInfo = FastDFSManager.getInstance().upload(zipPath);
				msg = sendDownloadInfo(persistenceUrl, md5Str, uploadInfo,
						zipName, fileSize);
				logger.info("数据持久化中心反馈消息:" + msg);
				succeeded = msg != null && msg.contains("true");
			}
			if (succeeded) {
				logger.info("任务抓取到验证过程已完成");
			} else {
				FileUtil.renameFile(zipName, zipName + FileUtil.exceptionSuffix);
				logger.info(String.format("最后一次上传重试失败，重命名后文件名为：%s", zipName
						+ FileUtil.exceptionSuffix));
			}
		} else {
			logger.info(msg);
			FileUtil.renameFile(zipName, zipName + FileUtil.exceptionSuffix);
			logger.info(String.format("上传失败，重命名后文件名为：%s", zipName
					+ FileUtil.exceptionSuffix));
		}
		// Optionally remove the file from the fastdfs server and the local zip.
		if (PropertyUtils.getBooleanProperty(PropertesCustom.SYSTEM_FASTDFS_FILE_IS_DELETE, SystemPropertiesLoader.FILE_NAME)) {
			FastDFSManager.getInstance().deleteFile(
					uploadInfo.get("groupName"),
					uploadInfo.get("remoteFileName"));
			FileUtil.deleteForlder(zipPath);
		}
		return msg;
	}

	/**
	 * Notifies the persistence center that a zip is ready for download.
	 *
	 * @param persistenceUrl center endpoint
	 * @param md5Str         archive MD5 (integrity check on the far side)
	 * @param uploadInfo     fastdfs upload result (provides "downloadUrl")
	 * @param originalName   original archive name
	 * @param fileSize       shipped data size in bytes
	 * @return the center's response body
	 * @throws Exception on unrecoverable HTTP failure
	 */
	private synchronized String sendDownloadInfo(String persistenceUrl,
			String md5Str, Map<String, String> uploadInfo, String originalName,
			long fileSize) throws Exception {
		Map<String, String> headMap = new HashMap<String, String>();
		Map<String, String> params = new HashMap<String, String>();
		params.put("md5", md5Str);
		params.put("downloadUrl", uploadInfo.get("downloadUrl"));
		params.put("createDate",
				DateUtil.date2String(new Date(), "yyyyMMddHHmmss"));
		params.put("originalName", originalName);
		params.put("fileSize", Long.toString(fileSize));
		int retry = PropertyUtils.getIntProperty(
				PropertesCustom.SYSTEM_DATAPERSISTENCE_CONNECT_RETRY_COUNT,
				SystemPropertiesLoader.FILE_NAME, 3);
		int waitTime = PropertyUtils.getIntProperty(
				PropertesCustom.SYSTEM_DATAPERSISTENCE_CONNECT_RETRY_TIME,
				SystemPropertiesLoader.FILE_NAME, 3000);
		logger.info(String.format("通知数据持久化中心下载文件，发送消息：%s", params));
		String responseMsg = requestDataPersistence(persistenceUrl, headMap,
				params, retry, waitTime);
		// Fixed format string: the original had no %s, dropping the response.
		logger.info(String.format("持久化中心反馈消息为:%s", responseMsg));
		return responseMsg;
	}

	/**
	 * POSTs to the persistence center, retrying {@code retry} times on connect
	 * timeout with a {@code waitTime} ms pause BEFORE each retry (the original
	 * slept only after the retry had already been issued). Returns the empty
	 * string when all retries are exhausted, preserving the caller's
	 * failure-path behavior.
	 */
	private synchronized String requestDataPersistence(String persistenceUrl,
			Map<String, String> headMap, Map<String, String> params, int retry,
			int waitTime) throws Exception, InterruptedException {
		String responseMsg = "";
		try {
			responseMsg = HttpClientUtil.getInstance().execute(persistenceUrl,
					params, headMap, "utf-8");
		} catch (org.apache.http.conn.ConnectTimeoutException cte) {
			if (retry-- > 0) {
				logger.info(String.format("连接数据持久化中心倒数第%s次重试!", retry));
				Thread.sleep(waitTime);
				responseMsg = requestDataPersistence(persistenceUrl, headMap,
						params, retry, waitTime);
			} else {
				// Retries exhausted: log instead of silently swallowing; the
				// empty message lets the caller's rename-on-failure path run.
				logger.error(cte.getMessage(), cte);
			}
		}
		return responseMsg;
	}

	/**
	 * Ships whatever is currently in the parse directory: when it is non-empty,
	 * zips and uploads it, then deletes the directory.
	 *
	 * @param persistenceUrl persistence-center notification URL
	 * @param reTry          upload retry count
	 */
	@Override
	public void uploadFile(String persistenceUrl, int reTry) {

		synchronized (TaskOperateApplicationImpl.class) {
			long fileSize = getDirectorySize();
			if (fileSize > 0) {// only ship when the directory holds data
				zipFileAndDeal(persistenceUrl, reTry, fileSize);
				// Remove the directory once its contents have been handled.
				FileUtil.deleteParseForlder();
			}
		}
	}

}
