package com.foreveross.mofang.datapersistence.application.impl;

import java.io.File;
import java.io.FileFilter;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.TimeUnit;

import javax.inject.Inject;
import javax.inject.Named;

import net.sf.json.JSONObject;

import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;

import com.foreveross.mofang.datapersistence.application.FileDataManageApplication;
import com.foreveross.mofang.datapersistence.application.HbaseDataOperationApplication;
import com.foreveross.mofang.datapersistence.application.vo.FileInfo;
import com.foreveross.mofang.datapersistence.infra.Constant;
import com.foreveross.mofang.datapersistence.infra.DBType;
import com.foreveross.mofang.datapersistence.infra.service.IDataPersistenceMsgService;
import com.foreveross.mofang.datapersistence.infra.utils.CacheUtils;
import com.foreveross.mofang.datapersistence.infra.utils.MoFangFileUitl;
import com.foreveross.mofang.datapersistence.infra.utils.MoFangLogger;
import com.foreveross.mofang.datapersistence.infra.utils.PropertiesUtil;
import com.foreveross.mofang.datapersistence.infra.utils.impl.StringFileReadCallback;
import com.foreveross.taskservice.common.bean.TaskModel;

/**
 * Persists crawled task data that arrives as zipped JSON files.
 *
 * <p>Responsibilities:
 * <ul>
 *   <li>scan a local directory for {@code .zip} data files and persist their contents;</li>
 *   <li>download task files from FastDFS, verify their MD5, unpack and persist them;</li>
 *   <li>retry failed HBase writes and spill records that keep failing into a
 *       disaster-recovery file for later replay.</li>
 * </ul>
 *
 * @author fb
 */
@SuppressWarnings({ "unchecked", "deprecation" })
@Named
public class FileDataManageApplicationImpl implements FileDataManageApplication {

	private final Logger logger = MoFangLogger.FILEOPERATION.getLogger();

	// Long-lived cache; holds (among other things) the channel configuration map.
	private final CacheUtils cache = CacheUtils.newInstance().instanceCache(CacheUtils.CacheInstance.FOREVER);

	// Maximum number of download-verify-persist attempts for a single file.
	private final static int executions = 3;

	// Per-thread count of records spilled to the disaster-recovery file while
	// persisting the current payload. Set in dataPersistence(), read when building
	// the rollback TaskModel, and removed afterwards so pooled threads stay clean.
	private final ThreadLocal<Integer> exceptionCount = new ThreadLocal<Integer>();

	@Inject
	private HbaseDataOperationApplication hbaseDataOperationApplication;

	@Inject
	IDataPersistenceMsgService msgService;

	/**
	 * Scheduled-scan entry point: unpacks and persists every zip archive found in
	 * {@code directory}. All exceptions are caught and logged so the scheduler
	 * keeps running.
	 *
	 * @param directory directory to scan for zipped data files
	 */
	public void scansDirectory(String directory) {
		logger.info("定时扫描文件数据夹开始, 扫描路径[{}]", directory);

		try {
			fileUnzipReaderPersistence(directory);
		} catch (Exception e) {
			logger.error("文件夹中文件读取发生异常!", e);
		}
	}

	/**
	 * Downloads a task file from FastDFS, verifies its MD5 checksum, unpacks it and
	 * asynchronously persists every record batch it contains. The whole
	 * download-verify-persist sequence is retried up to {@link #executions} times.
	 *
	 * @param info descriptor of the remote file (download URL, MD5, original name)
	 * @return {@code true} when the file was processed successfully,
	 *         {@code false} when all retries were exhausted
	 */
	public boolean fstDFSFileDown(FileInfo info) {
		Properties sysPro = PropertiesUtil.getProperties(Constant.PROPERTIES_SYSTEM, CacheUtils.CacheInstance.SYSTEM);
		boolean pass = true;
		int time = 0;
		String extractDir = null;

		logger.info("准备下载文件[{}]", info.getOriginalName());

		while (time < executions) {
			try {
				File zipFile = MoFangFileUitl.fastDFSFileDown(mapFDFSUrl(info), sysPro.getProperty("system.data.FileDownPath"));

				// A corrupt download is deleted and the whole attempt retried.
				if (!MoFangFileUitl.fileMD5Check(zipFile, info.getMd5())) {
					MoFangFileUitl.delete(zipFile);
					throw new IllegalStateException("md5值校验和不通过!");
				}

				extractDir = MoFangFileUitl.unZip(zipFile);
				MoFangFileUitl.delete(zipFile);
				// Parse the extracted files asynchronously; each callback invocation
				// carries one serialized record batch.
				MoFangFileUitl.asyncReadDirectoryFile(extractDir, new StringFileReadCallback(Constant.FILEDATE_SEPARATOR) {

					@Override
					public void execute(String data) {
						end = System.currentTimeMillis();
						dataPersistence(data);
						logger.info("此条任务文件读取操作耗时 {} 毫秒", end - start);
						start = System.currentTimeMillis();
						logger.info("-------------------------------------------------------");
					}
				});

				// Record the file itself in HBase.
				hbaseDataOperationApplication.saveDataFileInfo(info);
				pass = true;
				break;
			} catch (Exception e) {
				pass = false;
				time++;
				// Last array element is a Throwable, which SLF4J renders as the stack trace.
				logger.error("文件处理操作第{}次异常，系统将尝试重试{}次!", new Object[]{time, executions, e});
			}
		}

		return pass;
	}

	/**
	 * Startup disaster-recovery entry point: replays previously spilled data files
	 * from {@code directory}. Exceptions are caught and logged so startup proceeds.
	 *
	 * @param directory directory containing disaster-recovery zip files
	 */
	public void disasterDataPersistence(String directory) {
		logger.info("项目启动容灾数据持久化操作开始，路径[{}]!", directory);

		try {
			fileUnzipReaderPersistence(directory);
		} catch (Exception e) {
			logger.error("项目启动容灾数据持化到数据库出错", e);
		}
	}

	/**
	 * Rewrites the download URL's public IP to the mapped intranet IP when a
	 * mapping is configured; otherwise returns the URL unchanged.
	 *
	 * @param info file descriptor whose {@code ip} keys the network-map properties
	 * @return the (possibly rewritten) download URL
	 */
	private String mapFDFSUrl(FileInfo info) {
		Properties sysPro = PropertiesUtil.getProperties(Constant.PROPERTIES_FDFS_NETWORKMAP, CacheUtils.CacheInstance.SYSTEM);
		String outerNet = info.getIp();
		String intranet = sysPro.getProperty(outerNet);

		if (StringUtils.isBlank(intranet)) {
			return info.getDownloadUrl();
		}

		return info.getDownloadUrl().replace(outerNet, intranet);
	}

	/**
	 * Unzips every {@code .zip} file in {@code directory}, persists the records it
	 * contains, and cleans up both the archive and the extraction directory.
	 *
	 * @param directory directory to process
	 * @throws Exception if unzipping or reading fails; the caller logs it
	 */
	private void fileUnzipReaderPersistence(String directory) throws Exception {
		String extractDir = null;
		// Require the ".zip" extension so names that merely end in "zip" are skipped.
		File dir[] = new File(directory).listFiles(new FileFilter() {

			public boolean accept(File pathname) {
				return pathname.getName().endsWith(".zip");
			}
		});

		// listFiles() returns null when the path does not exist or is not a directory.
		if (dir == null) {
			logger.warn("指定文件夹[{}]不存在!", directory);
			return;
		}

		if (dir.length < 1) {
			logger.info("指定文件夹[{}]暂无数据文件!", directory);
			return;
		}

		for (File zipFile : dir) {
			// Record the file itself in HBase.
			hbaseDataOperationApplication.saveDataFileInfo(new FileInfo(zipFile.getName(), zipFile.length(), zipFile.lastModified()));

			extractDir = MoFangFileUitl.unZip(zipFile);
			MoFangFileUitl.delete(zipFile);
			// Synchronous read here (unlike fstDFSFileDown) so the extraction
			// directory can be deleted immediately afterwards.
			MoFangFileUitl.readDirectoryFile(extractDir, new StringFileReadCallback(Constant.FILEDATE_SEPARATOR) {

						@Override
						public void execute(String data) {
							end = System.currentTimeMillis();
							dataPersistence(data);
							logger.info("此条任务文件读取操作耗时 {} 毫秒", end - start);
							start = System.currentTimeMillis();
							logger.info("-------------------------------------------------------");
						}
					});

			MoFangFileUitl.delete(new File(extractDir));
		}

	}


	/**
	 * Persists one serialized record batch into the channel's configured store.
	 *
	 * Expected JSON shape of {@code dataJson}:
	 * <pre>
	 * 	{
	 * 		channelId  :  id,
	 * 		taskId   :  id,
	 * 		taskType :  taskType,
	 * 		data     :  [
	 * 			{
	 * 				rowKey : rowKey,
	 * 				data   : {
	 * 					// raw data handed over by the adapter
	 * 				}
	 * 			}
	 * 		]
	 * 	}
	 * </pre>
	 *
	 * Failed writes are retried up to {@code system.dataPersistence.errorTime}
	 * times; records that keep failing are spilled to the disaster-recovery file.
	 * Afterwards the task's crawl/error counts are reported via {@link #msgService}.
	 *
	 * @param dataJson the serialized batch described above
	 */
	private void dataPersistence(String dataJson) {
		Properties sysPro = PropertiesUtil.getProperties(Constant.PROPERTIES_SYSTEM, CacheUtils.CacheInstance.SYSTEM);
		Map<String, Object> channels = cache.get(Constant.CACHE_CHANNEL_MAP);
		long startSerialize = System.currentTimeMillis();
		Map<String, Object> dataMap = JSONObject.fromObject(dataJson);
		long endSerialize = System.currentTimeMillis();
		Map<String, String> channel = (Map<String, String>) channels.get(dataMap.get("channelId").toString().trim());

		// An unknown channelId used to surface as a NullPointerException that aborted
		// the whole directory scan; skip just this batch instead.
		if (channel == null) {
			logger.error("未找到channelId[{}]对应的渠道配置, 此条数据跳过!", dataMap.get("channelId"));
			return;
		}

		List<Map<String, Object>> datas = (List<Map<String, Object>>) dataMap.get("data");
		Map<String, String> dbTable = JSONObject.fromObject(channel.get("dbTable"));
		int countTime = Integer.parseInt(sysPro.getProperty("system.dataPersistence.errorTime")); // max attempts per record/batch
		int errorTime = 0;
		boolean isError = false;
		exceptionCount.set(0);

		logger.info("数据反序列化耗时：{} 毫秒, 内含size：{} ,渠道id：{}, sn:{}, name:{}", new Object[]{endSerialize - startSerialize, datas.size(), channel.get("id"), channel.get("sn"), channel.get("name")});

		long startPersistence = System.currentTimeMillis();

		if (DBType.HBASE == DBType.getDBTypeByLable(channel.get("dbType"))) {
			if (Boolean.parseBoolean(sysPro.getProperty("system.dataPersistence.isBatch", "false"))) {
				do {
					try {
						hbaseDataOperationApplication.batchAddRecord(dbTable.get(dataMap.get("taskType")), datas);
						// Clear the flag on success, otherwise a transient failure
						// followed by a successful retry would loop forever.
						isError = false;
					} catch (Exception e) {
						isError = true;
						disasterRecovery(++errorTime, sysPro, datas, dataMap);
						logger.error("hbase表{}批量添加taskId为{}数据时第{}次出错，", new Object[] { dbTable.get(dataMap.get("taskType")), dataMap.get("taskId"), errorTime , e} );
					}
				} while (isError && errorTime < countTime);

				// NOTE(review): the batch path does not report back via msgService —
				// confirm this is intentional. The ThreadLocal is cleared either way.
				exceptionCount.remove();
				return;
			}

			for (Map<String, Object> data : datas) {
				errorTime = 0;
				isError = false;

				do {
					try {
						hbaseDataOperationApplication.addRecord(dbTable.get(dataMap.get("taskType")), data);
						// Clear the flag on success, otherwise a transient failure
						// followed by a successful retry would loop forever.
						isError = false;
					} catch (Exception e) {
						isError = true;
						disasterRecovery(++errorTime, sysPro, data, dataMap);
						logger.error("hbase表{}添加rowKey为{}数据时第{}次出错，", new Object[] { dbTable.get(dataMap.get("taskType")), data.get("rowKey"), errorTime , e} );
					}
				} while (isError && errorTime < countTime);
			}
		}

		long endPersistence = System.currentTimeMillis();
		logger.info("{} 条数据全部入库化耗时：{} 毫秒：", datas.size(), endPersistence - startPersistence);

		// Report crawl count and error count back to the task service.
		TaskModel task = new TaskModel();
		task.setId(dataMap.get("taskId").toString());
		task.setCrawlCount(datas.size());
		task.setExcuteCount(exceptionCount.get());

		msgService.roolbackTaskQueueRecordEntity(task);
		// Clear the per-thread counter so pooled threads do not leak state.
		exceptionCount.remove();
	}


	/**
	 * Disaster recovery for a single record: once the retry budget is exhausted,
	 * the record is appended to the recovery file and the error counter bumped.
	 * Saved JSON shape:
	 * <pre>
	 * 	 {
	 * 		channelId  :
	 * 		taskId	   :
	 * 		data	   : [{
	 * 				// the failing record, serialized
	 * 		}]
	 *	 }
	 * </pre>
	 *
	 * @param errorTime number of failed attempts so far
	 * @param sysPro    system configuration
	 * @param data      the failing record
	 * @param dataMap   the enclosing batch (supplies channelId/taskId/taskType)
	 */
	private void disasterRecovery(int errorTime, Properties sysPro, Map<String, Object> data, Map<String, Object> dataMap) {
		int sleepTime = Integer.parseInt(sysPro.getProperty("system.dataPersistence.error.sleepTime")); // sleep seconds
		int error = exceptionCount.get();
		List<Map<String, Object>> datas = new ArrayList<Map<String,Object>>();
		datas.add(data);

		// Only spill once the configured retry budget is exhausted.
		if (!threadSleepCheck(errorTime, sysPro)) {
			return;
		}

		exceptionCount.set(++error);
		appendFile(datas, dataMap, sleepTime);
	}

	/**
	 * Disaster recovery for a whole batch; see the single-record overload.
	 *
	 * @see com.foreveross.mofang.datapersistence.application.impl.FileDataManageApplicationImpl#disasterRecovery
	 * @param errorTime number of failed attempts so far
	 * @param sysPro    system configuration
	 * @param datas     the failing batch
	 * @param dataMap   the enclosing payload (supplies channelId/taskId/taskType)
	 */
	private void disasterRecovery(int errorTime, Properties sysPro, List<Map<String, Object>> datas, Map<String, Object> dataMap) {
		int sleepTime = Integer.parseInt(sysPro.getProperty("system.dataPersistence.error.sleepTime")); // sleep seconds
		if (!threadSleepCheck(errorTime, sysPro)) {
			return;
		}

		exceptionCount.set(datas.size());
		appendFile(datas, dataMap, sleepTime);
	}

	/**
	 * Appends the failing records to the disaster-recovery file, logs the failure,
	 * and optionally sleeps to back off before the caller continues.
	 *
	 * @param datas     failing records (size 1 for the single-record path)
	 * @param dataMap   enclosing payload supplying channelId/taskId/taskType
	 * @param sleepTime back-off time in seconds; no sleep when {@code <= 0}
	 */
	private void appendFile(List<Map<String, Object>> datas, Map<String, Object> dataMap, int sleepTime) {
		String rowKey = datas.size() == 1 ? datas.get(0).get("rowKey").toString() : "";
		Map<String, Object> toJsonMap = new HashMap<String, Object>();
		// NOTE(review): the key is "channId" while the payload uses "channelId" —
		// looks like a typo, but the recovery-file reader may depend on this exact
		// key; confirm before renaming.
		toJsonMap.put("channId", dataMap.get("channelId"));
		toJsonMap.put("taskId", dataMap.get("taskId"));
		toJsonMap.put("taskType", dataMap.get("taskType"));
		toJsonMap.put("data", datas);

		MoFangFileUitl.appendFile(JSONObject.fromObject(toJsonMap).toString());

		MoFangLogger.DATA_SAVEERROR.getLogger().error(
				"数据持化异常，channelId:{} taskId:{} taskType:{} size:{} rowKey:{}",
				new Object[] { dataMap.get("channelId"), dataMap.get("taskId"), dataMap.get("taskType"), datas.size(), rowKey});

		try {
			if (sleepTime > 0) {
				// Log before sleeping — the old order logged "entered sleep" after waking.
				logger.info("线程{}己进入休眠状态!", Thread.currentThread().getName());
				TimeUnit.SECONDS.sleep(sleepTime);
			}
		} catch (InterruptedException e) {
			// Restore the interrupt status so callers/pools can observe it.
			Thread.currentThread().interrupt();
			logger.error("线程{}休眠出错", Thread.currentThread().getName(), e);
		}
	}


	/**
	 * Whether the retry budget for one record/batch is exhausted, i.e. the caller
	 * should spill to disaster recovery and back off.
	 *
	 * @param errorTime number of failed attempts so far
	 * @param sysPro    system configuration
	 * @return {@code true} when {@code errorTime} has reached the configured maximum
	 */
	private boolean threadSleepCheck(int errorTime, Properties sysPro) {
		int time = Integer.parseInt(sysPro.getProperty("system.dataPersistence.errorTime")); // max attempts
		return errorTime >= time;
	}

}
