package cn.tiansu.eway.datafile.manager;

import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;

import org.apache.log4j.Logger;
import org.springframework.jdbc.core.JdbcTemplate;

import cn.tiansu.eway.common.utils.CmdUtils;
import cn.tiansu.eway.common.utils.DBConnection;
import cn.tiansu.eway.common.utils.PropertyConfigurator;
import cn.tiansu.eway.datafile.model.ChangeDataInfo;
import cn.tiansu.eway.datafile.model.ChangeDataReq;
import cn.tiansu.eway.datafile.model.Response;
import cn.tiansu.eway.datafile.model.SamplesInfo;
import cn.tiansu.eway.kafka.ijava.ConsumerConsumption;
import cn.tiansu.eway.kylinengine.service.KYLINEngineService;
import cn.tiansu.eway.kylinengine.utils.DataTimeUtils;
import cn.tiansu.eway.kylinengine.utils.JsonTools;

import com.alibaba.fastjson.JSONObject;

/**
 * Data-file service listener: consumes second-level sampling messages from
 * Kafka and turns them into dimension / fact data files for the Kylin
 * statistics pipeline.
 *
 * @Title: FileListener.java
 * @author: 吴鹏
 * @date: 2015-09-09
 */
public class FileListener extends ConsumerConsumption
{
	private static final Logger log = Logger.getLogger(FileListener.class);

	/** Singleton instance; the constructor is private. */
	private static FileListener instance = new FileListener();

	/**
	 * Directory holding tenant model files and dimension data files.
	 */
	private String path = PropertyConfigurator.getInstance(null,
			"config.properties").getParameter("dataFilePath");

	/**
	 * Temporary (staging) directory for tenant fact data files.
	 */
	private String tempPath = PropertyConfigurator.getInstance(null,
			"config.properties").getParameter("dataFileTempPath");

	/**
	 * Backup directory for tenant fact data files.
	 */
	private String bakPath = PropertyConfigurator.getInstance(null,
			"config.properties").getParameter("dataFileBakPath");

	/** Database-name prefix shared by all tenant schemas. */
	private String prefix = PropertyConfigurator.getInstance(null,
			"config.properties").getParameter("prefix");

	private KYLINEngineService kylinService = KYLINEngineService.getInstance();

	/**
	 * JDBC connection alias.
	 */
	private String alias = "mysql";

	private static final String regionFileName = "vw_dim_region.txt";

	private static final String categoryFileName = "vw_dim_category.txt";

	private static final String factFileName = "fact_samples.txt";

	private static final String changeFactFileName = "change_samples";

	private static final String bu1FileName = "dim_business1.txt";

	private static final String bu2FileName = "dim_business2.txt";

	private static final String bu3FileName = "dim_business3.txt";

	/**
	 * Retention period (days) for backup data files; default 60 when the
	 * property is missing or malformed.
	 */
	private static final String fileBackupday = PropertyConfigurator
			.getInstance(null, "config.properties").getParameter(
					"fileBackupday");

	/**
	 * Keyword identifying a "change data" (manual repair / recolouring)
	 * message body.
	 */
	private static final String changeData = "changeData";

	/**
	 * key = tenant, value = pending fact data lines for that tenant.
	 * Declared as the concrete ConcurrentHashMap type so putIfAbsent can be
	 * used for an atomic first insert (pre-Java-8 Map has no putIfAbsent).
	 * List mutation is guarded by synchronized(list) both here and in
	 * makeFactSampleFile().
	 */
	private ConcurrentHashMap<String, List<String>> fileMap = new ConcurrentHashMap<String, List<String>>();

	/**
	 * key = tenant, value = set of time slices whose data must be
	 * regenerated. Snapshot-and-clear happens under synchronized(this map)
	 * in writeChanageDataToFile().
	 */
	private Map<String, Set<String>> changeForTimeMap = new ConcurrentHashMap<String, Set<String>>();

	private FileListener()
	{
	}

	/**
	 * Returns the shared singleton instance.
	 * 
	 * @return instance
	 */
	public static FileListener getInstance()
	{
		return instance;
	}

	/**
	 * Ad-hoc smoke test: parses a sample message body and prints a few
	 * fields. Not used in production.
	 */
	public static void main(String[] args)
	{
		String bodyStr = "{\"samples\":[{\"dim\":{\"business3\":\"3\",\"business2\":\"2\",\"business1\":\"1\",\"time\":\"4853610\",\"category\":\"8\",\"region\":\"21\"},\"sensor\":\"root/weijiwei/weijiwei63661\",\"time\":\"2016-02-22 03:25:00.000\",\"sample\":\"12410.000000\",\"tenant\":\"weijiwei\",\"inc\":\"30.000000\",\"status\":\"2\"}]}";
		Response res = JsonTools.jsonToObject(bodyStr, Response.class);

		List<SamplesInfo> samples = res.getSamples();

		if (samples == null)
		{
			return;
		}

		System.out.println(samples.get(0).getSensor());
		System.out.println(samples.get(0).getTime());
		System.out.println(samples.get(0).getStatus());
	}

	/**
	 * Consumes a second-level sampling message and generates data files.
	 * A body containing the "changeData" keyword is treated as a manual
	 * repair / recolouring request; anything else is regular sample data
	 * that is buffered per tenant and flushed to fact files.
	 */
	@Override
	public void consumptionMsg(String message)
	{
		log.debug("message is " + message);

		JSONObject jsonObject = JSONObject.parseObject(message);
		String bodyStr = jsonObject.getString("body");

		// Guard: a message without a body used to NPE on indexOf below.
		if (bodyStr == null)
		{
			log.warn("message has no body, ignored: " + message);
			return;
		}

		// Manual repair / data recolouring request
		if (bodyStr.indexOf(changeData) > -1)
		{
			ChangeDataReq req = JsonTools.jsonToObject(bodyStr,
					ChangeDataReq.class);
			if (req != null && req.getChangeData() != null)
			{
				Operator op = new Operator();

				for (ChangeDataInfo info : req.getChangeData())
				{
					log.debug("########info=" + info.toString());
					op.deal(changeForTimeMap, info);
				}

			}
		}
		// Regular data file generation
		else
		{
			log.debug("bodyStr is " + bodyStr);

			Response res = JsonTools.jsonToObject(bodyStr, Response.class);

			if (res == null || res.getSamples() == null)
			{
				return;
			}

			for (SamplesInfo info : res.getSamples())
			{
				// Only status "0" samples are written to fact files.
				if (!"0".equals(info.getStatus()))
				{
					continue;
				}

				String line = buildFactLine(info);

				// Atomic first insert: the old get/put pair could lose a
				// list when two threads raced on the same tenant.
				List<String> dataList = fileMap.get(info.getTenant());
				if (dataList == null)
				{
					List<String> fresh = new ArrayList<String>();
					List<String> prev = fileMap.putIfAbsent(info.getTenant(),
							fresh);
					dataList = (prev != null) ? prev : fresh;
				}

				// Same lock makeFactSampleFile() takes before writing.
				// NOTE(review): a line added between the writer's flush and
				// its map removal can still be lost; acceptable here, but
				// confirm against the producer's delivery guarantees.
				synchronized (dataList)
				{
					dataList.add(line);
				}
			}

			// Flush buffered lines to the fact data files.
			makeFactSampleFile();
		}

	}

	/**
	 * Builds one CSV fact line from a sample. The empty fourth column is a
	 * placeholder (sample_value3) kept for file-format compatibility.
	 */
	private String buildFactLine(SamplesInfo info)
	{
		return dealStrNull(info.getSensor()) + ","
				+ dealStrNull(info.getInc()) + ","
				+ dealStrNull(info.getSample()) + "," + "" + ","
				+ dealStrNull(info.getDim().getTime()) + ","
				+ dealStrNull(info.getDim().getRegion()) + ","
				+ dealStrNull(info.getDim().getCategory()) + ","
				+ dealStrNull(info.getDim().getBusiness1()) + ","
				+ dealStrNull(info.getDim().getBusiness2()) + ","
				+ dealStrNull(info.getDim().getBusiness3()) + ","
				+ dealStrNull(info.getTime().split(" ")[0]) + ","
				+ dealStrNull(info.getStatus());
	}

	/**
	 * Flushes the in-memory "changed data" markers: for every marked
	 * tenant/time slice the fact file is regenerated from the database,
	 * stale backup files are deleted, the regenerated files are moved to
	 * the backup directory, hive is reloaded and the kylin cube refreshed.
	 * (The method name keeps its historical spelling — it is public API.)
	 */
	public void writeChanageDataToFile()
	{
		Map<String, Set<String>> map = new HashMap<String, Set<String>>();

		log.info("#####changeForTimeMap =" + changeForTimeMap);

		// Atomically snapshot and reset the shared marker map.
		synchronized (changeForTimeMap)
		{
			map.putAll(changeForTimeMap);
			changeForTimeMap.clear();
		}

		// Map keys are upper-case tenant names.
		JdbcTemplate jt = DBConnection.getInstance().getJdbcTemplate(alias);
		ChangeDataInfo info = new ChangeDataInfo();
		DataTimeUtils dt = new DataTimeUtils();

		for (Map.Entry<String, Set<String>> entry : map.entrySet())
		{
			String tenant = entry.getKey();
			Set<String> timeSet = entry.getValue();

			info.setTenant(tenant);

			String startTime = null;
			String endTime = null;

			for (String timeStr : timeSet)
			{
				info.setTime(timeStr);
				startTime = timeStr;
				endTime = dt.getNextDate(startTime, 0, 0, 1);

				// Regenerate the file for this single time slice.
				// NOTE: granularities must not overlap, e.g. never mark
				// both "2015-10-20" and "2015-10-20_10" at the same time.
				makeFileForChangeData(info, jt);

				// Remove the stale backup files the new file replaces.
				deleteOldFactFiles(info);
			}

			// These two shell-heavy steps operate on whole tenants, so
			// running them once per tenant (after all its time slices are
			// regenerated) replaces the old once-per-time-slice repetition.

			// Move regenerated files from temp over the backup directory.
			doBakDataToHive(map.keySet());

			// Truncate hive and reload the cleaned backup files, ready for
			// the next statistics cycle.
			cleanExpireHiveData(map.keySet());

			// NOTE(review): only the LAST time slice's window is refreshed
			// when a tenant has several slices — confirm this is intended.
			log.debug("tenant=" + tenant + ",startTime=" + startTime
					+ ",endTime=" + endTime);
			kylinService.refreshCubeByTime(tenant, startTime, endTime);
		}

		map.clear();

	}

	/**
	 * Deletes the backup fact files covered by one change marker: all 24
	 * hourly files for a day marker ("yyyy-MM-dd", length 10), otherwise
	 * the single hourly file — plus the matching change_samples file.
	 */
	private void deleteOldFactFiles(ChangeDataInfo info)
	{
		String dir = bakPath + info.getTenant().toUpperCase() + File.separator;

		if (info.getTime().length() == 10)
		{
			// Day granularity: every hourly file of that day.
			for (int i = 0; i <= 23; i++)
			{
				deleteQuietly(new File(dir + "fact_samples_" + info.getTime()
						+ "_" + i + ".txt"));
			}
		} else
		{
			// Hour granularity: just the one hourly file.
			deleteQuietly(new File(dir + "fact_samples_" + info.getTime()
					+ ".txt"));
		}

		deleteQuietly(new File(dir + changeFactFileName + "_" + info.getTime()));
	}

	/**
	 * Deletes a file, logging the outcome instead of throwing.
	 */
	private void deleteQuietly(File file)
	{
		log.info("delete file [" + file.getAbsolutePath() + "] is "
				+ file.delete());
	}

	/**
	 * Moves each listed tenant's regenerated files from the temp directory
	 * into the backup directory.
	 * 
	 * @param set
	 *            tenant names
	 */
	private void doBakDataToHive(Set<String> set)
	{
		// mv /home/eway/kylinOLAP/temp/tenant/change_samples*
		// /home/eway/kylinOLAP/bak/tenant/
		for (String tenant : set)
		{
			StringBuilder sb = new StringBuilder("mv ").append(tempPath)
					.append(tenant).append("/").append(changeFactFileName)
					.append("* ").append(bakPath).append(tenant).append("/");
			CmdUtils.execShell(sb.toString());
		}
	}

	/**
	 * Regenerates the data file for one repaired tenant/time slice by
	 * re-querying the tenant's fact table.
	 * 
	 * @param info
	 *            tenant + time slice ("yyyy-MM-dd" or "yyyy-MM-dd_HH")
	 * @param jt
	 *            JDBC template for the metadata database
	 */
	private void makeFileForChangeData(ChangeDataInfo info, JdbcTemplate jt)
	{
		String tenant = prefix + "_" + info.getTenant().toLowerCase();
		String time = info.getTime();

		String year = time.substring(0, 4);
		String month = time.substring(5, 7);
		String day = time.substring(8, 10);

		// NOTE(review): SQL is string-concatenated; values come from
		// internal messages/config, but parameterized queries would be
		// safer — confirm upstream sanitization.
		// 2015-01-01
		String sql = "select time_id from " + tenant
				+ ".vw_dim_time where year=" + year + " and month="
				+ Integer.valueOf(month) + " and day=" + Integer.valueOf(day);

		log.info("sql : " + sql);
		String fileName = changeFactFileName + "_" + year + "-" + month + "-"
				+ day;

		// 2015-01-01_12 — hour-granularity marker
		if (time.length() == 13)
		{
			int hour = Integer.valueOf(time.substring(11, 13));
			sql += " and hour=" + hour;
			fileName += "_" + hour;
		}
		List<Map<String, String>> list = jt.queryForList(sql);

		// Guard: an empty id list used to produce "in ()" — invalid SQL.
		if (list.isEmpty())
		{
			log.warn("no time_id rows for " + tenant + " at " + time
					+ ", skip file " + fileName);
			return;
		}

		StringBuilder sb = new StringBuilder();
		for (int i = 0; i < list.size(); i++)
		{
			if (i > 0)
			{
				sb.append(",");
			}
			sb.append(String.valueOf(list.get(i).get("time_id")));
		}

		String tableName = tenant + ".fact_samples_" + year + "_" + month + "_"
				+ day;
		String querySql = "select * from " + tableName + " where time_id in ("
				+ sb.toString() + ")";
		log.info("querySql : " + querySql);

		List<Map<String, String>> dataList = jt.queryForList(querySql);

		List<String> resultList = new ArrayList<String>();
		for (Map<String, String> record : dataList)
		{
			String line = dealStrNull(record.get("sensor_uri")) + ","
					+ dealStrNull(record.get("sample_value")) + ","
					+ dealStrNull(record.get("sample_value2")) + ","
					+ dealStrNull(record.get("sample_value3")) + ","
					+ dealStrNull(record.get("time_id")) + ","
					+ dealStrNull(record.get("region_id")) + ","
					+ dealStrNull(record.get("category_id")) + ","
					+ dealStrNull(record.get("business1_id")) + ","
					+ dealStrNull(record.get("business2_id")) + ","
					+ dealStrNull(record.get("business3_id")) + ","
					+ year + "-" + month + "-" + day + ","
					+ dealStrNull(record.get("status"));
			resultList.add(line);
		}

		factdataToTempFile(info.getTenant(), resultList, fileName);
	}

	/**
	 * Flushes every tenant's buffered fact lines to its temp fact file and
	 * removes the tenant's entry from the buffer map.
	 */
	private void makeFactSampleFile()
	{
		Iterator<String> it = fileMap.keySet().iterator();

		while (it.hasNext())
		{
			String tenant = it.next();
			List<String> dataList = fileMap.get(tenant);

			// Same lock consumptionMsg() takes before appending, so the
			// list is not mutated while being written out.
			synchronized (dataList)
			{
				factdataToTempFile(tenant.toUpperCase(), dataList, factFileName);
				it.remove();
			}
		}

	}

	/**
	 * Moves generated data files from the temp directory into kylin's
	 * statistics directory.
	 */
	public void removeTempToDataFile()
	{
		// cp -fr /home/eway/KylinOLAP/temp/* /home/eway/KylinOLAP/data;
		// rm -rf /home/eway/KylinOLAP/temp/*
		String cmd = "cp -fr " + tempPath + "* " + path + ";rm -rf " + tempPath
				+ "*";
		CmdUtils.execShell(cmd);
	}

	/**
	 * Exports every enabled tenant's dimension tables (region, category and
	 * the three business dimensions) to data files.
	 */
	public void dimDataToFile()
	{
		String sqlTenant = "select name from " + prefix
				+ "_cloud.tb_tenant where enabled=1";

		JdbcTemplate jt = DBConnection.getInstance().getJdbcTemplate(alias);

		List<Map<String, String>> list = jt.queryForList(sqlTenant);

		try
		{
			for (Map<String, String> map : list)
			{
				String tenantName = map.get("name");
				String dataName = "eway_" + tenantName;
				String dirName = tenantName.toUpperCase();

				// Each tenant exports five dimensions in turn.
				exportDim(jt, dirName, "select * from  " + dataName
						+ ".vw_dim_region", regionFileName);
				exportDim(jt, dirName, "select * from  " + dataName
						+ ".vw_dim_category", categoryFileName);
				exportDim(jt, dirName, "select * from  " + dataName
						+ ".dim_business1", bu1FileName);
				exportDim(jt, dirName, "select * from  " + dataName
						+ ".dim_business2", bu2FileName);
				exportDim(jt, dirName, "select * from  " + dataName
						+ ".dim_business3", bu3FileName);
			}
		} catch (Exception e)
		{
			log.error("dimDataToFile is fail !!", e);
		}

	}

	/**
	 * Runs one dimension query and writes the rows to a file.
	 */
	private void exportDim(JdbcTemplate jt, String dirName, String sql,
			String outFile)
	{
		log.info("dim sql is " + sql);
		List<Map> data = jt.queryForList(sql);
		dataToFile(dirName, data, outFile);
	}

	/**
	 * Writes dimension/fact rows to a file (overwriting any previous
	 * content). The line format is chosen by the file name.
	 * 
	 * @param dirName
	 *            target directory name (under the data path)
	 * @param data
	 *            rows to write
	 * @param fileName
	 *            target file name
	 */
	public void dataToFile(String dirName, List<Map> data, String fileName)
	{
		if (data != null && data.size() > 0)
		{
			File dir = new File(path + dirName);

			log.info("dir is " + path + dirName);

			if (!dir.exists())
			{
				// mkdirs: the old mkdir() failed when parents were missing.
				dir.mkdirs();
			}

			File file = new File(path + dirName + File.separator + fileName);
			log.info("file is " + path + dirName + File.separator + fileName);

			// try-with-resources replaces the old finally block, which
			// closed the inner FileWriter before the wrapping PrintWriter.
			try (PrintWriter pw = new PrintWriter(new BufferedWriter(
					new FileWriter(file))))
			{
				for (Map<String, String> record : data)
				{
					switch (fileName)
					{
					case regionFileName:
						pw.println(getRegionDataLine(record));
						break;
					case categoryFileName:
						pw.println(getCategoryDataLine(record));
						break;
					case factFileName:
						pw.println(getFactDataLine(record));
						break;
					default:
						pw.println(getBusinesDataLine(record));
						break;
					}
				}
				pw.flush();

			} catch (IOException e)
			{
				log.error("dataToFile failed for " + file.getAbsolutePath(), e);
			}

		}
	}

	/**
	 * Recursively deletes expired backup data files under bakPath.
	 * 
	 * @param bakPath
	 *            directory whose files (recursively) are pruned
	 * @return true on success, false when the path is not a directory or
	 *         cannot be listed
	 */
	public boolean cleanBakFile(String bakPath)
	{
		File bakDir = new File(bakPath);
		if (!bakDir.isDirectory())
		{
			log.warn(bakPath + "is not exist");
			return false;
		}

		File[] fileList = bakDir.listFiles();
		// listFiles() returns null on I/O error or missing permission.
		if (fileList == null)
		{
			log.warn("cannot list " + bakPath);
			return false;
		}

		for (File f : fileList)
		{
			if (f.isDirectory())
			{
				cleanBakFile(f.getPath());
			} else
			{
				String name = f.getName();

				// Repaired ("change") data files: change_samples_yyyy-MM-dd...
				if (name.contains(changeFactFileName))
				{
					if (name.length() < 25)
					{
						log.error("error fileName is " + f.getAbsolutePath());
						continue;
					}

					String timeStr = name.substring(15, 25);
					deleteExpireFile(timeStr, f, name);

				}
				// Native data files: fact_samples_yyyy-MM-dd...
				else if (name.contains("fact_samples"))
				{
					if (name.length() < 23)
					{
						log.error("error fileName is " + f.getAbsolutePath());
						continue;
					}
					String timeStr = name.substring(13, 23);

					deleteExpireFile(timeStr, f, name);
				}
			}
		}

		return true;
	}

	/**
	 * Deletes the file when its embedded date is older than the configured
	 * retention period.
	 */
	private void deleteExpireFile(String timeStr, File f, String name)
	{
		DataTimeUtils dt = new DataTimeUtils();
		Date fileTime = dt.getDateTime(timeStr);

		// Safe parse: a malformed property used to throw
		// NumberFormatException and abort the whole cleanup run.
		int day = 60;
		if (fileBackupday != null)
		{
			try
			{
				day = Integer.parseInt(fileBackupday.trim());
			} catch (NumberFormatException e)
			{
				log.warn("bad fileBackupday [" + fileBackupday
						+ "], using default 60");
			}
		}
		Date bakTime = dt.getNextDate(0, 0, -day);

		// Delete files older than the retention window (default 60 days).
		if (fileTime.before(bakTime))
		{
			boolean flag = f.delete();
			if (!flag)
			{
				log.warn("delete file [" + name + "] is fail!");
			}
		}
	}

	/**
	 * For every listed tenant: truncates the hive fact table and reloads it
	 * from the tenant's backup data files.
	 */
	public void cleanExpireHiveData(Set<String> tenantSet)
	{
		for (String tenantName : tenantSet)
		{
			// Truncate the tenant's fact table in hive.
			StringBuilder truncateSb = new StringBuilder("hive -e \"use ")
					.append(tenantName).append(
							";truncate table fact_samples;\"");
			CmdUtils.execShell(truncateSb.toString());

			// Reload the cleaned data files into hive.
			StringBuilder loadSb = new StringBuilder(
					"hive -e \"LOAD DATA LOCAL INPATH '").append(bakPath)
					.append(tenantName)
					.append("/' INTO TABLE " + tenantName + ".fact_samples;\"");
			CmdUtils.execShell(loadSb.toString());
		}
	}

	/**
	 * Appends fact lines to a file under the temp directory.
	 * 
	 * @param dirName
	 *            target directory name (under the temp path)
	 * @param data
	 *            lines to append
	 * @param fileName
	 *            target file name
	 */
	private void factdataToTempFile(String dirName, List<String> data,
			String fileName)
	{
		File dir = new File(tempPath + dirName);
		if (!dir.exists())
		{
			// mkdirs: the old mkdir() failed when parents were missing.
			dir.mkdirs();
		}

		File file = new File(tempPath + dirName + File.separator + fileName);

		// Append mode; try-with-resources replaces the old finally block,
		// which closed the inner FileWriter before the PrintWriter.
		try (PrintWriter pw = new PrintWriter(new BufferedWriter(
				new FileWriter(file, true))))
		{
			for (String record : data)
			{
				pw.println(record);
			}
			pw.flush();

		} catch (IOException e)
		{
			log.error("factdataToTempFile failed for " + file.getAbsolutePath(),
					e);
		}
	}

	/**
	 * Region dimension data line.
	 * 
	 * @param record
	 *            row data
	 * @return comma-separated line
	 */
	private String getRegionDataLine(Map record)
	{
		return dealStrNull(record.get("region_id")) + ","
				+ dealStrNull(record.get("province")) + ","
				+ dealStrNull(record.get("city")) + ","
				+ dealStrNull(record.get("district")) + ","
				+ dealStrNull(record.get("street")) + ","
				+ dealStrNull(record.get("building")) + ","
				+ dealStrNull(record.get("building_level1")) + ","
				+ dealStrNull(record.get("building_level2")) + ","
				+ dealStrNull(record.get("building_level3"));
	}

	/**
	 * Category dimension data line.
	 * 
	 * @param record
	 *            row data
	 * @return comma-separated line
	 */
	private String getCategoryDataLine(Map record)
	{
		// dealStrNull on the first column for consistency with the other
		// line builders (a null id used to print a literal "null").
		return dealStrNull(record.get("category_id")) + ","
				+ dealStrNull(record.get("category_level1")) + ","
				+ dealStrNull(record.get("category_level2")) + ","
				+ dealStrNull(record.get("category_level3"));
	}

	/**
	 * Extended (business) dimension data line.
	 * 
	 * @param record
	 *            row data
	 * @return comma-separated line
	 */
	private String getBusinesDataLine(Map record)
	{
		return dealStrNull(record.get("business_id")) + ","
				+ dealStrNull(record.get("business_level1")) + ","
				+ dealStrNull(record.get("business_level2")) + ","
				+ dealStrNull(record.get("business_level3"));
	}

	/**
	 * Fact data line.
	 * 
	 * @param record
	 *            row data
	 * @return comma-separated line
	 */
	private String getFactDataLine(Map record)
	{
		return dealStrNull(record.get("sensor_uri")) + ","
				+ dealStrNull(record.get("sample_value")) + ","
				+ dealStrNull(record.get("sample_value2")) + ","
				+ dealStrNull(record.get("sample_value3")) + ","
				+ dealStrNull(record.get("time_id")) + ","
				+ dealStrNull(record.get("region_id")) + ","
				+ dealStrNull(record.get("category_id")) + ","
				+ dealStrNull(record.get("business1_id")) + ","
				+ dealStrNull(record.get("business2_id")) + ","
				+ dealStrNull(record.get("business3_id"));
	}

	/**
	 * Null-safe string conversion: null values and the literal string
	 * "null" (any case) become the empty string.
	 * 
	 * @param obj
	 *            value to convert
	 * @return string form, or "" when null / "null"
	 */
	private String dealStrNull(Object obj)
	{
		if (obj == null || "null".equalsIgnoreCase(String.valueOf(obj)))
		{
			return "";
		}

		return String.valueOf(obj);
	}

	public String getBakPath()
	{
		return bakPath;
	}

}
