package zhuzx.toolkit;

import java.io.File;
import java.nio.charset.StandardCharsets;
import java.nio.file.Path;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.List;
import java.util.Map;
import java.util.function.Consumer;

import org.zhuzx.util.FileOperationUtil;
import org.zhuzx.util.GeneralUtil;
import org.zhuzx.util.JdbcUtil;
import org.zhuzx.util.ListDataUtil;
import org.zhuzx.util.MicroUtil;
import org.zhuzx.util.WebUtil;
import org.zhuzx.util.storage.DataStorage;

/**
 * Replicates table data out of a source database, either directly into another database
 * ({@link #exportToAnotherDatabase}) or into (possibly multi-volume) SQL script files
 * ({@link #exportToSqlFile}). The last exported key value and row count are persisted via
 * {@link DataStorage} so an interrupted run resumes where it stopped.
 *
 * <p>NOT thread-safe: all state lives in static fields, so only one export may run at a time.
 */
public class DataReplicator {

	/** Number of rows fetched per page while exporting. */
	private static final int QUERY_PAGE_SIZE = 10000;
	/** Multiple of QUERY_PAGE_SIZE so every volume file (except possibly the last) holds exactly this many rows. */
	private static final int MAX_ROWS_IN_FILE = QUERY_PAGE_SIZE * 10;
	
	/** Source table the data is exported from. */
	private static String srcTableName;
	/** Target table the data is inserted into. */
	private static String destTableName;
	/** Row count taken with count(*) before the export starts; may differ from the final exported total. */
	private static int expectTotalNum;
	/** Number of rows actually exported so far; accumulated while the export runs. */
	private static int exportedCount;
	/** Whether the source database is Oracle. */
	private static boolean isSrcOracle;

	/**
	 * Exports the rows selected by {@code sqlOrTableName} and batch-inserts them into another
	 * database whose connection settings (and optional {@code destTableName} property) are read
	 * from {@code zhuzx/toolkit/destDb.properties}.
	 *
	 * @param sqlOrTableName either a bare table name or a full select statement
	 * @param uniqKey unique, ordered column used for paging; falls back to {@code id} when blank
	 * @param singleRowModifier optional hook applied to every row before insertion; may be null
	 */
	public static void exportToAnotherDatabase(String sqlOrTableName, String uniqKey,
			Consumer<Map<String,Object>> singleRowModifier) {
		String confFile = "zhuzx/toolkit/destDb.properties";
		Connection destConn = JdbcUtil.getDbConn(confFile);
		try {
			destTableName = GeneralUtil.loadPropsFileFromClasspath(confFile).get("destTableName");
			if (GeneralUtil.isBlank(destTableName)) {
				getSqlAndInitSrcTableName(sqlOrTableName);//side effect: initializes srcTableName
				destTableName = srcTableName;
			}
			Consumer<List<Map<String,Object>>> dataProcessor =
					(List<Map<String,Object>> list) ->
							JdbcUtil.insertByBatch(destConn, destTableName, list);
			exportAllData(sqlOrTableName, uniqKey, singleRowModifier, dataProcessor);
		} finally {
			// FIX: the destination connection used to leak; always release it.
			JdbcUtil.close(destConn);
		}
	}
	
	/**
	 * Exports the rows selected by {@code sqlOrTableName} into SQL script file(s). When the total
	 * exceeds {@link #MAX_ROWS_IN_FILE} the output is split into volumes plus a launcher script
	 * that sources each volume in order.
	 *
	 * @param sqlOrTableName either a bare table name or a full select statement
	 * @param uniqKey unique, ordered column used for paging; falls back to {@code id} when blank
	 * @param singleRowModifier optional hook applied to every row before writing; may be null
	 * @param destTableName table name used in the generated insert statements; blank means "same as source"
	 * @return absolute path of the generated script (the launcher script when volumes were created)
	 */
	public static String exportToSqlFile(String sqlOrTableName, String uniqKey,
			Consumer<Map<String,Object>> singleRowModifier, String destTableName) {
		getSqlAndInitSrcTableName(sqlOrTableName);//side effect: initializes srcTableName
		FileOperationUtil.renameToBackup(FileOperationUtil.getPath(srcTableName));
		DataReplicator.destTableName = GeneralUtil.isNotBlank(destTableName) ? destTableName : srcTableName;
		// FIX: the parameter shadows the static field; previously a blank argument left the
		// local blank while only the field held the fallback, so the file names built below
		// were wrong. Re-read the resolved value into the local.
		destTableName = DataReplicator.destTableName;
		Consumer<List<Map<String,Object>>> dataProcessor =
				(List<Map<String,Object>> list) -> writeToFile(list);
		exportAllData(sqlOrTableName, uniqKey, singleRowModifier, dataProcessor);
		String fileName = getPhysicalFileName(destTableName, 0);
		if (expectTotalNum > MAX_ROWS_IN_FILE && exportedCount > MAX_ROWS_IN_FILE) {//generate an extra launcher script for the volumes
			//exportedCount is the real number of rows written to file; it decides the volume count.
			int fileNum = exportedCount / MAX_ROWS_IN_FILE + ((exportedCount % MAX_ROWS_IN_FILE > 0) ? 1 : 0);
			StringBuilder startup = new StringBuilder();
			for (int i=1; i<=fileNum; i++) {
				Path path = FileOperationUtil.getPath(getPhysicalFileName(destTableName, i));
				startup.append("@'").append(path.toString()).append("'\n");
			}
			String launcherName = getPhysicalFileName(destTableName, 0).replace(".sql", ".Launcher.sql");
			FileOperationUtil.writeToFile(startup.toString().getBytes(StandardCharsets.UTF_8),
					launcherName, true);
			fileName = launcherName;
		}
		return FileOperationUtil.getPath(fileName).toString();
	}
	
	/**
	 * Core export loop shared by both public entry points: counts the rows, determines the key
	 * range, then repeatedly queries one page ordered by {@code uniqKey} and hands each page to
	 * {@code dataProcessor}. After every page the last key value and the running count are
	 * persisted so an interrupted run can resume.
	 */
	private static void exportAllData(String sqlOrTableName, String uniqKey,
			Consumer<Map<String,Object>> singleRowModifier, Consumer<List<Map<String,Object>>> dataProcessor) {
		String sql = getSqlAndInitSrcTableName(sqlOrTableName);
		System.out.println("开始执行导出任务：\n" + sql);
		uniqKey = GeneralUtil.getNotBlankString(uniqKey, "id");//default to "id" for ordering/paging when no unique key was given
		// FIX: anchor on the first " from " instead of indexOf(srcTableName) — a short table
		// name (e.g. "t") could match earlier text such as "select", yielding broken count SQL.
		String countSql = "select count(*) as CNUM from " + sql.substring(sql.indexOf(" from ") + 6);
		Connection conn = JdbcUtil.getDbConn(null);
		try {
			List<Map<String,String>> countResult = JdbcUtil.executeQuery(countSql, conn);
			//If rows are inserted after this count, two small deviations follow: 1) the progress
			//percentages shown below; 2) if the count is <= MAX_ROWS_IN_FILE no volumes are created.
			expectTotalNum = Integer.parseInt(countResult.get(0).get("CNUM"));
			String prefaceSql = "select min(%s) as min_key, max(%s) as max_key from " + srcTableName;
			List<Map<String,Object>> prefaceList = JdbcUtil.executeQueryForObject(
					String.format(prefaceSql, uniqKey, uniqKey), conn);
			ListDataUtil.modifyKeyNameToLowerCase(prefaceList, false);
			Map<String,Object> tempMap = prefaceList.get(0);
			boolean isNumType = tempMap.get("min_key") instanceof Number;
			String minValue = tempMap.get("min_key").toString();
			System.out.println(srcTableName + String.format("总记录数：%s\n最小标识：%s\n最大标识：%s",
					expectTotalNum, minValue, tempMap.get("max_key")));
			
			String operator = sql.contains(" where ") ? "and" : "where";
			String rollCriteria = " " + operator + " " + uniqKey + " ${exp} ${start}";
			if (!isNumType) {//non-numeric key values must be quoted in the rolling criteria
				rollCriteria = rollCriteria.replace("${start}", "'${start}'");
			}
			sql += rollCriteria + " order by " + uniqKey;
			sql = JdbcUtil.wrapInLimit(sql, 0, QUERY_PAGE_SIZE, conn);
			//Load previously persisted progress, if any.
			//NOTE(review): the suffix repeats srcTableName where destTableName was probably
			//intended; left unchanged because altering the key would orphan progress that
			//earlier runs already saved.
			String progressIndicatorKeyPrefix = JdbcUtil.getUniqueIdenOfConnection(conn) + "-" + srcTableName
					+ "-replicate_to-dest-" + srcTableName;
			String progKeyOfValue = progressIndicatorKeyPrefix + "-last_value";
			//startValue is an exclusive boundary: the row holding it is NOT selected again.
			String startValue = DataStorage.getSingleData(progKeyOfValue);
			String progKeyOfCount = progressIndicatorKeyPrefix + "-exported_count";
			String countStr = DataStorage.getSingleData(progKeyOfCount);
			System.out.println("若存在过往的导入进度值，则读入，接续上次进度：\n" +
					progKeyOfValue + "=" + startValue + "\n" + progKeyOfCount + "=" + countStr);
			exportedCount = GeneralUtil.isNotBlank(countStr) ? Integer.parseInt(countStr) : 0;
			// FIX: JDBC drivers usually report "Oracle" (capitalized); compare case-insensitively.
			isSrcOracle = JdbcUtil.getDatabaseProductName(conn).toLowerCase().contains("oracle");
			while (true) {
				//The very first page uses ">=" so the row equal to minValue is included; all later
				//(and resumed) pages use ">" to exclude the already-exported boundary row.
				String execSql = sql.replace("${exp}", (startValue == null ? ">=" : ">"));
				execSql = execSql.replace("${start}", GeneralUtil.getNotBlankString(startValue, minValue));
				long timeStart = System.currentTimeMillis();
				List<Map<String,Object>> list = JdbcUtil.executeQueryForObject(execSql, conn);
				if (list.isEmpty()) {
					System.out.println(srcTableName + "已无待导出数据");
					break;
				}
				if (isSrcOracle) {
					ListDataUtil.removeKey(list, "ORACLE_ROWNO");//strip the helper column added by Oracle paging
				}
				if (singleRowModifier != null) {
					list.forEach(singleRowModifier);
				}
				dataProcessor.accept(list);
				exportedCount += list.size();
				Map<String,Object> last = list.get(list.size()-1);
				//Result-set keys may come back upper-cased (Oracle); try both spellings.
				String lastValue = GeneralUtil.getNotBlankString(last.get(uniqKey), last.get(uniqKey.toUpperCase()));
				DataStorage.writeSingleData(progKeyOfValue, lastValue);
				DataStorage.writeSingleData(progKeyOfCount, String.valueOf(exportedCount));
				long cost = System.currentTimeMillis() - timeStart;
				System.out.println(srcTableName + "第" + exportedCount + "条完成，本轮耗时" + cost + "ms，"
						+ "标识字段值：" + lastValue);
				System.out.println(srcTableName + "完成进度：" + MicroUtil.getPercentage(exportedCount, expectTotalNum));
				startValue = lastValue;
			}
		} finally {
			// FIX: close the source connection even when an exception interrupts the loop.
			JdbcUtil.close(conn);
		}
		System.out.println(">>>>导出任务结束：" + srcTableName + "一共导出" + exportedCount +
				"，执行前统计的总数为：" + expectTotalNum + "，相差" + (exportedCount-expectTotalNum) + "。");
		//TODO(review): the boundary value and count persisted above are never cleared on completion,
		//so a finished task resumes incrementally instead of starting over — confirm that is intended.
	}
	
	/**
	 * Builds the relative script file name for the given table: {@code table/table.sql}, or
	 * {@code table/table.partN.sql} when {@code fileSeq > 0} (volume files).
	 */
	private static String getPhysicalFileName(String tableName, int fileSeq) {
		String fileName = tableName + File.separator + tableName;
		if (fileSeq > 0) {
			fileName += ".part" + fileSeq;
		}
		return fileName + ".sql";
	}
	
	/**
	 * Converts one page of rows into batch-insert SQL text and appends it to the current output
	 * file (or to the current volume file when the export is split into volumes).
	 */
	private static void writeToFile(List<Map<String,Object>> list) {
		List<Map<String,String>> stringTypeList = WebUtil.createStringTypeCopy(list);
		//fileSeq advances to the next volume exactly when exportedCount reaches a multiple of
		//MAX_ROWS_IN_FILE (QUERY_PAGE_SIZE divides MAX_ROWS_IN_FILE, so pages never straddle volumes).
		int fileSeq = (expectTotalNum > MAX_ROWS_IN_FILE) ? (exportedCount / MAX_ROWS_IN_FILE + 1) : 0;
		//Rough estimate of the generated SQL length: the statements carry extra tokens
		//(insert into, table name, values, ...) so they run somewhat longer than toString().
		int estimatedLength = list.get(0).toString().length() * 2 * list.size();
		//All statements of this page are gathered here and written to the file in one go.
		StringBuilder sqlTextContainer = new StringBuilder(estimatedLength);
		int exportedMemCount = exportedCount;
		int firstInCurrentFile = exportedCount + 1;
		for (List<Map<String,String>> subList : ListDataUtil.divideList(stringTypeList, 300)) {
			exportedMemCount += subList.size();
			String msg = "已导入第" + firstInCurrentFile + "—" + exportedMemCount
					+ "，总体进度" + MicroUtil.getPercentage(exportedMemCount, expectTotalNum) + "。";
			if (fileSeq > 0) {//extra hint per volume; every volume except a partial last one holds exactly MAX_ROWS_IN_FILE rows
				int totalInFile = (fileSeq > expectTotalNum/MAX_ROWS_IN_FILE) ? (expectTotalNum%MAX_ROWS_IN_FILE) : MAX_ROWS_IN_FILE;
				int curInFile = exportedMemCount - ((fileSeq - 1) * MAX_ROWS_IN_FILE);
				msg += "分卷(" + fileSeq + ")第" + (curInFile-subList.size()+1) + "—" + curInFile
						+ "，分卷进度" + MicroUtil.getPercentage(curInFile, totalInFile) + "。";
			}
			//NOTE(review): only an Oracle script generator exists; for non-Oracle sources the
			//container stays empty and an empty file is written — confirm whether that is intended.
			if (isSrcOracle) {
				sqlTextContainer.append(JdbcUtil.createBatchInsertScriptForOracle(destTableName, subList, msg))
						.append("\n");
			}
			firstInCurrentFile += subList.size();
		}
		FileOperationUtil.writeToFile(sqlTextContainer.toString().getBytes(StandardCharsets.UTF_8),
				getPhysicalFileName(destTableName, fileSeq), false);
	}
	
	/**
	 * Normalizes the argument into an executable select statement and initializes
	 * {@link #srcTableName} as a side effect. A bare table name becomes
	 * {@code select * from <name>}; a full statement must not contain order by / group by,
	 * because the export appends its own ordering.
	 *
	 * @throws IllegalArgumentException if the statement contains order by / group by
	 */
	private static String getSqlAndInitSrcTableName(String sqlOrTableName) {
		String sql = sqlOrTableName.trim().replaceAll("\\s+", " ").toLowerCase();
		if (sql.contains(" order by ") || sql.contains(" group by ")) {
			throw new IllegalArgumentException("sql must not contain 'order by' or 'group by': " + sql);
		}
		if (sql.contains(" from ")) {
			// FIX: inside a character class '$' is a literal dollar sign, so the former pattern
			// " from \\S+[ $]" never matched when the statement ended right after the table name
			// (NPE on split) and truncated table names containing '$'. Use an alternation of
			// space or end-of-input instead.
			srcTableName = MicroUtil.extractFirst(sql, " from \\S+( |$)").split(" ")[2];
		} else {
			srcTableName = sql;
			sql = "select * from " + srcTableName;
		}
		return sql;
	}
	
	/** Ad-hoc entry point for manual runs. */
	public static void main(String[] args) throws SQLException {
		exportToAnotherDatabase("crm1.c_delivery", "id", null);
	}
	
}