package com.huatai.bi.kafaka;

import cn.hutool.core.collection.CollUtil;
import cn.hutool.core.collection.CollectionUtil;
import cn.hutool.core.util.ObjectUtil;
import com.alibaba.fastjson2.JSON;
import com.alibaba.fastjson2.JSONObject;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.core.conditions.update.LambdaUpdateWrapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.core.toolkit.IdWorker;
import com.baomidou.mybatisplus.core.toolkit.Wrappers;
import com.huatai.bi.config.PythonConf;
import com.huatai.bi.constant.KafkaConstant;
import com.huatai.bi.constant.RedisConstant;
import com.huatai.bi.constant.SystemConstant;
import com.huatai.bi.dto.bigScreen.BigScreenGlobalFilterDTO;
import com.huatai.bi.dto.kafka.DataSetDTO;
import com.huatai.bi.entity.*;
import com.huatai.bi.enums.ProcessEnum;
import com.huatai.bi.handler.DatasourceHandler;
import com.huatai.bi.handler.dataSet.PythonCode;
import com.huatai.bi.handler.dataSet.SqlCode;
import com.huatai.bi.mapper.*;
import com.huatai.bi.service.*;
import com.huatai.bi.service.impl.ClickHouseJDBCService;
import com.huatai.bi.utils.PythonCodeUtil;
import com.huatai.bi.vo.bigScreenVO.ProDirDataSetVO;
import com.huatai.common.api.ResultCode;
import com.huatai.common.error.exception.ServiceException;
import lombok.extern.log4j.Log4j2;
import org.apache.commons.lang3.StringUtils;
import org.redisson.api.RLock;
import org.redisson.api.RedissonClient;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Lazy;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Service;
import org.springframework.util.Assert;

import java.text.SimpleDateFormat;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
import java.util.function.Function;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;

/**
 * @BelongsProject: itc-airport-cloud
 * @BelongsPackage: com.huatai.bi.kafaka
 * @Author: hecaiy
 * @CreateTime: 2024-07-09  17:27
 * @Description: Asynchronously synchronizes data-set data into ClickHouse
 *               (raw, processed and merged/union data sets) and notifies the
 *               downstream processing pipeline via Kafka. One Redisson lock
 *               per data-set id guarantees a single running sync; a
 *               "recreate table" request may interrupt an in-flight sync.
 * @Version: 1.0
 */
@Service
@Log4j2
public class SyncDataSetService {

	@Autowired
	private RedissonClient redissonClient;
	@Autowired
	private DirectoryMapper directoryMapper;
	@Autowired
	private DataSetColumnService dataSetColumnService;
	@Autowired
	private DirectDataSetMapper directDataSetMapper;
	@Autowired
	private ClickHouseJDBCService clickHouseJDBCService;
	@Lazy
	@Autowired
	private SqlCode sqlCode;
	@Lazy
	@Autowired
	private PythonCode pythonCode;
	@Autowired
	private KafkaTemplate<String, Object> kafkaTemplate;
	@Autowired
	private TableMapper tableMapper;
	@Autowired
	private DataSynrecordMapper dataSynrecordMapper;
	@Autowired
	@Lazy
	private DataSynApiService apiService;
	@Autowired
	@Lazy
	private DataProcessService dataProcessService;
	@Autowired
	@Lazy
	private DataProcessInfoService dataProcessInfoService;
	@Autowired
	private DataSetParameterService dataSetParameterService;
	@Autowired
	DatasourceService datasourceService;
	@Autowired
	PythonConf pythonConf;
	@Autowired
	DataProcessInfoMapper dataProcessInfoMapper;

	/**
	 * Worker thread per data-set id, so a higher-priority "recreate table"
	 * run can interrupt an in-flight sync. Accessed from multiple @Async
	 * threads, hence a concurrent map (was a plain HashMap). Entries are
	 * removed in the finally blocks to avoid retaining dead threads.
	 */
	private final Map<Long, Thread> threadMap = new ConcurrentHashMap<>();

	/**
	 * 分页最大值 — batch size used when copying data page by page.
	 */
	private static final Integer PAGESIZE = 10000;


	/**
	 * Synchronizes the data of one direct data set.
	 * <p>
	 * DB-type data sets are delegated to {@link DataSynApiService}; SQL,
	 * Python and Excel data sets are loaded into a master/slave ClickHouse
	 * table pair under a Redisson lock, after which a Kafka event triggers
	 * the dependent processed data sets.
	 *
	 * @param dtId            data set (directory) id
	 * @param createTableFlag true when the target tables must be (re)created (edit flow)
	 * @param jsonObject      sync-record payload forwarded to the DB sync API
	 *                        when no table creation is needed
	 */
	@Async("asyncServiceExecutor")
	public void synDataSet(Long dtId, Boolean createTableFlag, com.alibaba.fastjson.JSONObject jsonObject) {

		DirectoryEntity directory = directoryMapper.selectById(dtId);
		if (ObjectUtil.isEmpty(directory)) {
			log.error("数据集信息不存在");
			return;
		}
		DirectDataSetEntity dataSet = directDataSetMapper.selectById(dtId);
		if (ObjectUtil.isEmpty(dataSet)) {
			log.error("数据集信息不存在");
			return;
		}

		log.info("数据集ID:{} 名称:{},数据集类型:{} 正在同步数据,创表状态:{}", directory.getDtId(), directory.getName(), dataSet.getType(), createTableFlag);
		List<DataSetColumnEntity> list = null;
		if (createTableFlag) {
			// Column metadata is only needed when tables have to be rebuilt.
			list = dataSetColumnService.list(Wrappers.<DataSetColumnEntity>lambdaQuery().eq(DataSetColumnEntity::getDataSetId, dtId)
				.orderByAsc(DataSetColumnEntity::getColumnPosition));
			if (ObjectUtil.isEmpty(list)) {
				log.error("数据集字段为空");
				return;
			}
		}


		if (SystemConstant.DATA_SET_TYPE_DB.equals(dataSet.getType())) {
			if (createTableFlag) {
				List<TableEntity> tableList = tableMapper.getTableListByDsIdAndName(dataSet.getTableName(), dataSet.getDsId());
				if (CollUtil.isNotEmpty(tableList)) {
					// Full sync replays the one-off synchronization; incremental sync
					// creates a fresh table seeded with the existing data.
					List<DataSynrecordEntity> entityList = dataSynrecordMapper.selectList(Wrappers.<DataSynrecordEntity>lambdaQuery().eq(DataSynrecordEntity::getDtId, directory.getDtId()));
					if (CollUtil.isNotEmpty(entityList)) {
						DataSynrecordEntity dataSynrecordEntity = entityList.get(0);
						if (dataSynrecordEntity.getSynType() == 1) {
							// Full sync: re-run the one-off sync with the stored record.
							String jsonString = JSON.toJSONString(dataSynrecordEntity);
							apiService.execDataSyn(com.alibaba.fastjson.JSONObject.parseObject(jsonString), "edit");
						} else {
							// Incremental: new table name = old prefix + fresh snowflake id,
							// created "as select *" from the current table.
							String tableName = directory.getTableName();
							tableName = tableName.substring(0, tableName.lastIndexOf("_"));
							tableName = tableName + "_" + IdWorker.getId();
							String sql = clickHouseJDBCService.getCreateTableByColumnListSql(tableName, list, list.get(0).getColumnName());
							sql = sql + " as select * from " + directory.getTableName();
							clickHouseJDBCService.createTable(sql);
							dataSynrecordEntity.setTargetTableName(tableName);
							dataSynrecordEntity.setTargetWornTable(directory.getTableName());
							dataSynrecordMapper.updateById(dataSynrecordEntity);
						}
					}
				}
			} else {
				apiService.execDataSyn(jsonObject, null);
			}
		} else {
			String key = RedisConstant.DATA_SET_SYN + dtId;
			RLock lock = redissonClient.getLock(key);
			try {
				boolean flag = lock.tryLock();
				log.info("第一次尝试加锁,加锁状态:{}", flag);
				if (!flag) { // lock not acquired: another sync is running
					if (createTableFlag) {
						// Table recreation outranks a plain data sync: interrupt the
						// running worker and retry the lock with a timeout.
						if (threadMap.containsKey(dtId)) {
							try {
								threadMap.remove(dtId).interrupt();
							} catch (Exception e) {
								// best effort — the previous worker may already be gone
							}
							try {
								flag = lock.tryLock(30, TimeUnit.SECONDS);
								log.info("创表第二次尝试加锁,加锁状态:{}", flag);
							} catch (Exception e) {
								// ignore; the finally block only unlocks when the lock is held
							}
						}
					} else {
						log.info("数据集:{}正在同步,直接返回", dtId);
						return;
					}
				}
				threadMap.put(dtId, Thread.currentThread());

				String masterTableName = directory.getTableName();
				String slaveTableName = directory.getVersion();

				boolean newTableFlag = false;
				// (Re)create the slave table when requested.
				if (createTableFlag) {
					log.info("需要创表");
					// No master table yet: first-time creation.
					if (StringUtils.isBlank(masterTableName)) {
						newTableFlag = true;
						masterTableName = dataSet.getType().toLowerCase() + "_master_" + dtId;
					}

					// No slave table yet: derive its name; otherwise drop the old one.
					if (StringUtils.isBlank(slaveTableName)) {
						slaveTableName = dataSet.getType().toLowerCase() + "_slave_" + dtId;
					} else {
						log.info("删除附表:{}", slaveTableName);
						deleteTable(slaveTableName);
					}
					// Recreate the slave table with the current column layout.
					log.info("重建附表:{}", slaveTableName);
					clickHouseJDBCService.createTableByColumnList(slaveTableName, list, "");
				} else if (StringUtils.isBlank(masterTableName)) {
					return; // no target table exists yet, nothing to sync into
				}
				// Swap master/slave: data is loaded into the slave while readers
				// keep using the master until the swap is persisted.
				directory.setTableName(slaveTableName);
				directory.setVersion(masterTableName);

				// Brand-new tables persist the swap first; existing tables persist
				// it only after the data has been copied.
				if (newTableFlag) {
					directoryMapper.updateById(directory);
				}
				log.info("开始数据同步");
				dataSyn(directory, dataSet);
				log.info("完成数据同步");
				if (!newTableFlag) {
					directoryMapper.updateById(directory);
				}

				DataSetDTO dataSetDTO = new DataSetDTO();
				dataSetDTO.setDtId(dtId);
				// Rebuild (or just empty) the now-idle former master table.
				if (createTableFlag) {
					if (!newTableFlag) {
						deleteTable(masterTableName);
					}
					clickHouseJDBCService.createTableByColumnList(masterTableName, list, "");
					dataSetDTO.setType("edit");
				} else {
					clickHouseJDBCService.emptyTable(masterTableName);
				}
				// Notify the data-processing sync service that fresh data is ready.
				kafkaTemplate.send(KafkaConstant.KAFKA_TOPIC_DATA_SET_PROCESS, JSON.toJSONString(dataSetDTO));
			} catch (Exception e) {
				// Keep the message format, but also log the stack trace (was lost before).
				log.error("数据集:{}数据同步异常,异常信息:{}", dtId, e.getMessage(), e);
			} finally {
				// Only remove our own registration (another run may have replaced it).
				threadMap.remove(dtId, Thread.currentThread());
				if (null != lock && lock.isHeldByCurrentThread()) {
					lock.unlock();
				}
			}
		}
		LambdaUpdateWrapper<DirectDataSetEntity> wrapper = Wrappers.lambdaUpdate(DirectDataSetEntity.class);
		wrapper.eq(DirectDataSetEntity::getId, dtId);
		wrapper.set(DirectDataSetEntity::getDataUpdateTime, new Date());
		directDataSetMapper.update(null, wrapper);
	}

	/**
	 * Copies the rows of a SQL/Python/Excel data set into its current target table.
	 *
	 * @param directory directory row; {@code tableName} is the load target,
	 *                  {@code version} holds the previous (master) table
	 * @param dataSet   data set definition (type + code script)
	 */
	private void dataSyn(DirectoryEntity directory, DirectDataSetEntity dataSet) {
		if (SystemConstant.DATA_SET_TYPE_SQL.equals(dataSet.getType())) {
			try {
				clickHouseJDBCService.emptyTable(directory.getTableName());
				// Probe once for the total row count, then page through the source.
				IPage page = sqlCode.page(dataSet, 1, 1);
				long total = page.getTotal();
				long pages = (long) Math.ceil((double) total / PAGESIZE);
				for (int i = 1; i <= pages; i++) {
					List<Object> objects = sqlCode.previewData(dataSet, i, PAGESIZE);
					if (CollUtil.isNotEmpty(objects)) {
						clickHouseJDBCService.insertDataByTableNameObject(directory.getTableName(), objects);
					}
				}
			} catch (Exception e) {
				// Previously swallowed silently; the failure is still non-fatal
				// (best-effort sync) but at least leaves a trace now.
				log.error("SQL数据集:{}同步失败", directory.getDtId(), e);
			}
		} else if (SystemConstant.DATA_SET_TYPE_PYTHON.equals(dataSet.getType())) {
			List<Object> objects = pythonCode.previewData(dataSet, null, null);
			if (ObjectUtil.isNotEmpty(objects)) {
				clickHouseJDBCService.insertDataByTableNameObject(directory.getTableName(), objects);
			}
		} else if (SystemConstant.DATA_SET_TYPE_EXCEL.equals(dataSet.getType())) {
			// Excel data already lives in the previous master table; copy it over.
			String sql = "select * from " + directory.getVersion();
			clickHouseJDBCService.insertTableProcessDS(directory.getTableName(), sql);
		}
	}

	/**
	 * 逐步加工，新建表执行 — re-runs, step by step, every processed ("process")
	 * data set derived from the given raw data set, then every merged (union)
	 * data set that references it. Each derived set maintains a master/slave
	 * table pair that is swapped once the new data is in place.
	 *
	 * @param dtId            id of the raw data set whose data changed
	 * @param createTableFlag true when the step tables must be dropped and recreated
	 */
	@Async("asyncServiceExecutor")
	public void dataSetProcess(Long dtId, Boolean createTableFlag) {

		String key = RedisConstant.DATA_SET_PROCESS_SYN + dtId;
		RLock lock = redissonClient.getLock(key);
		log.info("加工数据集数据同步:{},重新创建表:{}", dtId, createTableFlag);
		try {
			boolean flag = lock.tryLock();
			if (!flag) { // lock not acquired: another run is active
				if (createTableFlag) {
					// Table recreation outranks a plain sync: interrupt the running
					// worker and retry the lock with a timeout.
					if (threadMap.containsKey(dtId)) {
						try {
							threadMap.remove(dtId).interrupt();
						} catch (Exception e) {
							// best effort — the previous worker may already be gone
						}
						try {
							// Capture the result (was discarded) for the log, matching synDataSet.
							flag = lock.tryLock(30, TimeUnit.SECONDS);
							log.info("创表第二次尝试加锁,加锁状态:{}", flag);
						} catch (Exception e) {
							// ignore; the finally block only unlocks when the lock is held
						}
					}
				} else {
					log.info("数据集:{}正在同步,直接返回", dtId);
					return;
				}
			}
			threadMap.put(dtId, Thread.currentThread());

			// The raw (source) data set.
			DirectoryEntity directoryEntity = directoryMapper.selectById(dtId);
			if (ObjectUtil.isNotEmpty(directoryEntity)) {
				// Current table of the raw data set; substituted for ${tableName}.
				String tableName = directoryEntity.getTableName();
				// Rows whose master/slave swap must be persisted.
				List<DataProcessEntity> updateList = new ArrayList<>();
				/**************************  processed data sets  *************************/
				List<DataProcessEntity> processList = dataProcessService.list(Wrappers.<DataProcessEntity>lambdaQuery()
					.eq(DataProcessEntity::getDtId, dtId)
					.eq(DataProcessEntity::getType, "process"));
				String sql = null;
				if (CollectionUtil.isNotEmpty(processList)) {
					List<Long> processIds = processList.stream().map(DataProcessEntity::getId).collect(Collectors.toList());
					List<DataProcessInfoEntity> infoList = dataProcessInfoService.list(Wrappers.<DataProcessInfoEntity>lambdaQuery()
						.in(DataProcessInfoEntity::getDpId, processIds)
						.orderByAsc(DataProcessInfoEntity::getOrderNum));

					// Processing steps grouped by data-process id, in step order.
					Map<Long, List<DataProcessInfoEntity>> infoMap = new HashMap<>();
					for (DataProcessInfoEntity item : infoList) {
						infoMap.computeIfAbsent(item.getDpId(), k -> new ArrayList<>()).add(item);
					}

					// Walk every processed data set.
					for (DataProcessEntity item : processList) {
						log.info("开始处理加工数据,ID:{},名称:{}", item.getId(), item.getName());
						List<DataProcessInfoEntity> processInfoList = infoMap.get(item.getId());
						if (CollUtil.isEmpty(processInfoList)) {
							continue; // no recorded steps (previously an NPE)
						}
						// Swap master/slave; new data is written into the new master.
						String table = item.getTableName();
						item.setTableName(item.getVersion());
						item.setVersion(table);
						updateList.add(item);

						int num = processInfoList.size();
						for (DataProcessInfoEntity info : processInfoList) {
							String processInfoTableName = info.getProcessTable();
							if (1 == num) { // last step writes into the data set's own table
								processInfoTableName = item.getTableName();
							}
							String parameterValues = info.getParameterValues();
							if (parameterValues.contains("${tableName}")) {
								parameterValues = parameterValues.replace("${tableName}", tableName);
							}
							if (parameterValues.contains("${unionTableName}")) {
								// The current data set's table is already known; only
								// foreign data sets require a directory lookup.
								if (dtId.equals(info.getUnionDtId())) {
									parameterValues = parameterValues.replace("${unionTableName}", tableName);
								} else {
									DirectoryEntity directory = directoryMapper.selectById(info.getUnionDtId());
									parameterValues = parameterValues.replace("${unionTableName}", directory.getTableName());
								}
							}

							log.info("执行sql:{}", parameterValues);
							// Tables were swapped above: either rebuild the step table,
							// or empty it and re-insert the rows.
							if (createTableFlag) {
								deleteTable(processInfoTableName);
								clickHouseJDBCService.createTableProcessDS(processInfoTableName, null, parameterValues);
							} else {
								clickHouseJDBCService.emptyTable(processInfoTableName);
								clickHouseJDBCService.insertTableProcessDS(processInfoTableName, parameterValues);
							}
							num--;
						}
					}
					// Persist the master/slave swap.
					dataProcessService.updateBatchById(updateList);
					for (DataProcessEntity item : processList) {
						if (createTableFlag) {
							deleteTable(item.getVersion());
							sql = "select * from " + item.getTableName();
							clickHouseJDBCService.createTableProcessDS(item.getVersion(), null, sql);
						}
						// Empty the (now slave) table.
						clickHouseJDBCService.emptyTable(item.getVersion());
					}
				}
				updateList.clear();

				/**************************  merged (union) data sets  ****************************/

				log.info("开始处理合并数据集");
				// Union steps in OTHER data sets that reference this data set.
				List<DataProcessInfoEntity> infoList = dataProcessInfoService.list(Wrappers.<DataProcessInfoEntity>lambdaQuery()
					.eq(DataProcessInfoEntity::getType, ProcessEnum.UNION.type)
					.eq(DataProcessInfoEntity::getUnionDtId, dtId));
				if (CollectionUtil.isNotEmpty(infoList)) {
					List<Long> dpIds = infoList.stream().map(DataProcessInfoEntity::getDpId).distinct().collect(Collectors.toList());
					processList = dataProcessService.list(Wrappers.<DataProcessEntity>lambdaQuery()
						.eq(DataProcessEntity::getType, "process")
						.in(DataProcessEntity::getId, dpIds)
						.ne(DataProcessEntity::getDtId, dtId));
					Map<Long, DataProcessEntity> map = processList.stream().collect(Collectors.toMap(DataProcessEntity::getId, Function.identity()));
					LambdaQueryWrapper<DataProcessInfoEntity> qw = Wrappers.lambdaQuery();

					// Table of the data set that triggered this run; substituted for
					// ${unionTableName} in every union step below.
					String unionTableName = tableName;
					for (DataProcessInfoEntity info : infoList) {

						DataProcessEntity process = map.get(info.getDpId());
						if (null == process) {
							// The union step's owner was filtered out (belongs to this
							// data set itself); previously this caused an NPE.
							continue;
						}
						log.info("合并数据集开始处理,ID:{},名称:{}", process.getId(), process.getName());
						// Swap master/slave tables.
						String table = process.getTableName();
						process.setTableName(process.getVersion());
						process.setVersion(table);
						updateList.add(process);

						// Raw table of the union step's OWN data set, for ${tableName}.
						tableName = directoryMapper.selectById(process.getDtId()).getTableName();

						// Re-run the union step and every step after it, in order.
						qw.clear();
						qw.eq(DataProcessInfoEntity::getDpId, info.getDpId());
						qw.ge(DataProcessInfoEntity::getOrderNum, info.getOrderNum());
						qw.orderByAsc(DataProcessInfoEntity::getOrderNum);
						List<DataProcessInfoEntity> list = dataProcessInfoService.list(qw);
						int num = list.size();
						for (DataProcessInfoEntity item : list) {
							String processInfoTableName = item.getProcessTable();
							if (1 == num) { // last step writes into the data set's own table
								processInfoTableName = process.getTableName();
							}
							// FIX: each step must execute its OWN SQL; the original read
							// info.getParameterValues(), repeating the union step's SQL.
							String parameterValues = item.getParameterValues();
							if (parameterValues.contains("${tableName}")) {
								parameterValues = parameterValues.replace("${tableName}", tableName);
							}
							if (parameterValues.contains("${unionTableName}")) {
								parameterValues = parameterValues.replace("${unionTableName}", unionTableName);
							}

							log.info("执行sql:{}", parameterValues);
							// Tables were swapped above: either rebuild the step table,
							// or empty it and re-insert the rows.
							if (createTableFlag) {
								deleteTable(processInfoTableName);
								clickHouseJDBCService.createTableProcessDS(processInfoTableName, null, parameterValues);
							} else {
								clickHouseJDBCService.emptyTable(processInfoTableName);
								clickHouseJDBCService.insertTableProcessDS(processInfoTableName, parameterValues);
							}
							// FIX: decrement was missing, so intermediate steps could
							// never be recognized; mirrors the "process" branch above.
							num--;
						}
					}
					// Persist the master/slave swap.
					dataProcessService.updateBatchById(updateList);
					for (DataProcessEntity item : processList) {
						if (createTableFlag) {
							deleteTable(item.getVersion());
							sql = "select * from " + item.getTableName();
							clickHouseJDBCService.createTableProcessDS(item.getVersion(), null, sql);
						}
						// Empty the (now slave) table.
						clickHouseJDBCService.emptyTable(item.getVersion());
					}
				}
			}
		} catch (Exception e) {
			// Keep the message format, but also log the stack trace (was lost before).
			log.error("数据集:{}数据同步异常,异常信息:{}", dtId, e.getMessage(), e);
		} finally {
			// Only remove our own registration (another run may have replaced it).
			threadMap.remove(dtId, Thread.currentThread());
			if (null != lock && lock.isHeldByCurrentThread()) {
				lock.unlock();
			}
		}
	}

	/**
	 * Drops a ClickHouse table, tolerating failures (e.g. table does not exist).
	 */
	private void deleteTable(String tableName) {
		try {
			clickHouseJDBCService.dropTable(tableName);
		} catch (Exception e) {
			// best effort, but keep a trace (was swallowed silently)
			log.warn("删除表:{}失败", tableName, e);
		}
	}

	/**
	 * Builds a temporary, parameter-filtered copy of a data set for linked
	 * big-screen parameters and returns the name of the table holding the result.
	 *
	 * @param vo      data set descriptor (may be a processed data set)
	 * @param globals global filter components carrying the bound parameter values
	 * @param title   component title, used in error messages
	 * @return name of the temporary result table, or {@code null} when no
	 *         parameter substitution applies
	 * @throws ServiceException when a parameter is bound twice, is not declared
	 *                          by the data set, or carries an invalid value
	 */
	public String paramTempSynData(ProDirDataSetVO vo, List<BigScreenGlobalFilterDTO> globals, String title) {

		if (CollectionUtil.isEmpty(globals) || null == vo) {
			return null;
		}
		// DB and Excel data sets have no parameters.
		if (SystemConstant.DATA_SET_TYPE_DB.equals(vo.getType()) || SystemConstant.DATA_SET_TYPE_EXCEL.equals(vo.getType())) {
			return null;
		}

		// Collect parameter name -> value from all components, rejecting duplicates.
		Map<String, Object> paramMap = new HashMap<>();
		for (BigScreenGlobalFilterDTO item : globals) {
			if (StringUtils.isNotBlank(item.getParam1())) {
				if (paramMap.containsKey(item.getParam1())) { // duplicate binding
					throw new ServiceException(String.format("组件 %s 参数 %s 被重复绑定", title, item.getParam1()));
				}
				paramMap.put(item.getParam1(), item.getValue());
			}

			if (StringUtils.isNotBlank(item.getParam2())) {
				if (paramMap.containsKey(item.getParam2())) { // duplicate binding
					throw new ServiceException(String.format("组件 %s 参数 %s 被重复绑定", title, item.getParam2()));
				}
				paramMap.put(item.getParam2(), item.getValue());
			}
		}
		if (paramMap.isEmpty()) { // no parameters bound: nothing to do
			return null;
		}

		// Parameters declared by the data set.
		List<DataSetParameterEntity> params = dataSetParameterService.listByDtId(vo.getId());
		if (CollectionUtil.isEmpty(params)) {
			return null;
		}
		// Convert each value to the parameter's declared type.
		JSONObject param = new JSONObject();
		for (DataSetParameterEntity item : params) {

			String name = item.getName();
			String type = item.getType();
			String value = item.getDefaultValue();
			if (paramMap.containsKey(name)) {
				value = (String) paramMap.remove(name);
			}

			if (SystemConstant.DATA_TYPE_STRING.equals(type)) {
				param.put(name, value);
			} else if (SystemConstant.DATA_TYPE_NUMBER.equals(type)) {
				// contains (not indexOf>0) so values like ".5" parse as doubles
				if (value.contains(".")) {
					param.put(name, Double.valueOf(value));
				} else {
					param.put(name, Long.valueOf(value));
				}
			} else if (SystemConstant.DATA_TYPE_DATE.equals(type)) {
				try {
					SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
					param.put(name, sdf.parse(value));
				} catch (Exception e) {
					throw new ServiceException("日期格式异常，请检查");
				}
			}
		}
		// Anything left over was bound by a component but not declared by the data set.
		if (!paramMap.isEmpty()) {
			throw new ServiceException(String.format("参数 %s 不存在", paramMap.keySet().toString()));
		}
		List<String> tableNames = new ArrayList<>();
		Long dtId = vo.getId();
		DataProcessEntity process = null;
		if (SystemConstant.PROCESS_TYPE_PROCESS.equals(vo.getType())) {
			process = dataProcessService.getById(vo.getId());
			Assert.notNull(process, "数据集不存在");
			dtId = process.getDtId();
		}

		DirectDataSetEntity dataSet = directDataSetMapper.selectById(dtId);
		Assert.notNull(dataSet, "数据集不存在");

		// Sync the raw data set into a temp table with the parameters applied.
		String tableName = sourceDataSetSyn(dataSet, vo.getTableName(), param);

		// Processed data set: replay every step into chained temp tables.
		if (null != process) {
			List<DataProcessInfoEntity> infos = dataProcessInfoMapper.infoListByProcessId(process.getId());
			for (DataProcessInfoEntity info : infos) {
				String temp = "temp_" + IdWorker.getId(); // per-step temp table

				String processInfoTableName = info.getProcessTable();
				String parameterValues = info.getParameterValues();
				if (parameterValues.contains("${tableName}")) {
					parameterValues = parameterValues.replace("${tableName}", tableName);
				}
				if (parameterValues.contains("${unionTableName}")) {
					// The current data set's table is already known; only foreign
					// data sets require a directory lookup.
					if (dtId.equals(info.getUnionDtId())) {
						parameterValues = parameterValues.replace("${unionTableName}", tableName);
					} else {
						DirectoryEntity directory = directoryMapper.selectById(info.getUnionDtId());
						parameterValues = parameterValues.replace("${unionTableName}", directory.getTableName());
					}
				}

				// Point the step at the previous step's temp table instead of the
				// persistent step table.
				if (tableNames.size() > 0) {
					parameterValues = parameterValues.replace(processInfoTableName, tableNames.get(tableNames.size() - 1));
				}
				log.info("执行sql:{}", parameterValues);
				clickHouseJDBCService.createTableProcessDS(temp, null, parameterValues);
				tableNames.add(temp);
			}
		}
		tableNames.add(0, tableName); // the raw-data temp table goes first
		tableName = tableNames.remove(tableNames.size() - 1); // the last table is the final result
		for (String item : tableNames) { // drop all intermediate tables
			try {
				clickHouseJDBCService.dropTable(item);
			} catch (Exception e) {
				// best effort cleanup, but keep a trace (was swallowed silently)
				log.warn("删除临时表:{}失败", item, e);
			}
		}
		return tableName;
	}

	/**
	 * Synchronizes the raw data set into a throw-away temp table with the
	 * given parameters applied and returns the temp table's name.
	 *
	 * @param dataSet   data set definition (SQL or Python)
	 * @param tableName existing table used as the schema template
	 * @param param     parameter name -> typed value substitutions
	 * @return name of the temporary table
	 */
	private String sourceDataSetSyn(DirectDataSetEntity dataSet, String tableName, JSONObject param) {
		String tableNameTemp = "temp_" + IdWorker.getId();
		// "where 1=2" clones the schema without copying any rows.
		String sql = "select * from " + tableName + " where 1=2";
		clickHouseJDBCService.createTableProcessDS(tableNameTemp, dataSet.getId(), sql);

		try {
			if (SystemConstant.DATA_SET_TYPE_SQL.equals(dataSet.getType())) {
				DatasourceEntity datasource = getDatasource(dataSet.getDsId());
				DatasourceHandler handler = null;

				try {
					handler = DatasourceHandler.datasourceFactory(datasource);
					int pageIndex = 1;
					int maxPageLimit = 10000; // hard cap to avoid an endless loop
					while (pageIndex <= maxPageLimit) {
						IPage page = handler.getPageDataBySql(dataSet.getCodeScript(), pageIndex, PAGESIZE, param);
						List records = page.getRecords();
						if (CollectionUtil.isNotEmpty(records)) {
							clickHouseJDBCService.insertDataByTableNameObject(tableNameTemp, records);
						}

						if (records.size() < PAGESIZE) {
							break; // last page
						}

						pageIndex++;
					}
				} catch (Exception e) {
					throw new ServiceException(ResultCode.FAILURE, e);
				} finally {
					if (handler != null) {
						handler.closeConn();
					}
				}

			} else if (SystemConstant.DATA_SET_TYPE_PYTHON.equals(dataSet.getType())) {
				// Substitute ${key} placeholders with a single regex pass;
				// appendReplacement never re-scans replaced text, so values
				// containing "${" cannot cause an endless loop.
				String code = dataSet.getCodeScript();
				Pattern pattern = Pattern.compile("\\$\\{(.*?)}");
				Matcher matcher = pattern.matcher(code);
				StringBuffer sb = new StringBuffer();
				while (matcher.find()) {
					String key = matcher.group(1).trim();
					String value = param.getString(key);
					matcher.appendReplacement(sb, value != null ? Matcher.quoteReplacement(value) : "");
				}
				matcher.appendTail(sb);
				code = sb.toString();

				List list = PythonCodeUtil.exec(pythonConf.getExecCommand(), code, pythonConf.getCodePath(), dataSet.getId());
				if (CollectionUtil.isNotEmpty(list)) {
					clickHouseJDBCService.insertDataByTableNameObject(tableNameTemp, list);
				}
			}

		} catch (Exception e) {
			throw new ServiceException(ResultCode.FAILURE, e);
		}

		// NOTE(review): this drops the very table whose name is returned and
		// subsequently queried by paramTempSynData — confirm dropTableIfExist
		// is really intended here (or that it only drops on some condition).
		clickHouseJDBCService.dropTableIfExist(tableNameTemp);

		return tableNameTemp;
	}

	/**
	 * Loads a datasource with decoded credentials.
	 *
	 * @param id datasource id
	 * @return the datasource, never {@code null}
	 * @throws ServiceException when the datasource does not exist
	 */
	private DatasourceEntity getDatasource(Long id) {
		DatasourceEntity datasource = datasourceService.getDecodeData(id);
		if (null == datasource) {
			throw new ServiceException("数据源不存在");
		}
		return datasource;
	}
}
