package com.itc.bi.service.impl;

import cn.hutool.core.collection.CollUtil;
import cn.hutool.core.util.ObjectUtil;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.core.toolkit.IdWorker;
import com.baomidou.mybatisplus.core.toolkit.Wrappers;
import com.itc.bi.constant.KafkaConstant;
import com.itc.bi.constant.SystemConstant;
import com.itc.bi.dto.kafka.DataSetDTO;
import com.itc.bi.entity.*;
import com.itc.bi.handler.dataSet.PythonCode;
import com.itc.bi.handler.dataSet.SqlCode;
import com.itc.bi.kafaka.SyncDataSetService;
import com.itc.bi.mapper.DirectDataSetMapper;
import com.itc.bi.mapper.DirectoryMapper;
import com.itc.bi.mapper.TableMapper;
import com.itc.bi.service.DataSynApiService;
import com.itc.bi.service.DataSynrecordService;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Service;

import java.util.List;

/**
 * Handles the "edit columns" flow for a data set: recreates the backing
 * ClickHouse table(s) with the new column layout, migrates or reloads the data
 * depending on the data-set type (EXCEL / DB / SQL / PYTHON), and notifies the
 * downstream data-processing pipeline via Kafka.
 *
 * @author hecaiy
 * @since 2024-08-11
 */
@Service
@Slf4j
public class SyncEditColumnProcessService {

	@Autowired
	private DirectoryMapper directoryMapper;
	@Autowired
	private XxlJobInfoServiceImpl xxlJobInfoService;
	@Autowired
	private TableMapper tableMapper;
	@Autowired
	private DataSynrecordService dataSynrecordService;
	@Autowired
	private SqlCode sqlCode;
	@Autowired
	private PythonCode pythonCode;
	@Autowired
	private SyncDataSetService syncDataSetService;
	@Autowired
	private KafkaTemplate<String, Object> kafkaTemplate;
	@Autowired
	private DataSynApiService apiService;
	@Autowired
	private DirectDataSetMapper directDataSetMapper;
	@Autowired
	private ClickHouseJDBCService clickHouseJDBCService;

	/**
	 * Maximum page size used when paging SQL-script results into ClickHouse.
	 */
	private static final int PAGE_SIZE = 10000;

	/**
	 * Asynchronously rebuilds the storage of a data set after its columns were edited.
	 * <p>
	 * Generates fresh physical table names, recreates the tables with the new column
	 * list, migrates or re-syncs the data according to the data-set type, and — unless
	 * a full re-sync was delegated elsewhere — updates the directory record, drops the
	 * old tables and pushes an "edit" notification to Kafka.
	 *
	 * @param dataSetDTOParam       carries the directory id ({@code dtId}) and the edit type
	 * @param dataSetColumnEntities the new column definitions for the data set
	 */
	@Async("asyncServiceExecutor")
	public void processDataSetCode(DataSetDTO dataSetDTOParam, List<DataSetColumnEntity> dataSetColumnEntities) {
		DirectoryEntity directoryEntity = directoryMapper.selectById(dataSetDTOParam.getDtId());
		if (ObjectUtil.isEmpty(directoryEntity)) {
			return;
		}
		DirectDataSetEntity dataSetEntity = directDataSetMapper.selectById(directoryEntity.getDtId());
		if (ObjectUtil.isEmpty(dataSetEntity)) {
			return;
		}
		// Remember the current (old) table names so they can be dropped once the
		// migration into the newly generated tables has finished.
		String tableNameSource = directoryEntity.getTableName();
		String versionSource = "";
		String tableName = directoryEntity.getTableName();
		String version = directoryEntity.getVersion();
		if (StringUtils.isEmpty(tableName)) {
			return;
		}
		tableName = getTableName(tableName);
		// "1" appears to be a sentinel meaning "no slave/version table" — only
		// regenerate the slave name when a real one exists. TODO confirm semantics.
		if (StringUtils.isNotEmpty(version) && !version.equalsIgnoreCase("1")) {
			version = getTableName(version);
			versionSource = directoryEntity.getVersion();
		}
		// true => a full re-sync was delegated elsewhere (it owns the table lifecycle
		// and the Kafka notification), so the local finish block below must be skipped.
		boolean skipLocalFinish = false;
		switch (dataSetEntity.getType()) {
			case SystemConstant.DATA_SET_TYPE_EXCEL:
				// Static import: recreate the main table and copy the existing rows over.
				createTableBySql(tableName, tableNameSource, dataSetColumnEntities, SystemConstant.DATA_SET_TYPE_EXCEL);
				// Recreate the slave table with the new columns (no data copy).
				clickHouseJDBCService.createTableByColumnList(version, dataSetColumnEntities, "");
				break;
			case SystemConstant.DATA_SET_TYPE_DB:
				List<TableEntity> list = tableMapper.getTableListByDsIdAndName(dataSetEntity.getTableName(), dataSetEntity.getDsId());
				if (CollUtil.isNotEmpty(list)) {
					// Full sync => rerun the one-shot sync job; incremental => recreate
					// the table locally and carry the existing data forward.
					List<DataSynrecordEntity> entityList = dataSynrecordService.list(
							Wrappers.<DataSynrecordEntity>lambdaQuery().eq(DataSynrecordEntity::getDtId, directoryEntity.getDtId()));
					DataSynrecordEntity dataSynrecordEntity = CollUtil.isNotEmpty(entityList) ? entityList.get(0) : null;
					if (ObjectUtil.isNotEmpty(dataSynrecordEntity)) {
						if (dataSynrecordEntity.getSynType() == 1) {
							// Full sync: delegate the reload to the data-sync API.
							skipLocalFinish = true;
							JSONObject jsonObject = JSONObject.parseObject(JSON.toJSONString(dataSynrecordEntity));
							apiService.execDataSyn(jsonObject, null);
						} else {
							createTableBySql(tableName, tableNameSource, dataSetColumnEntities, SystemConstant.DATA_SET_TYPE_DB);
							dataSynrecordEntity.setTargetTableName(tableName);
							dataSynrecordEntity.setTargetWornTable(tableNameSource);
							dataSynrecordService.saveOrUpdate(dataSynrecordEntity);
						}
					}
				}
				break;
			case SystemConstant.DATA_SET_TYPE_PYTHON:
				// FIX: capture the return value. Previously the callee assigned `isTrue = true`
				// to its own parameter, which Java never propagates back to the caller, so the
				// finish block below always ran and sent a duplicate Kafka notification.
				skipLocalFinish = processSqlAndPythonCode(tableName, version, tableNameSource, dataSetColumnEntities,
						SystemConstant.DATA_SET_TYPE_PYTHON, dataSetDTOParam.getType(), dataSetEntity, skipLocalFinish);
				break;
			case SystemConstant.DATA_SET_TYPE_SQL:
				skipLocalFinish = processSqlAndPythonCode(tableName, version, tableNameSource, dataSetColumnEntities,
						SystemConstant.DATA_SET_TYPE_SQL, dataSetDTOParam.getType(), dataSetEntity, skipLocalFinish);
				break;
			default:
				break;
		}
		if (!skipLocalFinish) {
			// Point the directory record at the newly created tables, then drop the old ones.
			directoryEntity.setTableName(tableName);
			directoryEntity.setVersion(version);
			directoryMapper.updateById(directoryEntity);
			clickHouseJDBCService.dropTable(tableNameSource);
			// FIX: versionSource is only ever assigned when the old version was NOT "1",
			// so the previous `versionSource.equalsIgnoreCase("1")` check could never be
			// true and the stale slave table leaked. Drop it whenever one existed.
			if (StringUtils.isNotEmpty(versionSource)) {
				clickHouseJDBCService.dropTable(versionSource);
			}
			// Notify the data-processing pipeline asynchronously via Kafka.
			DataSetDTO dataSetDTO = new DataSetDTO();
			dataSetDTO.setDtId(directoryEntity.getDtId());
			dataSetDTO.setIsSuccess(true);
			dataSetDTO.setType("edit");
			kafkaTemplate.send(KafkaConstant.KAFKA_TOPIC_DATA_SET_PROCESS, JSON.toJSONString(dataSetDTO));
		}
	}

	/**
	 * Creates the new table and back-fills it with the old table's rows in one statement
	 * ({@code CREATE TABLE ... AS SELECT * FROM old}).
	 *
	 * @param tableName             name of the table to create
	 * @param tableNameSource       existing table whose rows are copied over
	 * @param dataSetColumnEntities column definitions for the new table
	 * @param type                  data-set type; for "DB" the first column is used as the
	 *                              ordering/partition column — TODO confirm against
	 *                              getCreateTableByColumnListSql's third-argument semantics
	 */
	private void createTableBySql(String tableName, String tableNameSource, List<DataSetColumnEntity> dataSetColumnEntities, String type) {
		String sql;
		if (type.equalsIgnoreCase("DB")) {
			sql = clickHouseJDBCService.getCreateTableByColumnListSql(tableName, dataSetColumnEntities, dataSetColumnEntities.get(0).getColumnName());
		} else {
			sql = clickHouseJDBCService.getCreateTableByColumnListSql(tableName, dataSetColumnEntities, "");
		}
		// Copy the existing rows into the new table as part of the CREATE statement.
		sql = sql + " as select * from " + tableNameSource;
		clickHouseJDBCService.createTable(sql);
	}

	/**
	 * Handles the column-edit of an SQL- or Python-scripted data set.
	 * <p>
	 * For an "edit" operation the master and slave tables are recreated empty and the
	 * data is re-pulled by executing the script (paged for SQL, in one shot for Python),
	 * after which downstream processing is notified via Kafka. For any other operation
	 * the tables are recreated and the old data is carried over.
	 *
	 * @param tableName       new master table name
	 * @param slaveName       new slave table name
	 * @param tableNameSource old master table (data source for the non-edit path)
	 * @param list            new column definitions
	 * @param type            data-set type (SQL or PYTHON)
	 * @param typeEdit        operation type; "edit" triggers a full script re-run
	 * @param dataSetEntity   the data set whose script is executed
	 * @param isTrue          retained for signature compatibility; assigning to it never
	 *                        reached the caller (Java pass-by-value) — use the return value
	 * @return {@code true} if the edit path ran (tables rebuilt and Kafka already
	 *         notified here), {@code false} if the caller must finish up itself
	 */
	public boolean processSqlAndPythonCode(String tableName, String slaveName, String tableNameSource, List<DataSetColumnEntity> list, String type, String typeEdit, DirectDataSetEntity dataSetEntity, Boolean isTrue) {
		// Editing the script itself invalidates the data: recreate both tables empty
		// and re-pull everything via the "execute once" logic.
		if (typeEdit != null && typeEdit.equalsIgnoreCase("edit")) {
			// SQL/Python data sets keep a master and a slave table.
			clickHouseJDBCService.createTableByColumnList(tableName, list, "");
			clickHouseJDBCService.createTableByColumnList(slaveName, list, "");
			if (type.equalsIgnoreCase(SystemConstant.DATA_SET_TYPE_SQL)) {
				// First query only the total row count, then pull the data page by page.
				IPage<?> page = sqlCode.page(dataSetEntity, 1, 1);
				long total = page.getTotal();
				long pages = (long) Math.ceil((double) total / PAGE_SIZE);
				for (int i = 1; i <= pages; i++) {
					List<Object> objects = sqlCode.previewData(dataSetEntity, i, PAGE_SIZE);
					if (CollUtil.isNotEmpty(objects)) {
						clickHouseJDBCService.insertDataByTableNameObject(tableName, objects);
					}
				}
			} else { // python: the script returns the full result in one call
				List<Object> listObj = pythonCode.list(dataSetEntity);
				// Guard against an empty/absent script result before inserting.
				if (CollUtil.isNotEmpty(listObj)) {
					clickHouseJDBCService.insertDataByTableNameObject(tableName, listObj);
				}
			}
			// Notify the data-processing sync service that it can start processing.
			DataSetDTO dataSetDTO = new DataSetDTO();
			dataSetDTO.setDtId(dataSetEntity.getDtId());
			dataSetDTO.setType("edit");
			kafkaTemplate.send(KafkaConstant.KAFKA_TOPIC_DATA_SET_PROCESS, JSON.toJSONString(dataSetDTO));
			return true;
		}
		// Non-edit path: recreate the tables and carry the existing data forward.
		createTableBySql(tableName, tableNameSource, list, type);
		clickHouseJDBCService.createTableByColumnList(slaveName, list, "");
		return false;
	}

	/**
	 * Generates a fresh physical table name by replacing the trailing
	 * {@code _<suffix>} segment with a new snowflake id.
	 *
	 * @param tableName current table name, conventionally ending in {@code _<id>}
	 * @return base name with a newly generated id suffix
	 */
	private String getTableName(String tableName) {
		// FIX: lastIndexOf returns -1 when there is no '_', which previously made
		// substring(0, -1) throw StringIndexOutOfBoundsException.
		int idx = tableName.lastIndexOf('_');
		String base = idx >= 0 ? tableName.substring(0, idx) : tableName;
		return base + "_" + IdWorker.getId();
	}
}
