package com.walker.service.meta;

import java.sql.Connection;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;

import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

import com.walker.service.common.CommonService;
import com.walker.util.AesEncryptUtils;
import com.walker.util.CommonDao;
import com.walker.util.CommonUtils;
import com.walker.util.ConstantUtil;
import com.walker.util.DateUtils;
import com.walker.util.JdbcUtil;

@Service("SyncMetaHiveService")
public class SyncMetaHiveService {

	private static final Log log = LogFactory.getLog(SyncMetaHiveService.class);
	
	@Autowired
	@Qualifier("dmJdbcTemplate")
	private JdbcTemplate dmJdbcTemplate;

	@Autowired
	private CommonDao commonDao;
	
	@Autowired
	private CommonService commonService;
	
	/**
	 * Scheduled entry point: synchronizes Hive metadata for every active
	 * external project, then fully refreshes the t_meta_sync_hive snapshot.
	 * @throws Exception when a per-project sync or the batch insert fails
	 */
	@Scheduled(cron = "${sync.meta.interval.time:0 0 12 * * *}")
	@Transactional(value = "dmTransactionManager",rollbackFor={Exception.class})
	public void syncHiveMeta() throws Exception {
		log.info("==========元数据同步开始==========");
		// Only externally imported projects are synchronized at the moment.
		String projectSql = "select p.id as project_id,p.project_code,p.project_name,p.engine_id,p.liable_info as login_code,b.busin_id,b.busin_name from t_meta_project p,t_meta_business_field b where p.busin_id = b.busin_id and p.status = '0' and p.project_type = 'external'";
		List<Map<String, Object>> projects = dmJdbcTemplate.queryForList(projectSql);

		List<Map<String, Object>> rowsToSync = new ArrayList<Map<String, Object>>();
		for (Map<String, Object> project : projects) {
			syncHiveMeta(project, rowsToSync);
		}

		if (!rowsToSync.isEmpty()) {
			// Full refresh of t_meta_sync_hive: wipe the table, then batch insert the snapshot.
			dmJdbcTemplate.update("delete from t_meta_sync_hive");

			String insertSql = "insert into t_meta_sync_hive(project_id,db_id,db_name,tbl_id,tbl_name,tbl_comment,tbl_create_time,tbl_type,transient_time,col_name,col_comment,col_type,col_integer_idx,col_category,last_sync_time) values (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)";
			String columns = "project_id,db_id,db_name,tbl_id,tbl_name,tbl_comment,tbl_create_time,tbl_type,transient_time,col_name,col_comment,col_type,col_integer_idx,col_category,last_sync_time";
			commonDao.insertBatchTableData(rowsToSync, insertSql, columns);
		}
		log.info("==========元数据同步结束==========");
	}
	
	/**
	 * Synchronizes Hive metadata for a single external project identified by
	 * its project code, then refreshes that project's rows in t_meta_sync_hive.
	 * NOTE(review): the delete filters on db_name = project_code while the
	 * inserted rows carry the Hive db name — presumably identical, but confirm
	 * case handling against the metastore (other code lower-cases the db name).
	 * @param project_code code of the project to synchronize
	 * @throws Exception when the sync or the batch insert fails
	 */
	@Transactional(value = "dmTransactionManager",rollbackFor={Exception.class})
	public void syncHiveMeta(String project_code) throws Exception {
		// Only externally imported projects are synchronized at the moment.
		String projectSql = "select p.id as project_id,p.project_code,p.project_name,p.engine_id,p.liable_info as login_code,b.busin_id,b.busin_name from t_meta_project p,t_meta_business_field b where p.busin_id = b.busin_id and p.status = '0' and p.project_type = 'external' and p.project_code = ?";
		List<Map<String, Object>> projects = dmJdbcTemplate.queryForList(projectSql, project_code);

		List<Map<String, Object>> rowsToSync = new ArrayList<Map<String, Object>>();
		for (Map<String, Object> project : projects) {
			syncHiveMeta(project, rowsToSync);
		}

		if (!rowsToSync.isEmpty()) {
			// Replace only this project's snapshot in t_meta_sync_hive.
			dmJdbcTemplate.update("delete from t_meta_sync_hive where db_name = ?", project_code);

			String insertSql = "insert into t_meta_sync_hive(project_id,db_id,db_name,tbl_id,tbl_name,tbl_comment,tbl_create_time,tbl_type,transient_time,col_name,col_comment,col_type,col_integer_idx,col_category,last_sync_time) values (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)";
			String columns = "project_id,db_id,db_name,tbl_id,tbl_name,tbl_comment,tbl_create_time,tbl_type,transient_time,col_name,col_comment,col_type,col_integer_idx,col_category,last_sync_time";
			commonDao.insertBatchTableData(rowsToSync, insertSql, columns);
		}
	}
	
	/**
	 * Checks whether a Hive database named after the project code exists in
	 * the metastore backing the given engine.
	 * @param engine_id    engine whose metastore connection parameters are used
	 * @param project_code project code; compared (lower-cased) against DBS.name
	 * @return true when a matching database exists, false otherwise
	 * @throws Exception when the metastore cannot be reached or queried
	 */
	public boolean getHiveDb(Object engine_id,Object project_code) throws Exception {
		boolean flag = true;
		Connection conn = null;
		try{
			log.info("engine_id："+engine_id);
			log.info("project_code："+project_code);
			project_code = CommonUtils.valueOf(project_code).toLowerCase();
			
			conn = getConnection(engine_id);
			
			String sql = "select distinct db_id,name from DBS where name = ?";
			List<Map<String,Object>> dbList = JdbcUtil.queryForList(conn, sql,project_code);
			if(dbList.isEmpty()){
				flag = false;
			}
		}catch(Exception e){
			if(log.isErrorEnabled()){
				log.error(e.getMessage(),e);
			}
			// Fix: keep the original exception as the cause instead of discarding it.
			throw new Exception(e.getMessage(), e);
		}finally{
			JdbcUtil.close(conn);
		}
		return flag;
	}
	
	/**
	 * Synchronizes one project's Hive metadata: diffs the mid-platform cube
	 * metadata against the Hive metastore and applies add/mod/del actions.
	 * @param projectInfo  project row (project_id, project_code, engine_id, ...)
	 * @param syncHiveList accumulator of rows later batch-inserted into t_meta_sync_hive
	 * @throws Exception when querying either store or applying a change fails
	 */
	private void syncHiveMeta(Map<String,Object> projectInfo,List<Map<String,Object>> syncHiveList) throws Exception {
		// table name (lower-case) -> "add" | "mod" | "del"
		Map<String, String> diffMap = new HashMap<String, String>();
		
		Map<String,Map<String,List<Map<String,Object>>>> hiveColMap = compareHiveMeta(projectInfo,diffMap,syncHiveList);

		List<String> invalidCubeCodeList = new ArrayList<String>();
		
		// Load tables previously marked invalid, used when comparing additions: if a Hive
		// table was deleted by mistake (metadata set to invalid state) and later recreated,
		// the metadata is set back to published state.
		// status: 1 = table was deleted, 99 = table columns were modified
		String sql = "select cube_code,cube_name,object_type from t_meta_cubes where object_type in ('11','16','17','18','24','38','39','40','41','42','44','50') and status in('1','99') and project_id = ?";
		List<Map<String,Object>> invalidCubeList = dmJdbcTemplate.queryForList(sql,projectInfo.get("project_id"));
		for(Map<String,Object> invalidCubeMap : invalidCubeList){
			
			invalidCubeCodeList.add(CommonUtils.valueOf(invalidCubeMap.get("cube_code")).toLowerCase());
		}
		
		for (Map.Entry<String, String> entry : diffMap.entrySet()) {
			 String cube_code = entry.getKey();
			 String tag = entry.getValue();
			
			 if(StringUtils.equals(tag, "del")){
				 del(projectInfo, cube_code);
			 }else{
				 // Column info of the table.
				 List<Map<String,Object>> colList = hiveColMap.get(cube_code).get("columns");
				 // Partition info of the table.
				 List<Map<String,Object>> partColList = hiveColMap.get(cube_code).get("partition");
				 String cube_name = "sync";
				 if(colList != null && !colList.isEmpty()){
					 cube_name = CommonUtils.valueOf(colList.get(0).get("tbl_comment"));
					 cube_name = StringUtils.isBlank(cube_name) ? "sync" : cube_name;
				 }
				 if(partColList == null){
					 partColList = new ArrayList<Map<String,Object>>();
				 }
				 if (StringUtils.equals(tag, "mod")) {
					 mod(projectInfo,cube_code,colList,partColList,invalidCubeCodeList);
				 }else if(StringUtils.equals(tag, "add")){
					 add(projectInfo,cube_code,cube_name,colList,partColList,invalidCubeCodeList);
				 }
			 }
		 }
	}
	
	/**
	 * Handles a Hive table that disappeared from the metastore: for fact-like
	 * cubes (object_type 11 or 50) all mid-platform metadata is removed.
	 * The special tables t_meta_dim_param and id_mapping are never deleted.
	 */
	private void del(Map<String,Object> projectInfo, String cube_code) throws Exception {
		Object project_id = projectInfo.get("project_id");
		String sql = "select cube_id,cube_code,object_type,logic_id,status,project_id from t_meta_cubes where object_type in ('11','16','17','18','24','38','39','40','41','42','44','50') and cube_code = ? and project_id = ?";

		log.info("===========del===============");
		log.info("cube_code："+cube_code);
		log.info("project_id："+project_id);

		Map<String,Object> cubeRow = dmJdbcTemplate.queryForMap(sql, cube_code, project_id);
		String objectType = CommonUtils.valueOf(cubeRow.get("object_type"));
		boolean factLike = StringUtils.equals(objectType, ConstantUtil.FACT_OBJECT_TYPE)
				|| StringUtils.equals(objectType, "50");
		boolean protectedTable = StringUtils.equals(cube_code, "t_meta_dim_param")
				|| StringUtils.equals(cube_code, "id_mapping");
		if (factLike && !protectedTable) {
			delFactOper(cubeRow);
		}
	}

	/**
	 * Handles a table newly seen in Hive: either restores a previously
	 * invalidated cube (delegating to mod) or registers a brand-new cube with
	 * its element relations and partition definitions.
	 * @param projectInfo         project row (project_id, busin_id, login_code, ...)
	 * @param cube_code           table name (lower-case)
	 * @param cube_name           display name, taken from the table comment or "sync"
	 * @param colList             current Hive columns of the table
	 * @param partColList         current Hive partition columns (never null here)
	 * @param invalidCubeCodeList codes of cubes previously marked invalid
	 */
	private void add(Map<String,Object> projectInfo,String cube_code,Object cube_name, List<Map<String,Object>> colList,List<Map<String,Object>> partColList,List<String> invalidCubeCodeList) throws Exception {
		Object project_id = projectInfo.get("project_id");
		
		log.info("===========add===============");
		log.info("cube_code："+cube_code);
		log.info("project_id："+project_id);
		log.info("project_code："+projectInfo.get("project_code"));
		
		if(invalidCubeCodeList.contains(cube_code)){
		
			// The Hive table was recreated and its columns may differ, so the columns must
			// still be compared; only when every column in use exists can the original
			// state really be restored.
			mod(projectInfo,cube_code,colList,partColList,invalidCubeCodeList);
	
		}else{
			String cube_id = insertCube(projectInfo, cube_code, cube_name);
			
			// Insert into the object registration table.
			commonService.insertObjectRegister("50", cube_id, cube_code);
			
			List<Map<String, Object>> relList = getEntityRelationList(colList, projectInfo.get("busin_id"), cube_id,projectInfo.get("login_code"));
			if(!relList.isEmpty()){
				String sql = "insert into t_meta_entity_relation(element_id,cube_id,status,oper_time) values (?,?,?,?)";
				String columns = "element_id,cube_id,status,oper_time";
				commonDao.insertBatchTableData(relList, sql, columns);
			}
			
			if(partColList != null && partColList.size() > 0){// the table may have no partitions
				String oper_time = DateUtils.getCurrentDateTime();
				List<Map<String,Object>> entityList = new ArrayList<Map<String,Object>>();
				for(Map<String,Object> partMap : partColList){
					Map<String,Object> entityMap = new HashMap<String,Object>();
					entityMap.put("cube_id", cube_id);
					entityMap.put("partition_code", partMap.get("col_name"));
					entityMap.put("partition_type", partMap.get("col_type"));
					entityMap.put("oper_time", oper_time);
					entityList.add(entityMap);
				}
				String sql = "insert into t_meta_entity_partition(cube_id,partition_code,partition_type,oper_time) values (?,?,?,?)";
				String columns = "cube_id,partition_code,partition_type,oper_time";
				commonDao.insertBatchTableData(entityList, sql, columns);
			}
		}
	}
	
	/**
	 * Handles a table whose DDL time changed in Hive: rebuilds element/partition
	 * relations and, for previously invalidated cubes, restores their status.
	 * @param projectInfo         project row (project_id, busin_id, ...)
	 * @param cube_code           table name (lower-case)
	 * @param colList             current Hive columns of the table
	 * @param partColList         current Hive partition columns
	 * @param invalidCubeCodeList codes of cubes previously marked invalid
	 */
	private void mod(Map<String,Object> projectInfo, Object cube_code,List<Map<String,Object>> colList,List<Map<String,Object>> partColList,List<String> invalidCubeCodeList) throws Exception {
		Object project_id = projectInfo.get("project_id");
		Object busin_id = projectInfo.get("busin_id");
		
		String sql = "select cube_id,cube_code,object_type,status,logic_id,project_id from t_meta_cubes where object_type in ('11','16','17','18','24','38','39','40','41','42','44','50') and cube_code = ? and project_id = ?";
		
		log.info("=============mod==============");
		log.info("cube_code："+cube_code);
		log.info("project_id："+project_id);
		
		Map<String,Object> cubeMap = dmJdbcTemplate.queryForMap(sql,cube_code,project_id);
		String object_type = CommonUtils.valueOf(cubeMap.get("object_type"));
		Object cube_id = cubeMap.get("cube_id");
		
		List<String> newElementList = new ArrayList<String>();
		
		// Build the latest column/element relations for the table.
		List<Map<String, Object>> relList = getNewRelElementList(projectInfo, colList, cube_id, newElementList);
		
		// Query the previous element relations, used for the column-level diff.
		List<Map<String, Object>> beforeUsedCubeRelaList = dmJdbcTemplate.queryForList(getElementSql(),cube_id);
		
		boolean isElementChange = isElementChange(colList,partColList, cube_id, beforeUsedCubeRelaList,object_type,busin_id);
		
		String cubeCode = CommonUtils.valueOf(cube_code);
		if(invalidCubeCodeList.contains(cube_code)){// the table's columns may have been modified several times
			if(StringUtils.equals(object_type, "11") || StringUtils.equals(object_type, "50")){
				if(StringUtils.equals(cubeCode, "t_meta_dim_param") || StringUtils.equals(cubeCode, "id_mapping")){
				}else{
					delOrModFactEffectRestore(cube_id,cube_code,colList,partColList, newElementList, relList, beforeUsedCubeRelaList,object_type,busin_id);
				}
			}
		}else{
			if(StringUtils.equals(object_type, "11") || StringUtils.equals(object_type, "50")){
				// NOTE(review): && binds tighter than ||, so this condition reads as
				// equals("t_meta_dim_param") || (equals("id_mapping") && isElementChange).
				// If both special tables were meant to be treated alike, parentheses are
				// missing around the two equals() calls — confirm intent before changing.
				if(StringUtils.equals(cubeCode, "t_meta_dim_param") || StringUtils.equals(cubeCode, "id_mapping") && isElementChange){
				}else{
					delOrModFactOper(cubeMap,colList,partColList, newElementList, relList,beforeUsedCubeRelaList,busin_id);
				}
			}
		}
	}

	/**
	 * Diffs the mid-platform cube metadata against the Hive metastore for one
	 * project and records the outcome per table.
	 * Overall logic:
	 *   1. cube exists, Hive table missing        -> the Hive table was deleted ("del")
	 *   2. cube missing, Hive table exists        -> new Hive table ("add")
	 *   3. both exist and the DDL time changed    -> table columns were modified ("mod")
	 * @param projectInfo  project row (project_id, project_code, engine_id, ...)
	 * @param diffMap      out-parameter: table name (lower-case) -> "add" | "mod" | "del"
	 * @param syncHiveList out-parameter: column and partition rows collected for the
	 *                     batch refresh of t_meta_sync_hive
	 * @return table name -> { "columns": column rows, "partition": partition rows }
	 * @throws Exception when the metastore cannot be reached or queried
	 */
	private Map<String,Map<String,List<Map<String,Object>>>> compareHiveMeta(Map<String,Object> projectInfo,Map<String, String> diffMap,List<Map<String,Object>> syncHiveList) throws Exception {
		Map<String, Map<String,List<Map<String,Object>>>> hiveColMap = new LinkedHashMap<String,Map<String,List<Map<String,Object>>>>();
		Connection metaConn = null;
		try{
			metaConn = getConnection(projectInfo.get("engine_id"));
			
			String cubeSql = getCubeSql();
			List<Map<String, Object>> dsCubeList = dmJdbcTemplate.queryForList(cubeSql,projectInfo.get("project_id"));
			
			// cube_code (lower-case) -> "status@transient_time" of the recorded metadata
			Map<String,String> dcMap = new HashMap<String, String>();
			for (Map<String, Object> dsCube : dsCubeList) {
				String cube_code = CommonUtils.valueOf(dsCube.get("cube_code")).toLowerCase();
				String status = CommonUtils.valueOf(dsCube.get("status"));
				String transient_time = CommonUtils.valueOf(dsCube.get("transient_time"));
				
				dcMap.put(cube_code, status+"@"+transient_time);
			}
			
			String tblSql = getTblSql();
			// Every table currently present in the Hive metastore for this project.
			List<Map<String, Object>> tblList = JdbcUtil.queryForList(metaConn, tblSql,projectInfo.get("project_code"));
			Map<String, String> hiveTblMap = new HashMap<String, String>();
			for(Map<String,Object> meta : tblList) {
				String tbl_name = CommonUtils.valueOf(meta.get("tbl_name")).toLowerCase();
				// Any modification of a Hive table bumps its transient_lastDdlTime.
				String transient_time = CommonUtils.valueOf(meta.get("transient_time"));
				
				hiveTblMap.put(tbl_name, transient_time);
				
				if (dcMap.containsKey(tbl_name)) {
					// Known table: check its recorded state; an invalid state means the
					// table had been deleted or its columns had been modified before.
					String[] dcStrSplit = dcMap.get(tbl_name).split("@");
					String dc_status = dcStrSplit[0];// 1: table was deleted, 99: columns changed (added/removed/altered)
					String dc_transient_time = "";
					if(dcStrSplit.length == 2){
						dc_transient_time = dcStrSplit[1];
					}
					
					if(!StringUtils.equals(dc_transient_time, transient_time)){
						if(StringUtils.equals(dc_status, "1")){// invalid: the recreated table counts as an addition
							diffMap.put(tbl_name, "add");
						}else{
							// Columns were modified (status may be 99); a timestamp mismatch
							// means the table or its structure was updated.
							diffMap.put(tbl_name, "mod");
						}
					}
				} else {
					// Unknown to the mid-platform: a new table, or one that was renamed.
					diffMap.put(tbl_name, "add");
				}
			}
			
			for(Entry<String, String> entry : dcMap.entrySet()){
				if (!hiveTblMap.containsKey(entry.getKey())) {
					// Recorded cube no longer present in Hive: renamed or dropped.
					diffMap.put(entry.getKey(), "del");
				}
	        }
			
			String colSql = getColSql();
			List<Map<String, Object>> colList = JdbcUtil.queryForList(metaConn,colSql,projectInfo.get("project_id"),DateUtils.getCurrentDateTime(),projectInfo.get("project_code"));
			// All column rows go into one list for the batch sync of t_meta_sync_hive.
			syncHiveList.addAll(colList);

			String partColSql = getPartitionColSql();
			List<Map<String, Object>> partColList = JdbcUtil.queryForList(metaConn,partColSql,projectInfo.get("project_id"),DateUtils.getCurrentDateTime(),projectInfo.get("project_code"));
			// Partition rows are synchronized as well.
			syncHiveList.addAll(partColList);
			
			// Group the column rows by table name.
			for(Map<String,Object> meta : colList) {
				String tbl_name = CommonUtils.valueOf(meta.get("tbl_name")).toLowerCase();
				Map<String,List<Map<String,Object>>> tblEntry = hiveColMap.get(tbl_name);
				if(tblEntry == null){
					tblEntry = new HashMap<String,List<Map<String,Object>>>();
					tblEntry.put("columns", new ArrayList<Map<String,Object>>());
					hiveColMap.put(tbl_name, tblEntry);
				}
				tblEntry.get("columns").add(meta);
			}
			
			// Group the partition rows by table name.
			// Fix: the original else-branch dereferenced hiveColMap.get(tbl_name) without a
			// null check, so a table with partition rows but no column rows caused an NPE.
			for(Map<String,Object> meta : partColList) {
				String tbl_name = CommonUtils.valueOf(meta.get("tbl_name")).toLowerCase();
				Map<String,List<Map<String,Object>>> tblEntry = hiveColMap.get(tbl_name);
				if(tblEntry == null){
					tblEntry = new HashMap<String,List<Map<String,Object>>>();
					hiveColMap.put(tbl_name, tblEntry);
				}
				List<Map<String,Object>> partInfo = tblEntry.get("partition");
				if(partInfo == null){
					partInfo = new ArrayList<Map<String,Object>>();
					tblEntry.put("partition", partInfo);
				}
				partInfo.add(meta);
			}
		}catch(Exception e){
			// Fix: keep the original exception as the cause instead of discarding it.
			throw new Exception(e.getMessage(), e);
		}finally{
			JdbcUtil.close(metaConn);
		}
		return hiveColMap;
	}

	

	/**
	 * Applies a column/partition modification to an existing cube: rewrites
	 * its element relations and partition definitions.
	 * NOTE(review): colList, newElementList, beforeUsedCubeRelaList and busin_id
	 * are unused here — apparently kept for signature parity with
	 * delOrModFactEffectRestore; confirm before removing.
	 */
	private void delOrModFactOper(Map<String, Object> cubeMap,List<Map<String,Object>> colList,List<Map<String,Object>> partColList,List<String> newElementList, 
			List<Map<String, Object>> relList,List<Map<String, Object>> beforeUsedCubeRelaList,Object busin_id) throws Exception {
		Object cube_id = cubeMap.get("cube_id");
		
		updateEntityRel(cube_id, relList);
		
		updatePartitionRel(cube_id,partColList);
		
	}
	
	/**
	 * Restores a previously invalidated cube whose Hive table was recreated:
	 * sets the cube back to status '6' and rewrites its element relations and
	 * partition definitions.
	 * NOTE(review): cube_code, colList, newElementList, beforeUsedCubeRelaList,
	 * object_type and busin_id are unused here — confirm before removing.
	 */
	private void delOrModFactEffectRestore(Object cube_id,Object cube_code,List<Map<String,Object>> colList,List<Map<String,Object>> partColList,
			List<String> newElementList,List<Map<String, Object>> relList, List<Map<String, Object>> beforeUsedCubeRelaList,String object_type,Object busin_id) throws Exception {
		
		String sql = "update t_meta_cubes set status = '6' where cube_id = ?";
		dmJdbcTemplate.update(sql,cube_id);
		
		// The table's columns were modified: re-insert the element relations.
		updateEntityRel(cube_id, relList);
		
		updatePartitionRel(cube_id, partColList);
	}

	/**
	 * Determines whether the column set or partition set of a table differs
	 * from the metadata previously recorded for its cube.
	 * @param colList                current Hive columns of the table
	 * @param partColList            current Hive partition columns of the table
	 * @param cube_id                cube whose recorded metadata is compared
	 * @param beforeUsedCubeRelaList previously recorded element relations of the cube
	 * @param object_type            cube object type; drives how partitions are checked
	 * @param busin_id               business field id, used for the default-partition lookup
	 * @return true when the columns or the partitions changed; false only when both match
	 */
	private boolean isElementChange(List<Map<String, Object>> colList,List<Map<String,Object>> partColList, 
			Object cube_id,List<Map<String, Object>> beforeUsedCubeRelaList,String object_type,Object busin_id) {
		
		final String unionAll = " union all ";
		
		// Count how many of the current columns already exist (same code and type)
		// among the element relations recorded for this cube.
		// Fix: guard against an empty/null colList — the original called
		// StringBuffer.delete with a negative index and crashed.
		List<Map<String,Object>> elementInfo = new ArrayList<Map<String,Object>>();
		if(colList != null && !colList.isEmpty()){
			StringBuffer elementSql = new StringBuffer();
			List<Object> colsInfo = new LinkedList<Object>();
			for(Map<String,Object> colMap : colList){
				String element_code = CommonUtils.valueOf(colMap.get("col_name")).toLowerCase();
				String element_type = CommonUtils.valueOf(colMap.get("col_type"));
				
				elementSql.append("select ele.element_code");
				elementSql.append("  from t_meta_entity_relation rel, t_meta_element ele");
				elementSql.append(" where rel.element_id = ele.element_id");
				elementSql.append("   and ele.status = '0'");
				elementSql.append("   and ele.element_code = ?");
				elementSql.append("   and ele.element_type = ?");
				elementSql.append("   and rel.cube_id = ?");
				elementSql.append(unionAll);
				
				colsInfo.add(element_code);
				colsInfo.add(element_type);
				colsInfo.add(cube_id);
			}
			// Strip the trailing separator. (The original removed only 10 of its 11
			// characters, leaving a harmless trailing space.)
			elementSql.delete(elementSql.length() - unionAll.length(), elementSql.length());
			
			elementInfo = dmJdbcTemplate.queryForList(elementSql.toString(),colsInfo.toArray());
		}
		
		boolean isElementChange = true;
		if(elementInfo.size() == beforeUsedCubeRelaList.size()){
			isElementChange = false;
		}
		// System-built tables: dimensions, fact logic, derived metrics, summary logic,
		// factory labels and label fusion all use the default partition; others TBD.
		List<String> fixDsList = new ArrayList<String>();
		fixDsList.add(ConstantUtil.DIMLOGIC_OBJECT_TYPE);
		fixDsList.add(ConstantUtil.FACTLOGIC_OBJECT_TYPE);
		fixDsList.add(ConstantUtil.SUMMARYTEMP_OBJECT_TYPE);
		fixDsList.add(ConstantUtil.SUMMARYLOGIC_OBJECT_TYPE);
		fixDsList.add(ConstantUtil.FACTORYLABEL_OBJECT_TYPE);
		fixDsList.add(ConstantUtil.LOGICLABEL_OBJECT_TYPE);
		
		boolean isPartElementChange = true;
		if(fixDsList.contains(object_type)){
			Map<String, Object> dsMap = commonService.getDsInfo(busin_id);
			
			String partition_code = CommonUtils.valueOf(dsMap.get("partition_code"));
			String data_type = CommonUtils.valueOf(dsMap.get("data_type"));
			
			// Unchanged only when exactly one partition exists and it matches the default.
			if(partColList != null && partColList.size() == 1){
				Map<String,Object> partMap = partColList.get(0);
				String partCode = CommonUtils.valueOf(partMap.get("col_name"));
				String partType = CommonUtils.valueOf(partMap.get("col_type"));
				if(StringUtils.equals(partition_code, partCode) && StringUtils.equals(data_type, partType)){
					isPartElementChange = false;
				}
			}
		}else{
			// object_type 11/50/38/41 etc. may have zero, one or many partitions
			// (e.g. t_bl_behavior_detail, t_bl_behavior_detail_sample).
			if(partColList != null && partColList.size() > 0){
				StringBuffer partSql = new StringBuffer();
				List<Object> partColsInfo = new LinkedList<Object>();
				for(Map<String,Object> colMap : partColList){
					String element_code = CommonUtils.valueOf(colMap.get("col_name")).toLowerCase();
					
					partSql.append("select partition_code from t_meta_entity_partition where partition_code = ? and partition_type = ? and cube_id = ?");
					partSql.append(unionAll);
					
					partColsInfo.add(element_code);
					partColsInfo.add(colMap.get("col_type"));
					partColsInfo.add(cube_id);
				}
				partSql.delete(partSql.length() - unionAll.length(), partSql.length());
				
				List<Map<String,Object>> partInfo = dmJdbcTemplate.queryForList(partSql.toString(),partColsInfo.toArray());

				if(partInfo.size() == partColList.size()){
					isPartElementChange = false;
				}
			}else{
				String sql = "select 1 from t_meta_entity_partition where cube_id = ?";
				List<Map<String,Object>> entityPartList = dmJdbcTemplate.queryForList(sql,cube_id);
				if(entityPartList.size() == 0){// neither side has partitions: the table never had any
					isPartElementChange = false;
				}
			}
		}		
		return isElementChange || isPartElementChange;// only when both are false are the fields unchanged
	}
	
	/** Deletes all mid-platform metadata for the fact cube described by cubeMap. */
	private void delFactOper(Map<String,Object> cubeMap) {
		delFactInfo(cubeMap.get("cube_id"));
	}

	/**
	 * Removes every mid-platform record belonging to a cube: cube relations,
	 * element relations, partitions, register/lifecycle/trail entries and
	 * finally the cube row itself. The delete order matters.
	 * @param cube_id id of the cube being purged
	 */
	private void delFactInfo(Object cube_id) {
		String sql = "delete from t_meta_cube_relation where (parent_id = ? or child_id = ?)";
		dmJdbcTemplate.update(sql,cube_id,cube_id);
		
		// The table is gone; tag its columns as a marker so later cleanup jobs can find them.
		sql = "update t_meta_element set comment ='sync' where element_id in(select element_id from t_meta_entity_relation where cube_id = ?)";
		dmJdbcTemplate.update(sql,cube_id);
		
		sql = "delete from t_meta_entity_relation where cube_id = ?";
		dmJdbcTemplate.update(sql,cube_id);
		
		sql = "delete from t_meta_entity_partition where cube_id = ?";
		dmJdbcTemplate.update(sql,cube_id);
		
		//sql = "delete from t_meta_sub_object_register where object_id in(select id from t_meta_object_register where object_id = ?)";
		//dmJdbcTemplate.update(sql,cube_id);
		
		// If the sub-register table still has records the table is in use and must not be
		// deleted; manual intervention is needed to restore the dropped table.
		sql = "delete from t_meta_object_register where object_id = ?";
		dmJdbcTemplate.update(sql,cube_id);
		
		sql = "delete from t_meta_object_lifecycle where object_id = ?";
		dmJdbcTemplate.update(sql,cube_id);
		
		sql = "delete from t_asset_m_trail where id = ?";
		dmJdbcTemplate.update(sql,cube_id);
		
		sql = "delete from t_meta_cubes where cube_id = ?";
		dmJdbcTemplate.update(sql,cube_id);
	}

	/**
	 * Replaces the element-to-cube relations of a cube: existing rows are
	 * removed, then the supplied relations (if any) are batch inserted.
	 */
	private void updateEntityRel(Object cube_id, List<Map<String, Object>> relList) {
		dmJdbcTemplate.update("delete from t_meta_entity_relation where cube_id = ?", cube_id);
		if (relList.isEmpty()) {
			return;
		}
		String insertSql = "insert into t_meta_entity_relation(element_id,cube_id,status,oper_time) values (?,?,?,?)";
		commonDao.insertBatchTableData(relList, insertSql, "element_id,cube_id,status,oper_time");
	}
	
	/**
	 * Replaces the partition definitions of a cube: existing rows are removed,
	 * then one row per partition column (if any) is batch inserted.
	 */
	private void updatePartitionRel(Object cube_id, List<Map<String, Object>> partitionList) {
		dmJdbcTemplate.update("delete from t_meta_entity_partition where cube_id = ?", cube_id);
		if (partitionList.isEmpty()) {
			return;
		}
		List<Map<String, Object>> rows = new ArrayList<Map<String, Object>>();
		for (Map<String, Object> partition : partitionList) {
			Map<String, Object> row = new HashMap<String, Object>();
			row.put("cube_id", cube_id);
			row.put("partition_code", partition.get("col_name"));
			row.put("partition_type", partition.get("col_type"));
			row.put("oper_time", new Date());
			rows.add(row);
		}
		String insertSql = "insert into t_meta_entity_partition(cube_id,partition_code,partition_type,oper_time) values (?,?,?,?)";
		commonDao.insertBatchTableData(rows, insertSql, "cube_id,partition_code,partition_type,oper_time");
	}

	/**
	 * Builds the fresh element-to-cube relation rows for a table's current
	 * Hive columns, creating t_meta_element entries for unknown columns.
	 * The id of every element (existing or new) is appended to newElementList.
	 */
	private List<Map<String, Object>> getNewRelElementList(Map<String, Object> projectInfo,List<Map<String, Object>> colList, Object cube_id, List<String> newElementList) {
		List<Map<String, Object>> relations = new ArrayList<Map<String, Object>>();
		String lookupSql = "select element_id from t_meta_element where element_code = ? and element_type = ?";
		for (Map<String, Object> col : colList) {
			String code = CommonUtils.valueOf(col.get("col_name")).toLowerCase();
			String type = CommonUtils.valueOf(col.get("col_type")).toLowerCase();

			List<Map<String, Object>> existing = dmJdbcTemplate.queryForList(lookupSql, code, type);
			String elementId;
			if (existing.isEmpty()) {
				// Unknown column: register it as a new element first.
				elementId = insertElement(projectInfo.get("busin_id"), col, projectInfo.get("login_code"));
			} else {
				elementId = CommonUtils.valueOf(existing.get(0).get("element_id"));
			}
			newElementList.add(elementId);

			Map<String, Object> relation = new HashMap<String, Object>();
			relation.put("element_id", elementId);
			relation.put("cube_id", cube_id);
			relation.put("status", "0");
			relation.put("oper_time", DateUtils.getCurrentDateTime());
			relations.add(relation);
		}
		return relations;
	}

	/**
	 * Opens a JDBC connection to the Hive metastore database configured for
	 * the given engine. The caller is responsible for closing it.
	 */
	private Connection getConnection(Object engine_id) throws Exception{
		String paramSql = "select param_key, param_value from t_meta_engine_param where id = ?";
		List<Map<String, Object>> rows = dmJdbcTemplate.queryForList(paramSql, engine_id);

		Map<String, String> params = new HashMap<String, String>();
		for (Map<String, Object> row : rows) {
			params.put(CommonUtils.valueOf(row.get("param_key")), CommonUtils.valueOf(row.get("param_value")));
		}
		String url = params.get("metastore_host");
		String user = params.get("metastore_user_name");
		// The stored password is AES-encrypted.
		String password = AesEncryptUtils.decrypt(params.get("metastore_user_passwd"));

		return JdbcUtil.getConnection(url, user, password, "mysql");
	}
	
	/**
	 * Registers a Hive column as a new t_meta_element row plus its business
	 * field relation, and returns the generated element id.
	 */
	private String insertElement(Object busin_id, Map<String, Object> colMap,Object login_code) {
		String code = CommonUtils.valueOf(colMap.get("col_name"));
		String type = CommonUtils.valueOf(colMap.get("col_type"));
		String name = CommonUtils.valueOf(colMap.get("col_comment"));
		if (StringUtils.isBlank(name)) {
			// Fall back to the column code when no comment is available.
			name = code;
		}
		String now = DateUtils.getCurrentDateTime();
		String elementId = CommonUtils.generateId();

		Map<String, Object> element = new HashMap<String, Object>();
		element.put("element_id", elementId);
		element.put("element_code", code);
		element.put("element_name", name);
		element.put("element_type", type);
		element.put("status", "0");
		element.put("auth_status", "0");
		element.put("insert_type", "insert");
		element.put("oper_user", login_code);
		element.put("oper_time", now);
		commonDao.insertTableData(element, new StringBuilder("insert into t_meta_element ("));

		Map<String, Object> fieldRelation = new HashMap<String, Object>();
		fieldRelation.put("busin_id", busin_id);
		fieldRelation.put("element_id", elementId);
		fieldRelation.put("oper_time", now);
		commonDao.insertTableData(fieldRelation, new StringBuilder("insert into t_meta_field_relation ("));

		return elementId;
	}

	/**
	 * Inserts a new t_meta_cubes row for a synchronized Hive table (object
	 * type "50", status "6", hive storage) and returns the generated cube id.
	 */
	private String insertCube(Map<String, Object> projectInfo, String cube_code, Object cube_name) {
		String cubeId = CommonUtils.generateId();

		Map<String, Object> cube = new HashMap<String, Object>();
		cube.put("cube_id", cubeId);
		cube.put("cube_code", cube_code);
		cube.put("cube_name", cube_name);
		cube.put("project_id", projectInfo.get("project_id"));
		cube.put("project_name", projectInfo.get("project_name"));
		cube.put("project_code", projectInfo.get("project_code"));
		cube.put("business_field_id", projectInfo.get("busin_id"));
		cube.put("business_field_name", projectInfo.get("busin_name"));
		cube.put("dw_level", "sync");
		cube.put("object_type", "50");
		cube.put("status", "6");
		cube.put("storage_type", "hive");
		cube.put("oper_user", projectInfo.get("login_code"));
		cube.put("create_time", DateUtils.getCurrentDateTime());
		commonDao.insertTableData(cube, new StringBuilder("insert into t_meta_cubes ("));
		return cubeId;
	}

	/**
	 * Builds element-to-cube relation rows for a brand-new cube, creating
	 * t_meta_element entries for columns not yet registered.
	 * NOTE(review): unlike getNewRelElementList, the lookup here does not
	 * lower-case the column code/type — confirm whether that is intended.
	 */
	private List<Map<String, Object>> getEntityRelationList(List<Map<String, Object>> colList, Object busin_id,Object cube_id,Object login_code) {
		List<Map<String, Object>> relations = new ArrayList<Map<String, Object>>();
		String lookupSql = "select element_id from t_meta_element where element_code = ? and element_type = ?";
		for (Map<String, Object> col : colList) {
			String code = CommonUtils.valueOf(col.get("col_name"));
			String type = CommonUtils.valueOf(col.get("col_type"));

			List<Map<String, Object>> existing = dmJdbcTemplate.queryForList(lookupSql, code, type);
			Object elementId;
			if (existing.isEmpty()) {
				elementId = insertElement(busin_id, col, login_code);
			} else {
				elementId = existing.get(0).get("element_id");
			}

			Map<String, Object> relation = new HashMap<String, Object>();
			relation.put("element_id", elementId);
			relation.put("cube_id", cube_id);
			relation.put("status", "0");
			relation.put("oper_time", DateUtils.getCurrentDateTime());
			relations.add(relation);
		}
		return relations;
	}
	
	/** SQL selecting the active elements currently related to a cube (1 bind: cube_id). */
	private String getElementSql() {
		return "select ele.element_id, ele.element_code, ele.element_type, ele.element_name"
				+ "  from t_meta_entity_relation rel, t_meta_element ele"
				+ " where rel.element_id = ele.element_id"
				+ "   and ele.status = '0'"
				+ "   and rel.cube_id = ?";
	}
	
	/** SQL listing all Hive tables of a database with their last DDL time (1 bind: db name). */
	private String getTblSql() {
		return "select t.tbl_id, t.create_time, t.tbl_name, t.tbl_type, tp.param_value as transient_time"
				+ " from TBLS t, DBS d, TABLE_PARAMS tp"
				+ " where d.db_id = t.db_id"
				+ "  and t.tbl_id = tp.tbl_id"
				+ "  and tp.param_key in ('transient_lastDdlTime')"
				+ "  and d.name = ?";
	}

	/**
	 * SQL selecting every column of every table in a Hive database, shaped as
	 * t_meta_sync_hive rows (binds: project_id, last_sync_time, db name).
	 */
	private String getColSql() {
		return "select ? as project_id,"
				+ "       db.db_id,"
				+ "       db.name              as db_name,"
				+ "       tbl.tbl_id,"
				+ "       tbl.tbl_name,"
				+ "       tp.param_value       as tbl_comment,"
				+ "       tbl.create_time      as tbl_create_time,"
				+ "       tbl.tbl_type,"
				+ "       tp_trans.param_value as transient_time,"
				+ "       col.column_name      as col_name,"
				+ "       col.comment          as col_comment,"
				+ "       col.type_name        as col_type,"
				+ "       col.integer_idx      as col_integer_idx,"
				// col_category: 0 = column, 1 = partition
				+ "       '0' as col_category,"
				+ "       ? as last_sync_time"
				+ "  from tbls tbl"
				+ "  left join table_params tp on tp.tbl_id = tbl.tbl_id and tp.param_key = 'comment'"
				+ "  left join table_params tp_trans on tp_trans.tbl_id = tbl.tbl_id and tp_trans.param_key = 'transient_lastDdlTime'"
				+ " inner join dbs db on db.db_id = tbl.db_id and db.name = ?"
				+ " inner join sds sd on tbl.sd_id = sd.sd_id"
				+ " inner join columns_v2 col on sd.cd_id = col.cd_id";
	}

	/**
	 * SQL joining recorded cubes with their last synced Hive DDL time
	 * (1 bind: project_id).
	 */
	private String getCubeSql() {
		return "select distinct cubes.cube_code, hive.transient_time,cubes.status"
				+ "  from t_meta_cubes cubes"
				+ "  left join t_meta_sync_hive hive"
				+ "    on cubes.project_id = hive.project_id"
				+ "   and cubes.cube_code = hive.tbl_name"
				+ " where cubes.object_type in ('11','16','17','18','24','38','39','40','41','42','44','50')"
				+ "   and cubes.project_id = ?";
	}
	
	/**
	 * SQL selecting every partition key of every table in a Hive database,
	 * shaped as t_meta_sync_hive rows (binds: project_id, last_sync_time, db name).
	 */
	private String getPartitionColSql() {
		return "select ? as project_id,"
				+ "       db.db_id,"
				+ "       db.name as db_name,"
				+ "       tbl.tbl_id,"
				+ "       tbl.tbl_name,"
				+ "       tp.param_value as tbl_comment,"
				+ "       tbl.create_time as tbl_create_time,"
				+ "       tbl.tbl_type,"
				+ "       tp_trans.param_value as transient_time,"
				+ "       part.pkey_name as col_name,"
				+ "       part.pkey_comment as col_comment,"
				+ "       part.pkey_type as col_type,"
				+ "       part.integer_idx as col_integer_idx,"
				// col_category: 0 = column, 1 = partition
				+ "       '1' as col_category,"
				+ "       ? as last_sync_time"
				+ "  from tbls tbl"
				+ "  left join table_params tp on tp.tbl_id = tbl.tbl_id and tp.param_key = 'comment'"
				+ "  left join table_params tp_trans on tp_trans.tbl_id = tbl.tbl_id and tp_trans.param_key = 'transient_lastDdlTime'"
				+ " inner join dbs db on db.db_id = tbl.db_id and db.name = ?"
				+ " inner join partition_keys part on tbl.tbl_id = part.tbl_id";
	}
}