package com.walker.service.catalogue;

import java.sql.Connection;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Function;
import java.util.function.Predicate;
import java.util.stream.Collectors;

import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate;
import org.springframework.stereotype.Service;

import com.walker.bean.Consanguinity;
import com.walker.service.common.CommonService;
import com.walker.service.meta.AssetMetaDataCollectServ;
import com.walker.service.meta.SyncMetaHiveService;
import com.walker.util.AesEncryptUtils;
import com.walker.util.CommonDao;
import com.walker.util.CommonUtils;
import com.walker.util.ConstantUtil;
import com.walker.util.DateUtils;
import com.walker.util.ESUtils;
import com.walker.util.JdbcUtil;
import com.walker.util.PrestoUtil;

@Service
public class CatalogueDataTableService {

	// Commons-logging logger for this service.
	private static final Log log = LogFactory.getLog(CatalogueDataTableService.class);
	
	// JDBC template for the metadata ("dm") database holding t_meta_* / t_asset_* tables.
	@Autowired
	@Qualifier("dmJdbcTemplate")
	private JdbcTemplate dmJdbcTemplate;
	
	// JDBC template for the scheduler ("ds") database — queried for
	// t_ds_process_instance rows in getOutputInfo (presumably DolphinScheduler).
	@Autowired
	@Qualifier("dsJdbcTemplate")
	private JdbcTemplate dsJdbcTemplate;
	
	// Generic DAO that builds insert/update SQL from column->value maps
	// (see saveUseDesc / editUseDesc).
	@Autowired
	private CommonDao commonDao;
	
	// Shared business-service helper; not used in the visible portion of this file.
	@Autowired
	private CommonService commonService;
	
	// Metadata-collection service; not used in the visible portion of this file.
	@Autowired
	private AssetMetaDataCollectServ assetMetaDataCollectServ;
	
	// Hive metadata sync service; not used in the visible portion of this file.
	@Autowired
	private SyncMetaHiveService syncMetaHiveService;
	
	// Presto client used by previewData for hive-table sampling.
	@Autowired
	private PrestoUtil presto;
	
	// Elasticsearch client used by previewData for index-document sampling.
	@Autowired
	private ESUtils es;
	
	/**
	 * Pages through the data-table catalogue and decorates each row with a
	 * human-readable raw size and the caller's favourite ("collect") flag.
	 *
	 * @param params     optional filters: keyword, business_field_id, table_type,
	 *                   project_id, source_type; paging via pageSize / pageNo (1-based)
	 * @param login_code current user's login code, used for the is_collect lookup
	 * @return one page of table rows
	 */
	public List<Map<String, Object>> listPage(Map<String,Object> params,String login_code) {
		String pageSize = CommonUtils.valueOf(params.get("pageSize"));
		String pageNo = CommonUtils.valueOf(params.get("pageNo"));
		String keyword = CommonUtils.valueOf(params.get("keyword"));
		String businessFieldId = CommonUtils.valueOf(params.get("business_field_id"));
		String tableType = CommonUtils.valueOf(params.get("table_type"));
		String projectId = CommonUtils.valueOf(params.get("project_id"));
		String source_type = CommonUtils.valueOf(params.get("source_type"));
		
		NamedParameterJdbcTemplate givenParamJdbcTemp = new NamedParameterJdbcTemplate(dmJdbcTemplate);

		// Base select built elsewhere in this class; filters are appended below.
		StringBuffer sql = getCubeSql();
		
		Map<String, Object> args = new HashMap<>();
		args.put("keyword", "%" + keyword + "%");
		args.put("businessFieldId", businessFieldId);
		args.put("projectId", projectId);
		args.put("source_type", source_type);
		
		if (StringUtils.isNotBlank(keyword)) {
			sql.append(" and (t1.table_name like :keyword or t1.table_code like :keyword or t2.project_name like :keyword)");
		}
		if (StringUtils.isNotBlank(businessFieldId)) {
			sql.append(" and t2.busin_id =:businessFieldId");
		}
		if (StringUtils.isNotBlank(tableType)) {
			if(StringUtils.equals(tableType, "11") || StringUtils.equals(tableType, "50")){
				// 11 and 50 are both "physical table" variants; searched together.
				sql.append(" and t1.object_type in('11','50')");
			}else if(StringUtils.equals(tableType, "42")){
				// 42 (tag table) also covers the related tag object types.
				sql.append(" and t1.object_type in ('37','38','39','40','42','43')");
			}else{
				// BUG FIX: previously concatenated caller-supplied tableType directly
				// into the SQL string — an SQL-injection vector. Bind it instead.
				sql.append(" and t1.object_type = :tableType");
				args.put("tableType", tableType);
			}
		}
		if (StringUtils.isNotBlank(projectId)) {
			sql.append(" and t2.project_id =:projectId");
		}
		if(StringUtils.isNotBlank(source_type)){
			sql.append(" and t1.db_type =:source_type");
		}

		if (StringUtils.isNotBlank(pageSize)) {
			// Parse the paging values only when paging is requested; the original
			// parsed them unconditionally and threw NumberFormatException on blanks.
			args.put("pageSize", Integer.parseInt(pageSize));
			args.put("pageNo", (Integer.parseInt(pageNo) - 1) * Integer.parseInt(pageSize));
			sql.append(" limit :pageSize offset :pageNo");
		}

		List<Map<String, Object>> result = givenParamJdbcTemp.queryForList(sql.toString(), args);
		for (Map<String, Object> map : result) {
			String raw_size = CommonUtils.valueOf(map.get("raw_size"));
			raw_size = CommonUtils.formateRawSize(raw_size);
			map.put("raw_size",raw_size);
			
			// Per-row (N+1) lookup of the user's favourite flag; bounded by page size.
			String trailSql = "select is_collect from t_asset_m_trail where id = ? and login_code = ?";
			List<Map<String, Object>> collects = dmJdbcTemplate.queryForList(trailSql,new Object[] { map.get("cube_id"), login_code });
			if (collects.isEmpty()) {
				map.put("is_collect", 0);
			} else {
				map.put("is_collect", Integer.parseInt(collects.get(0).get("is_collect").toString()));
			}
		}
		return result;
	}
	
	/**
	 * Counts catalogue rows matching the same filters as {@code listPage}.
	 *
	 * @param params optional filters: keyword, business_field_id, table_type,
	 *               project_id, source_type
	 * @return the matching row count as a string
	 */
	public String listCount(Map<String,Object> params) {
		String keyword = CommonUtils.valueOf(params.get("keyword"));
		String businessFieldId = CommonUtils.valueOf(params.get("business_field_id"));
		String tableType = CommonUtils.valueOf(params.get("table_type"));
		String projectId = CommonUtils.valueOf(params.get("project_id"));
		String source_type = CommonUtils.valueOf(params.get("source_type"));
		
		NamedParameterJdbcTemplate givenParamJdbcTemp = new NamedParameterJdbcTemplate(dmJdbcTemplate);

		StringBuffer sql = new StringBuffer();
		sql.append("select count(1) count");
		sql.append(" from t_asset_meta_tab_collect t1 left join t_asset_meta_struct t2");
		sql.append("    on t1.table_id = t2.table_id");
		sql.append(" where 1 = 1");
		
		Map<String, Object> args = new HashMap<>();
		args.put("keyword", "%" + keyword + "%");
		args.put("businessFieldId", businessFieldId);
		args.put("projectId", projectId);
		args.put("source_type", source_type);
		if (StringUtils.isNotBlank(keyword)) {
			sql.append(" and (t1.table_name like :keyword or t1.table_code like :keyword or t2.project_name like :keyword)");
		}
		if (StringUtils.isNotBlank(businessFieldId)) {
			sql.append(" and t2.busin_id =:businessFieldId");
		}
		if (StringUtils.isNotBlank(tableType)){
			if(StringUtils.equals(tableType, "11") || StringUtils.equals(tableType, "50")){
				// 11 and 50 are both "physical table" variants; counted together.
				sql.append(" and t1.object_type in('11','50')");
			}else if(StringUtils.equals(tableType, "42")){
				// 42 (tag table) also covers the related tag object types.
				sql.append(" and t1.object_type in ('37','38','39','40','42','43')");
			}else{
				// BUG FIX: previously concatenated caller-supplied tableType directly
				// into the SQL string — an SQL-injection vector. Bind it instead.
				sql.append(" and t1.object_type = :tableType");
				args.put("tableType", tableType);
			}
		}
		if(StringUtils.isNotBlank(source_type)){
			sql.append(" and t1.db_type =:source_type");
		}
		if (StringUtils.isNotBlank(projectId)) {
			sql.append(" and t2.project_id =:projectId");
		}

		return givenParamJdbcTemp.queryForMap(sql.toString(), args).get("count").toString();
	}
	
	/**
	 * Partition statistics for a table: per partition column, the list of
	 * (create_time, num_rows, raw_size, hdfs_size) samples, optionally limited
	 * to dates up to end_time.
	 *
	 * @param params cube_id (required), end_time (optional upper date bound)
	 * @return map with "columns" (partition column names) and "partitions"
	 *         (per-column sample lists)
	 */
	public Map<String, Object> getPartitionInfo(Map<String,Object> params) {
		Object cube_id = params.get("cube_id");
		String end_time = CommonUtils.valueOf(params.get("end_time"));
		
		Map<String, Object> object = new HashMap<String, Object>();

		List<Map<String, Object>> partitions = new ArrayList<Map<String, Object>>();
		List<Object> param = new ArrayList<Object>();
		List<String> columns = new ArrayList<String>();

		// One row per partition column; the four GROUP_CONCAT columns hold
		// comma-joined, position-aligned value lists unpacked below.
		String sql = "select a.partition_column,group_concat(date_format(a.create_time,'%Y-%m-%d')) as create_time ,group_concat(a.num_rows) as num_rows,group_concat(a.raw_size) as raw_size,group_concat(a.hdfs_size) as hdfs_size "
				+ "from t_asset_sd_collect a inner join t_meta_cubes b on a.busin_id=b.business_field_id and a.table_code=b.cube_code inner join t_meta_project c on b.project_id=c.id and c.project_code=a.project_code "
				+ "where b.cube_id=?";
		param.add(cube_id);

		if (StringUtils.isNotBlank(end_time)) {
			sql = sql + " and date_format(a.create_time,'%Y-%m-%d')<=date_format(?,'%Y-%m-%d')";
			param.add(end_time);
		}
		sql = sql + " group by a.partition_column";

		List<Map<String, Object>> result = dmJdbcTemplate.queryForList(sql, param.toArray());

		for (Map<String, Object> map : result) {
			Map<String, Object> p = new HashMap<String, Object>();
			p.put("column_name", map.get("partition_column"));
			columns.add(map.get("partition_column").toString());

			// NOTE(review): assumes the four GROUP_CONCAT lists are equal length and
			// index-aligned; a NULL metric or MySQL's group_concat_max_len truncation
			// would break that and throw ArrayIndexOutOfBounds below — verify upstream.
			String[] cts = map.get("create_time").toString().split(",");
			String[] nrs = map.get("num_rows").toString().split(",");
			String[] rss = map.get("raw_size").toString().split(",");
			String[] hss = map.get("hdfs_size").toString().split(",");

			// Re-zip the parallel arrays into one sample map per partition snapshot.
			List<Map<String, Object>> datas = new ArrayList<Map<String, Object>>();
			for (int i = 0; i < cts.length; i++) {
				Map<String, Object> data = new HashMap<String, Object>();
				data.put("create_time", cts[i]);
				data.put("num_rows", nrs[i]);
				data.put("raw_size", rss[i]);
				data.put("hdfs_size", hss[i]);

				datas.add(data);
			}
			p.put("partitions", datas);

			partitions.add(p);
		}

		object.put("partitions", partitions);
		object.put("columns", columns);
		return object;
	}
	
	/**
	 * Returns the fixed list of supported storage/source types. Each entry carries
	 * identical "source_type" and "source_type_name" values.
	 *
	 * @return source-type options in display order
	 */
	public List<Map<String,Object>> getSourceType(){
		List<Map<String,Object>> types = new ArrayList<Map<String,Object>>();
		// Order matters for the UI dropdown; keep in sync with previewData branches.
		for (String name : new String[] { "Hive", "Tidb", "clickhouse", "elasticsearch", "mysql" }) {
			Map<String,Object> entry = new HashMap<String,Object>();
			entry.put("source_type", name);
			entry.put("source_type_name", name);
			types.add(entry);
		}
		return types;
	}
	
	/**
	 * Previews up to 10 rows of a catalogue object, dispatching on source_type:
	 * hive via Presto, tidb/clickhouse/mysql via direct JDBC, elasticsearch via
	 * the ES client. Unknown source types yield an empty result map.
	 *
	 * @param params     cube_id (table/index id) and source_type
	 * @param login_code current user, forwarded to Presto for authorization
	 * @return map with "columns" (list of column names) and "data" (sample rows);
	 *         the JDBC branch logs failures and may return a partial result
	 * @throws Exception from metadata lookups, Presto, decryption or ES access
	 */
	public Map<String, Object> previewData(Map<String,Object> params,String login_code) throws Exception {
		Object cube_id = params.get("cube_id");
		String source_type = CommonUtils.valueOf(params.get("source_type"));
		
		Map<String, Object> result = new HashMap<String, Object>();
		if(StringUtils.equalsIgnoreCase(source_type, "hive")){
			// Resolve table coordinates and its column list from the metadata model.
			Map<String, Object> map = dmJdbcTemplate.queryForMap(
					"select a.cube_code,a.project_id,b.project_code,GROUP_CONCAT(d.element_code) as columns "
							+ "from t_meta_cubes a inner join t_meta_project b on a.project_id=b.id "
							+ "inner join t_meta_entity_relation c on a.cube_id=c.cube_id "
							+ "inner join t_meta_element d on c.element_id=d.element_id " + "where a.cube_id=? ",
					new Object[] { cube_id });

			String[] columns = map.get("columns").toString().split(",");
			List<String> cl = new ArrayList<String>();
			for (String column : columns) {
				cl.add(column);
			}
			result.put("columns", cl);

			// Identifiers originate from the metadata tables, not user input.
			String hiveSql = "select " + map.get("columns") + " from " + map.get("project_code") + "."
					+ map.get("cube_code") + " limit 10";

			String project_code = CommonUtils.valueOf(map.get("project_code"));
			List<Map<String, Object>> list = presto.queryForList(project_code, login_code, hiveSql);
			
			result.put("data", list);
		}else if(StringUtils.equalsIgnoreCase(source_type, "tidb") || StringUtils.equalsIgnoreCase(source_type, "clickhouse") || StringUtils.equalsIgnoreCase(source_type, "mysql")){
			// BUG FIX: "clickhouse" was compared case-sensitively (StringUtils.equals)
			// while the sibling types used equalsIgnoreCase, so e.g. "ClickHouse"
			// silently fell through and returned an empty preview.
			Connection conn = null;
			try{
				String sql = "select db_code,table_code,jdbc_url,jdbc_user,jdbc_pw from t_asset_meta_tab_collect where table_id = ? limit 1";
				Map<String,Object> tabMap = dmJdbcTemplate.queryForMap(sql,cube_id);
				Object db_code = tabMap.get("db_code");
				Object table_code = tabMap.get("table_code");
				String jdbc_url = CommonUtils.valueOf(tabMap.get("jdbc_url"));
				// Append the database name when the URL ends at host:port with no path.
				String urlStr = jdbc_url.substring(jdbc_url.lastIndexOf(":"));
				if(urlStr.lastIndexOf("/") < 0){
					jdbc_url = jdbc_url + "/"+db_code;
				}
				String jdbc_user = CommonUtils.valueOf(tabMap.get("jdbc_user"));
				String jdbc_pw = CommonUtils.valueOf(tabMap.get("jdbc_pw"));
				if(StringUtils.isNotBlank(jdbc_pw)){
					// Credentials are stored AES-encrypted.
					jdbc_pw = AesEncryptUtils.decrypt(jdbc_pw);
				}
				
				if(StringUtils.equalsIgnoreCase(source_type, "clickhouse")){
					conn = JdbcUtil.getConnection(jdbc_url, jdbc_user, jdbc_pw, "ck");
				}else{
					// TiDB speaks the MySQL protocol, so both share the mysql driver.
					conn = JdbcUtil.getConnection(jdbc_url, jdbc_user, jdbc_pw, "mysql");
				}
				
				sql = "select group_concat(column_code) as column_code from t_asset_meta_col_collect where table_id = ?";
				Map<String,Object> colMap = dmJdbcTemplate.queryForMap(sql,cube_id);
				String[] columns = CommonUtils.valueOf(colMap.get("column_code")).split(",");
				List<String> cl = new ArrayList<String>();
				for (String column : columns) {
					cl.add(column);
				}
				result.put("columns", cl);
				
				String querySql = "select "+colMap.get("column_code")+" from "+table_code + " limit 10";
				
				List<Map<String,Object>> list = JdbcUtil.queryForList(conn, querySql, new Object[]{});
				
				result.put("data", list);
			}catch(Exception e){
				// Best-effort preview: log and return whatever was gathered so far.
				if(log.isErrorEnabled()){
					log.error(e.getMessage(),e);
				}
			}finally{
				JdbcUtil.close(conn);
			}
		}else if(StringUtils.equalsIgnoreCase(source_type, "elasticsearch") || StringUtils.equalsIgnoreCase(source_type, "es")){
			// Distinct field names of the index, aggregated into one comma-joined row.
			String sql = "select group_concat(fields_code) as column_code from(select fields_code from t_asset_es_fields_collect where index_id = ? group by fields_code) t";
			Map<String,Object> colMap = dmJdbcTemplate.queryForMap(sql,cube_id);
			String[] columns = CommonUtils.valueOf(colMap.get("column_code")).split(",");
			List<String> cl = new ArrayList<String>();
			for (String column : columns) {
				cl.add(column);
			}
			result.put("columns", cl);
			
			sql = "select table_code,jdbc_url,jdbc_user,jdbc_pw from t_asset_meta_tab_collect where table_id = ?";
			Map<String,Object> tableMap =  dmJdbcTemplate.queryForMap(sql,cube_id);
			String table_code = CommonUtils.valueOf(tableMap.get("table_code"));
			String jdbc_url = CommonUtils.valueOf(tableMap.get("jdbc_url"));
			String jdbc_user = CommonUtils.valueOf(tableMap.get("jdbc_user"));
			String jdbc_pw = CommonUtils.valueOf(tableMap.get("jdbc_pw"));
			if(StringUtils.isNotBlank(jdbc_pw)){
				jdbc_pw = AesEncryptUtils.decrypt(jdbc_pw);
			}
			List<Map<String,Object>> list = es.getIndexDocument(jdbc_url, jdbc_user, jdbc_pw, table_code, 10);
			// Flatten every document value to a string for uniform front-end rendering.
			List<Map<String,String>> datas = new ArrayList<Map<String,String>>();
			for(Map<String,Object> map : list){
				Map<String,String> data = new HashMap<String,String>();
				for(Map.Entry<String, Object> entry : map.entrySet()){
					String key = entry.getKey();
					String value = CommonUtils.valueOf(entry.getValue());
					data.put(key, value);
				}
				datas.add(data);
			}
			result.put("data", datas);
		}
		return result;
	}
	
	/**
	 * Sets or clears the favourite ("collect") flag on a catalogue object for the
	 * given user. Collecting (isCollect == "1") inserts a trail row on first use,
	 * resolving the object's display data from the hive or generic metadata tables;
	 * any other value simply updates the existing trail row's flag.
	 *
	 * @param params     id, isCollect ("0"/"1"), source_type, project_code
	 * @param login_code current user's login code
	 */
	public void setCollect(Map<String,Object> params,String login_code) {
		String id = CommonUtils.valueOf(params.get("id"));
		String isCollect = CommonUtils.valueOf(params.get("isCollect"));
		String source_type = CommonUtils.valueOf(params.get("source_type"));
		String project_code = CommonUtils.valueOf(params.get("project_code"));
		
		// Un-collect: just flip the flag on whatever trail row exists.
		if (!StringUtils.equals(isCollect, "1")) {
			dmJdbcTemplate.update("update t_asset_m_trail set is_collect=? where id=? and login_code = ?", new Object[] { isCollect, id,login_code });
			return;
		}
		
		String existsSql = "select 1 from t_asset_m_trail where id = ? and login_code = ?";
		boolean trailExists = !dmJdbcTemplate.queryForList(existsSql, id, login_code).isEmpty();
		if (trailExists) {
			dmJdbcTemplate.update("update t_asset_m_trail set is_collect=? where id = ? and login_code = ?", new Object[] { isCollect, id ,login_code});
			return;
		}
		
		// First collect for this user+object: resolve display metadata, then insert.
		Map<String,Object> cubeMap;
		if (StringUtils.equalsIgnoreCase(source_type, "hive")) {
			// Hive objects live in t_meta_cubes (no ds_id column → inserted as null).
			String cubeSql = "select cube_code,cube_name,object_type,project_code,'hive' as source_type from t_meta_cubes where cube_id = ?";
			cubeMap = dmJdbcTemplate.queryForMap(cubeSql, id);
		} else {
			String cubeSql = "select t1.table_code as cube_code,case when t1.table_name is null or t1.table_name ='' then t1.table_code else table_name end as cube_name,t1.object_type,t1.ds_id,t2.project_code,t1.db_type as source_type from t_asset_meta_tab_collect t1 left join t_asset_meta_struct t2 on t1.table_id = t2.table_id where t1.table_id = ? and t2.project_code = ?";
			cubeMap = dmJdbcTemplate.queryForMap(cubeSql, id, project_code);
		}
		String insertSql = "insert into t_asset_m_trail(id,object_type,object_code,object_name,hot_value,trail_date,is_collect,project_code,login_code,source_type,ds_id) values (?,?,?,?,?,?,?,?,?,?,?)";
		dmJdbcTemplate.update(insertSql, new Object[] { id, cubeMap.get("object_type"), cubeMap.get("cube_code"), cubeMap.get("cube_name"), 1, new Date(), isCollect, cubeMap.get("project_code"), login_code, source_type, cubeMap.get("ds_id") });
	}
	
	/**
	 * Builds the lineage ("consanguinity") view for a cube: the cube node itself
	 * decorated with its de-duplicated direct downstream and upstream tables.
	 *
	 * @param cube_id cube identifier
	 * @param isTemp  "1" expands temp tables (object_type 24) one extra hop,
	 *                replacing each temp node with its own neighbours
	 * @return the cube row decorated with modelType, neighbour counts and the
	 *         downStreamTableList / upStreamTableList bean lists
	 */
	public Map<String, Object> consanguinity(String cube_id, String isTemp) {

		Map<String, Object> cube = dmJdbcTemplate.queryForMap(
				"select b.cube_id as modelId,b.oper_user as tableOwner from t_meta_cubes b where b.cube_id=?",
				new Object[] { cube_id });
		cube.put("bizUnitName", null);
		cube.put("nodeId", null);
		cube.put("envType", "PROD");

		// ---------- downstream ----------
		// One row per direct child: its type, id, code, number of its own children
		// ("parents" alias kept for compatibility) and its comma-joined column list.
		String downSql = "select cube.object_type,a.child_id as modelId,a.child_code as tableName,"
				+ "(select count(1) from t_meta_cube_relation b where b.parent_id=a.child_id) as parents ,"
				+ "(select GROUP_CONCAT(c.element_code) from t_meta_entity_relation b inner join t_meta_element c on b.element_id=c.element_id where b.cube_id=a.child_id) as elements "
				+ "from t_meta_cube_relation a inner join t_meta_cubes cube on a.child_id=cube.cube_id "
				+ "where a.parent_id=?";
		List<Map<String, Object>> downStreamTableList = dmJdbcTemplate.queryForList(downSql, new Object[] { cube_id });

		List<Map<String, Object>> downResult = new ArrayList<Map<String, Object>>();
		if ("1".equals(isTemp)) {
			for (Map<String, Object> ds : downStreamTableList) {
				String object_type = (String) ds.get("object_type");
				if ("24".equals(object_type)) {
					// Temp table: splice in its own downstream neighbours instead.
					for (Map<String, Object> map : dmJdbcTemplate.queryForList(downSql,
							new Object[] { ds.get("modelId") })) {
						decorateLineageNode(map, "parents", false);
						downResult.add(map);
					}
				} else {
					decorateLineageNode(ds, "parents", false);
					downResult.add(ds);
				}
			}
			setStreamCount(cube, "directDownStreamTableCount", downResult.size());
		} else {
			setStreamCount(cube, "directDownStreamTableCount", downStreamTableList.size());
			for (Map<String, Object> ds : downStreamTableList) {
				decorateLineageNode(ds, "parents", false);
			}
			downResult = downStreamTableList;
		}

		cube.put("downStreamTableList", toDistinctConsanguinity(downResult));

		// ---------- upstream ----------
		String upSql = "select cube.object_type,a.parent_id as modelId,a.parent_code as tableName,"
				+ "(select count(1) from t_meta_cube_relation b where b.child_id=a.parent_id) as childs,"
				+ "(select GROUP_CONCAT(c.element_code) from t_meta_entity_relation b inner join t_meta_element c on b.element_id=c.element_id where b.cube_id=a.parent_id) as elements "
				+ "from t_meta_cube_relation a inner join t_meta_cubes cube on a.parent_id=cube.cube_id "
				+ "where a.child_id=?";
		List<Map<String, Object>> upStreamTableList = dmJdbcTemplate.queryForList(upSql, new Object[] { cube_id });

		List<Map<String, Object>> upResult = new ArrayList<Map<String, Object>>();
		if ("1".equals(isTemp)) {
			// NOTE(review): unlike upSql, this expansion query joins on
			// a.child_id=cube.cube_id — preserved from the original; verify intent.
			String upTempSql = "select cube.object_type,a.parent_id as modelId,a.parent_code as tableName,"
					+ "(select count(1) from t_meta_cube_relation b where b.child_id=a.parent_id) as childs,"
					+ "(select GROUP_CONCAT(c.element_code) from t_meta_entity_relation b inner join t_meta_element c on b.element_id=c.element_id where b.cube_id=a.parent_id) as elements "
					+ "from t_meta_cube_relation a inner join t_meta_cubes cube on a.child_id=cube.cube_id "
					+ "where a.child_id=?";
			for (Map<String, Object> us : upStreamTableList) {
				String object_type = (String) us.get("object_type");
				if ("24".equals(object_type)) {
					for (Map<String, Object> map : dmJdbcTemplate.queryForList(upTempSql,
							new Object[] { us.get("modelId") })) {
						// BUG FIX: previously split "elements" without a null check and
						// threw NPE for nodes with no mapped columns (GROUP_CONCAT → null).
						decorateLineageNode(map, "childs", true);
						upResult.add(map);
					}
				} else {
					decorateLineageNode(us, "childs", true);
					upResult.add(us);
				}
			}
			setStreamCount(cube, "directUpStreamTableCount", upStreamTableList.size());
		} else {
			setStreamCount(cube, "directUpStreamTableCount", upStreamTableList.size());
			for (Map<String, Object> us : upStreamTableList) {
				decorateLineageNode(us, "childs", true);
			}
			upResult = upStreamTableList;
		}

		cube.put("upStreamTableList", toDistinctConsanguinity(upResult));

		return cube;
	}

	/**
	 * Decorates one lineage node in place: static flags, modelType (4 = has further
	 * relations, 1 = leaf) and the parsed columnNameList. Downstream nodes keep a
	 * null column list when blank (historical behavior); upstream nodes get an
	 * empty array when {@code emptyWhenBlank} is true.
	 */
	private void decorateLineageNode(Map<String, Object> node, String countKey, boolean emptyWhenBlank) {
		node.put("tempTable", false);
		node.put("envType", null);
		long relationCount = (Long) node.get(countKey);
		node.put("modelType", relationCount > 0 ? 4 : 1);
		String[] elements = null;
		String element = CommonUtils.valueOf(node.get("elements"));
		if (StringUtils.isNotBlank(element)) {
			elements = element.split(",");
		} else if (emptyWhenBlank) {
			elements = new String[0];
		}
		node.put("columnNameList", elements);
	}

	/** Sets modelType (4 = has neighbours, 1 = none) and the neighbour count on the cube node. */
	private void setStreamCount(Map<String, Object> cube, String countKey, int size) {
		cube.put("modelType", size != 0 ? 4 : 1);
		cube.put(countKey, size);
	}

	/** Maps raw lineage rows to Consanguinity beans, de-duplicated by modelId. */
	private List<Consanguinity> toDistinctConsanguinity(List<Map<String, Object>> rows) {
		List<Consanguinity> beans = new ArrayList<Consanguinity>();
		for (Map<String, Object> row : rows) {
			beans.add((Consanguinity) mapToObj(row, Consanguinity.class));
		}
		return beans.stream().filter(distinctByKey(Consanguinity::getModelId)).collect(Collectors.toList());
	}
	
	/**
	 * Output (production) info: pages through scheduler process instances whose
	 * name contains the cube's logic_id. Only implemented for hive sources; other
	 * source types return zero records.
	 *
	 * @param params cube_id, source_type, pageSize, pageNo (1-based)
	 * @return map with "total_records" and the "task_data" page
	 */
	public Map<String,Object> getOutputInfo(Map<String,Object> params){
		Object cube_id = params.get("cube_id");
		String source_type = CommonUtils.valueOf(params.get("source_type"));
		int pageSize = Integer.parseInt(CommonUtils.valueOf(params.get("pageSize")));
		int offset = (Integer.parseInt(CommonUtils.valueOf(params.get("pageNo"))) - 1) * pageSize;
		
		Object totalRecords = 0;
		List<Map<String,Object>> taskList = new ArrayList<Map<String,Object>>();
		if (StringUtils.equalsIgnoreCase(source_type, "hive")) {
			String logic_id = CommonUtils.valueOf(
					dmJdbcTemplate.queryForMap("select logic_id from t_meta_cubes where cube_id = ?", cube_id).get("logic_id"));
			if (StringUtils.isNotBlank(logic_id)) {
				String namePattern = "%" + logic_id + "%";
				totalRecords = dsJdbcTemplate
						.queryForMap("select count(1) count from t_ds_process_instance where name like ?", namePattern)
						.get("count");
				taskList = dsJdbcTemplate.queryForList(getTaskSql(), namePattern, pageSize, offset);
			}
		}
		
		Map<String,Object> data = new HashMap<String,Object>();
		data.put("total_records", totalRecords);
		data.put("task_data", taskList);
		return data;
	}
	
	/**
	 * Table-impact list: which downstream tables would be affected by a change
	 * to this cube. Only hive lineage is tracked; other source types return an
	 * empty page.
	 *
	 * @param params cube_id, source_type, show_direct_down ("1" = direct
	 *               downstream only, "2" = all downstream), pageSize, pageNo
	 * @return map with "total_records" and the "table_effect_data" page
	 */
	public Map<String,Object> queryTableEffectList(Map<String,Object> params){
		String source_type = CommonUtils.valueOf(params.get("source_type"));
		String cube_id = CommonUtils.valueOf(params.get("cube_id"));
		String show_direct_down = CommonUtils.valueOf(params.get("show_direct_down"));
		int pageSize = Integer.parseInt(CommonUtils.valueOf(params.get("pageSize")));
		int offset = (Integer.parseInt(CommonUtils.valueOf(params.get("pageNo"))) - 1) * pageSize;
		
		Object totalRecords = 0;
		List<Map<String,Object>> tableEffectList = new ArrayList<Map<String,Object>>();
		if (StringUtils.equalsIgnoreCase(source_type, "hive")) {
			totalRecords = getTableEffectCount(cube_id, show_direct_down);
			tableEffectList = getTableEffectList(cube_id, show_direct_down, pageSize, offset);
		}
		// tidb / es: impact tracking not implemented — empty result.
		
		Map<String,Object> data = new HashMap<String,Object>();
		data.put("total_records", totalRecords);
		data.put("table_effect_data", tableEffectList);
		return data;
	}

	/**
	 * Integration-sync impact list: which sync tasks consume this cube. Only
	 * implemented for hive sources; others return an empty page.
	 *
	 * @param params cube_id, source_type, show_direct_down ("1" = direct
	 *               downstream only, "2" = all downstream), pageSize, pageNo
	 * @return map with "total_records" and the "table_effect_data" page
	 */
	public Map<String,Object> queryCorpusSyncEffectList(Map<String,Object> params){
		String source_type = CommonUtils.valueOf(params.get("source_type"));
		String cube_id = CommonUtils.valueOf(params.get("cube_id"));
		String show_direct_down = CommonUtils.valueOf(params.get("show_direct_down"));
		int pageSize = Integer.parseInt(CommonUtils.valueOf(params.get("pageSize")));
		int pageNo = Integer.parseInt(CommonUtils.valueOf(params.get("pageNo")));
		int offset = (pageNo - 1) * pageSize ;
		
		Object totalRecords = 0;
		List<Map<String,Object>> tableEffectList = new ArrayList<Map<String,Object>>();
		if(StringUtils.equalsIgnoreCase(source_type, "hive")){
			String sql = "select count(1) count from t_meta_integrat_sync_effect where cube_id = ?";
			if(StringUtils.equals(show_direct_down, "1")){
				sql = sql +" and show_direct_down = '1'";
			}
			totalRecords = dmJdbcTemplate.queryForMap(sql,cube_id).get("count");
			
			sql = "select cube_code,effect_level,effect_task,dest_cube_code,dest_ds_project,create_user from t_meta_integrat_sync_effect where cube_id = ?";
			if(StringUtils.equals(show_direct_down, "1")){
				sql = sql +" and show_direct_down = '1'";
			}
			sql = sql + " limit ? offset ? ";
			// BUG FIX: the limit placeholder was previously bound to pageNo instead of
			// pageSize, so page 1 always returned one row regardless of page size.
			tableEffectList = dmJdbcTemplate.queryForList(sql,cube_id,pageSize,offset);
		}else{
			// tidb / es: sync-effect tracking not implemented — empty result.
		}
		
		Map<String,Object> data = new HashMap<String,Object>();
		data.put("total_records", totalRecords);
		data.put("table_effect_data", tableEffectList);
		
		return data;
	}
	
	/**
	 * Inserts a usage-description record for a cube into t_meta_cubes_usedesc.
	 *
	 * @param params cube_id, use_title, use_content, login_code (stored as create_user)
	 */
	public void saveUseDesc(Map<String,Object> params){
		Map<String,Object> row = new HashMap<String,Object>();
		row.put("cube_id", params.get("cube_id"));
		row.put("use_title", params.get("use_title"));
		row.put("use_content", params.get("use_content"));
		row.put("create_time", new Date());
		row.put("create_user", params.get("login_code"));
		
		// CommonDao completes the column list and values from the row map.
		commonDao.insertTableData(row, new StringBuilder("insert into t_meta_cubes_usedesc ("));
	}
	
	/**
	 * Updates a cube's usage description (title and content) and refreshes its
	 * create_time to "now".
	 *
	 * @param params cube_id (row selector), use_title, use_content
	 */
	public void editUseDesc(Map<String,Object> params){
		Map<String,Object> changes = new HashMap<String,Object>();
		changes.put("use_title", params.get("use_title"));
		changes.put("use_content", params.get("use_content"));
		changes.put("create_time", new Date());
		
		// CommonDao builds the SET clause from the map and appends the where clause.
		commonDao.updateTable(changes, new StringBuilder("update t_meta_cubes_usedesc set "), " where cube_id = ?", params.get("cube_id"));
	}
	
	/**
	 * Deletes the usage description of a cube.
	 *
	 * @param cube_id cube identifier
	 */
	public void deleteUseDesc(String cube_id){
		dmJdbcTemplate.update("delete from t_meta_cubes_usedesc where cube_id = ?", cube_id);
	}

	/**
	 * Fetches the usage description of a cube.
	 *
	 * @param cube_id cube identifier
	 * @return the first matching row (use_title, use_content, create_user,
	 *         formatted oper_time) or an empty map when none exists
	 */
	public Map<String,Object> queryByCubeId(String cube_id){
		String sql = "select use_title,use_content,create_user,date_format(create_time,'%Y-%m-%d %H:%i:%s') as oper_time from t_meta_cubes_usedesc where cube_id = ?";
		List<Map<String,Object>> rows = dmJdbcTemplate.queryForList(sql, cube_id);
		return rows.isEmpty() ? new HashMap<String,Object>() : rows.get(0);
	}
	
	/**
	 * Returns the fixed list of selectable table/object types for filtering.
	 *
	 * @return object-type options in display order
	 */
	public List<Map<String,Object>> queryObjectType(){
		// code / display-name pairs; "11" also stands in for the companion type 50.
		String[][] types = {
				{ "11", "物理表" },
				{ "16", "维度逻辑表" },
				{ "17", "事实逻辑表" },
				{ "18", "汇总逻辑表" },
				{ "42", "标签表" },
				{ "51", "索引" },
		};
		List<Map<String,Object>> list = new ArrayList<Map<String,Object>>();
		for (String[] type : types) {
			Map<String,Object> entry = new HashMap<String,Object>();
			entry.put("object_type", type[0]);
			entry.put("object_type_name", type[1]);
			list.add(entry);
		}
		return list;
	}
	
	/**
	 * 根据项目查询数据表
	 * @param project_code
	 * @return
	 */
	public List<Map<String,Object>> queryMetaTable(String project_code){
		// All cubes (tables) registered under the given project.
		String metaSql = "select cube_id,cube_code from t_meta_cubes where project_code = ?";
		return dmJdbcTemplate.queryForList(metaSql, project_code);
	}
	
	/**
	 * 数据表详情
	 * @param params
	 * @param login_code
	 * @return
	 * @throws ParseException 
	 */
	public Map<String,Object> queryById(Map<String,Object> params,String login_code) throws ParseException{
		String cube_id = CommonUtils.valueOf(params.get("cube_id"));
		String source_type = CommonUtils.valueOf(params.get("source_type"));
		
		// Base record plus columns, a sample SELECT statement and partition columns.
		Map<String, Object> tableInfo = queryById(cube_id,login_code);
		List<Map<String, Object>> columns = getColumns(cube_id,source_type);
		String searchSql = getSearchSql(cube_id, tableInfo.get("cube_code"),source_type);
		List<Map<String, Object>> primarys = getPartitonColumns(cube_id,source_type);
		
		// Physical info: storage type, human-readable size, retention shown as "N天" when cycle_day is set.
		String raw_size = CommonUtils.valueOf(tableInfo.get("raw_size"));
		raw_size = CommonUtils.formateRawSize(raw_size);
		Map<String,Object> phyInfo = new HashMap<String,Object>();
		phyInfo.put("storage_type", tableInfo.get("source_type"));
		phyInfo.put("storage_size", raw_size);
		phyInfo.put("life_cycle", StringUtils.isBlank(CommonUtils.valueOf(tableInfo.get("cycle_day"))) ? "" : tableInfo.get("cycle_day")+"天");
		
		// Usage info: collect_count comes from the trail table (real data), but
		// NOTE(review): browse_count/visit_count are hard-coded placeholders (11/13) —
		// replace with real metrics or confirm they are intentional stubs.
		Map<String,Object> useInfo = new HashMap<String,Object>();
		useInfo.put("collect_count", tableInfo.get("collect_count")+"次");
		useInfo.put("browse_count", 11+"次");
		useInfo.put("visit_count", 13+"次");
		
		// Change info: NOTE(review): these timestamps are synthesized relative to "now"
		// via DateUtils.getDelay, not read from any store — confirm they are demo stubs.
		Map<String,Object> changeInfo = new HashMap<String,Object>();
		changeInfo.put("data_change", DateUtils.getDelay(8, "yyyy-MM-dd HH:mm:ss", new Date()));
		changeInfo.put("lately_visit", DateUtils.getDelay(1, "yyyy-MM-dd HH:mm:ss", new Date()));
		changeInfo.put("ddl_change", DateUtils.getDelay(12, "yyyy-MM-dd HH:mm:ss", new Date()));
		
		// Assemble the detail payload returned to the controller.
		Map<String,Object> data = new HashMap<String,Object>();
		data.put("columns", columns);
		data.put("primarys", primarys);
		data.put("tableInfo", tableInfo);
		data.put("searchSql", searchSql);
		data.put("phyInfo", phyInfo);//physical info
		data.put("changeInfo", changeInfo);//change info
		data.put("useInfo", useInfo);//usage info
		
		return data;
	}
	
	public List<Map<String, Object>> getPartitonColumns(String cube_id,String source_type) {
		// Partition columns for a table; the lookup differs per storage type.
		// (Method name keeps the historical "Partiton" spelling for caller compatibility.)
		if(StringUtils.equalsIgnoreCase(source_type, "hive")){
			String cubeSql = "select object_type,business_field_id from t_meta_cubes where cube_id = ?";
			Map<String,Object> cube = dmJdbcTemplate.queryForMap(cubeSql,cube_id);
			String objectType = CommonUtils.valueOf(cube.get("object_type"));
			if(StringUtils.equals(objectType, "11") || StringUtils.equals(objectType, "50")){
				// Physical table: partitions recorded per entity.
				String prtSql = "select partition_code as element_code,'' as element_name,partition_type as element_type,'--' as rel_standards,'--' as info_classify,'--' as safe_level,1 as data_hot from t_meta_entity_partition where cube_id = ?";
				return dmJdbcTemplate.queryForList(prtSql,cube_id);
			}
			// Logical table: single partition column taken from the data-source info.
			Map<String,Object> ds = commonService.getDsInfo(cube.get("business_field_id"));
			Map<String,Object> row = new HashMap<String,Object>();
			row.put("element_code", ds.get("partition_code"));
			row.put("element_name", ds.get("partition_name"));
			row.put("element_type", ds.get("data_type"));
			row.put("rel_standards", "--");
			row.put("info_classify", "--");
			row.put("safe_level", "--");
			row.put("data_hot", 1);
			List<Map<String,Object>> single = new ArrayList<Map<String,Object>>();
			single.add(row);
			return single;
		}
		// Non-hive storage: partitions come from the collected metadata table.
		String tidbSql = "select partition_code as element_code,'' as element_name,'varchar(64)' as element_type,'--' as rel_standards,'--' as info_classify,'--' as safe_level,1 as data_hot from t_asset_tidb_prt_collect where table_id = ?";
		return dmJdbcTemplate.queryForList(tidbSql,cube_id);
	}
	
	/**
	 * Builds a sample "select col1,col2,... from table" statement for the given cube.
	 * <p>Bug fix: the original trimmed with {@code substring(0, lastIndexOf(",") - 1)},
	 * which removes one character too many and truncates the last column name.
	 * @param cube_id     table id used to look up the column list
	 * @param cube_code   physical table name used in the FROM clause
	 * @param source_type storage type forwarded to {@link #getColumns}
	 * @return the assembled SELECT statement
	 */
	public String getSearchSql(String cube_id, Object cube_code,String source_type) {
		List<Map<String, Object>> columns = getColumns(cube_id,source_type);

		// StringBuilder instead of repeated String concatenation in the loop.
		StringBuilder sql = new StringBuilder("select ");
		for (int i = 0; i < columns.size(); i++) {
			if (i > 0) {
				sql.append(",");
			}
			sql.append(columns.get(i).get("element_code"));
		}
		sql.append(" from ").append(cube_code);
		return sql.toString();
	}
	
	/**
	 * Column metadata for a table, shaped per storage type:
	 * hive -> element catalogue via the entity relation; elasticsearch/es -> field
	 * collection with nested sub-fields under key "child"; anything else -> the
	 * generic collected-column table.
	 * @param cube_id     table (or index) id
	 * @param source_type storage type discriminator
	 * @return one map per column (element_code/element_type/..., plus "child" for ES)
	 */
	public List<Map<String, Object>> getColumns(String cube_id,String source_type) {
		List<Map<String, Object>> columns = new ArrayList<Map<String,Object>>();
		if(StringUtils.equalsIgnoreCase(source_type, "hive")){
			String sql = "select c.element_code,c.element_name,c.element_type,'--' as rel_standards,'--' as info_classify,'--' as safe_level,1 as data_hot from t_meta_entity_relation b inner join t_meta_element c on b.element_id=c.element_id where b.cube_id = ?";
			columns = dmJdbcTemplate.queryForList(sql,new Object[] { cube_id });
		}else if(StringUtils.equalsIgnoreCase(source_type, "elasticsearch") || StringUtils.equalsIgnoreCase(source_type, "es")){
			// The ES query binds the same index id twice (parent and sub-field subqueries).
			String sql = getEsColumnSql();
			List<Map<String,Object>> list = dmJdbcTemplate.queryForList(sql,cube_id,cube_id);
			for(Map<String,Object> map : list){
				Map<String,Object> data = new LinkedHashMap<String,Object>();
				data.put("element_id", map.get("element_id"));
				data.put("element_code", map.get("element_code"));
				data.put("element_type", map.get("element_type"));
				data.put("analyzer", map.get("analyzer"));
				data.put("rel_standards", map.get("rel_standards"));
				data.put("info_classify", map.get("info_classify"));
				data.put("safe_level", map.get("safe_level"));
				data.put("data_hot", map.get("data_hot"));
				data.put("child", parseEsChildColumns(CommonUtils.valueOf(map.get("child"))));
				columns.add(data);
			}
		}else{
			String sql = "select column_code as element_code,column_name as element_name,column_type as element_type,'--' as rel_standards,'--' as info_classify,'--' as safe_level,1 as data_hot from t_asset_meta_col_collect where table_id = ?";
			columns = dmJdbcTemplate.queryForList(sql,cube_id);
		}
		return columns;
	}
	
	/**
	 * Parses the aggregated ES sub-field string — "id@code@type[@analyzer]" entries
	 * joined by '&' (see getEsColumnSql's group_concat) — into child column maps.
	 * @param child the packed sub-field string; blank means no children
	 * @return child column maps, empty when there are none
	 */
	private List<Map<String,Object>> parseEsChildColumns(String child) {
		List<Map<String,Object>> childList = new ArrayList<Map<String,Object>>();
		if(StringUtils.isNotBlank(child)){
			for(String subField : StringUtils.split(child, "&")){
				// Entries carry either 3 or 4 '@'-separated parts (analyzer is optional).
				String[] parts = StringUtils.split(subField, "@");
				Map<String,Object> childMap = new LinkedHashMap<String,Object>();
				childMap.put("element_id", parts[0]);
				childMap.put("element_code", parts[1]);
				childMap.put("element_type", parts[2]);
				childMap.put("analyzer", parts.length == 4 ? parts[3] : "");
				childMap.put("rel_standards", "--");
				childMap.put("info_classify", "--");
				childMap.put("safe_level", "--");
				childMap.put("data_hot", 1);
				childList.add(childMap);
			}
		}
		return childList;
	}
	
	/**
	 * Loads the detail record for one table and enriches it with life-cycle,
	 * collection flag for the current user, and the total collect count.
	 * @param cube_id    table id (bound to t1.table_id of the base cube query)
	 * @param login_code current user, used for the per-user is_collect flag
	 * @return the enriched row; keys cycle_day / is_collect / collect_count are added
	 */
	public Map<String, Object> queryById(String cube_id,String login_code) {
		StringBuffer sql = getCubeSql();
		sql.append(" and t1.table_id = ?");
		// Assumes the cube exists; an unknown id fails on get(0) (unchanged behavior).
		Map<String, Object> result = dmJdbcTemplate.queryForList(sql.toString(),new Object[] { cube_id }).get(0);

		// Retention (cycle_day) is optional — only set when a life-cycle row exists.
		String lifeSql = "select cycle_day from t_meta_object_lifecycle where object_code = ? and project_code = ?";
		List<Map<String,Object>> lifeList = dmJdbcTemplate.queryForList(lifeSql,result.get("cube_code"),result.get("project_code"));
		if(!lifeList.isEmpty()){
			result.put("cycle_day", lifeList.get(0).get("cycle_day"));
		}
		
		// Whether the current user has collected (favourited) this table.
		// Spring's queryForList never returns null, so isEmpty() suffices
		// (the original also null-checked, which was dead code).
		String trailSql = "select is_collect from t_asset_m_trail where id = ? and login_code = ?";
		List<Map<String, Object>> collects = dmJdbcTemplate.queryForList(trailSql,cube_id,login_code);
		if (collects.isEmpty()) {
			result.put("is_collect", 0);
		} else {
			result.put("is_collect", Integer.parseInt(collects.get(0).get("is_collect").toString()));
		}
		
		// Total number of users who collected this table.
		String collectSql = "select count(1) count from t_asset_m_trail where id = ? and is_collect = 1";
		result.put("collect_count", dmJdbcTemplate.queryForMap(collectSql,cube_id).get("count"));
		
		return result;
	}
	
	/**
	 * 元数据同步
	 * @param params
	 */
	public void metaSync(Map<String,Object> params){
		try{
			String project_code = CommonUtils.valueOf(params.get("project_code"));

			log.info("===========元数据同步开始============");
			assetMetaDataCollectServ.syncMeta(project_code);
			log.info("===========元数据同步结束============");

			// Hive metadata sync (syncMetaHiveService.syncHiveMeta) is intentionally
			// disabled here; re-enable when hive collection is needed again.
		}catch(Exception e){
			// Sync failures are logged and swallowed so callers are never interrupted.
			if(log.isErrorEnabled()){
				log.error(e.getMessage(),e);
			}
		}
	}
	
	/**
	 * Pages through the downstream ("effect") tables of a cube.
	 * show_direct_down = "1": direct children only (summary-temp tables are skipped
	 * and replaced by their own children, all reported as level 2). Otherwise: the
	 * full downstream closure from the getChildList() DB function, with an
	 * effect_level computed per table by walking the relation graph level by level.
	 * @param cube_id          root table id
	 * @param show_direct_down "1" for direct children only
	 * @param pageSize         page size applied in the result query
	 * @param offset           page offset applied in the result query
	 * @return one row per affected table (cube_id, cube_code, effect_level, ...)
	 */
	private List<Map<String,Object>> getTableEffectList(String cube_id,String show_direct_down,int pageSize,int offset) {
		NamedParameterJdbcTemplate namedParamJdbcTemp = new NamedParameterJdbcTemplate(dmJdbcTemplate);
		List<Map<String,Object>> resultList = new ArrayList<Map<String,Object>>();
		if(StringUtils.equals(show_direct_down, "1")){
			String sql = "select cubes.cube_id,cubes.cube_code,cubes.object_type from t_meta_cube_relation rel,t_meta_cubes cubes where rel.child_id = cubes.cube_id and rel.parent_id = ?";
			List<Map<String,Object>> effectList = dmJdbcTemplate.queryForList(sql,cube_id);
			
			// Split the direct children into real tables and summary-temp tables.
			List<String> effectCubes = new ArrayList<String>();
			List<String> sumTempCubes = new ArrayList<String>();
			for(Map<String,Object> effectMap : effectList){
				String effect_cube_id = CommonUtils.valueOf(effectMap.get("cube_id"));
				String object_type = CommonUtils.valueOf(effectMap.get("object_type"));
				if(StringUtils.equals(object_type, ConstantUtil.SUMMARYTEMP_OBJECT_TYPE)){
					sumTempCubes.add(effect_cube_id);
				}else{
					effectCubes.add(effect_cube_id);
				}
			}
			// Summary-temp tables are transparent: report their children instead.
			if(!sumTempCubes.isEmpty()){
				sql = "select cubes.cube_id,cubes.cube_code,cubes.object_type from t_meta_cube_relation rel,t_meta_cubes cubes where rel.child_id = cubes.cube_id and rel.parent_id in(:cubeIds)";
				Map<String, Object> args = new HashMap<>();
				args.put("cubeIds",sumTempCubes);
				List<Map<String,Object>> sumList =  namedParamJdbcTemp.queryForList(sql, args);
				for(Map<String,Object> sumMap : sumList){
					String effect_cube_id = CommonUtils.valueOf(sumMap.get("cube_id"));
					effectCubes.add(effect_cube_id);
				}
			}
			
			// Direct children are always reported with effect_level = 2.
			sql = "select cube_id,cube_code,2 as effect_level,project_name,business_field_name,object_type,oper_user as create_user from t_meta_cubes where cube_id in(:cubeIds) limit :pageSize offset :offset";
			Map<String, Object> args = new HashMap<>();
			args.put("cubeIds",effectCubes);
			args.put("pageSize", pageSize);
			args.put("offset", offset);
			
			if(!effectCubes.isEmpty()){
				resultList = namedParamJdbcTemp.queryForList(sql, args);
			}
		}else{
			// getChildList() returns a comma-separated id list that includes the root,
			// which is filtered out below.
			String sql = "select getChildList(?) as childs";
			String[] childs = CommonUtils.valueOf(dmJdbcTemplate.queryForMap(sql,cube_id).get("childs")).split(",");
			Set<String> cubes = new HashSet<String>();
			for (String c : childs) {
				if (StringUtils.isNotBlank(c) && !StringUtils.equals(c, cube_id)) {
					cubes.add(c);
				}
			}
			
			// Page the closure first; object_type '24' is excluded from results.
			List<String> cubeIds = new ArrayList<String>(cubes);
			sql = "select cube_id,cube_code,project_name,business_field_name,object_type,oper_user as create_user from t_meta_cubes where object_type !='24' and cube_id in(:cubeIds) limit :pageSize offset :offset";
			Map<String, Object> args = new HashMap<>();
			args.put("cubeIds",cubeIds);
			args.put("pageSize", pageSize);
			args.put("offset", offset);
			
			if(!cubeIds.isEmpty()){
				resultList = namedParamJdbcTemp.queryForList(sql, args);
			}
			// Level walk: re-query the relation table one level at a time to assign
			// each downstream cube its depth. NOTE(review): this binds a single String
			// to :cubeIds for the first iteration (a one-element IN), then lists after.
			sql = "select cubes.cube_id,cubes.cube_code,cubes.object_type from t_meta_cube_relation rel,t_meta_cubes cubes where rel.child_id = cubes.cube_id and rel.parent_id in(:cubeIds)";
			args.put("cubeIds",cube_id);
			int i = 2;//effect-table level counter: the root's children start at level 2
			Map<String,Object> effectData = new HashMap<String,Object>();
			while(true){
				List<Map<String, Object>> effectLevel = namedParamJdbcTemp.queryForList(sql, args);
				if(effectLevel.isEmpty()){
					break;
				}
				List<String> effectCubes = new ArrayList<String>();
				List<String> sumTempCubes = new ArrayList<String>();
				boolean isHasSummaryTemp = false;
				for(Map<String,Object> effectMap : effectLevel){
					String object_type = CommonUtils.valueOf(effectMap.get("object_type"));
					String effect_cube_id = CommonUtils.valueOf(effectMap.get("cube_id"));
					if(StringUtils.equals(object_type, ConstantUtil.SUMMARYTEMP_OBJECT_TYPE)){
						isHasSummaryTemp = true;
						sumTempCubes.add(effect_cube_id);
					}else{
						effectData.put(effect_cube_id, i);
						effectCubes.add(effect_cube_id);
					}
				}
				if(isHasSummaryTemp){// summary temp tables are excluded; query their own children (the summary tables) directly
					Map<String, Object> sumTemp = new HashMap<>();
					sumTemp.put("cubeIds", sumTempCubes);
					List<Map<String, Object>> sumTempEffectLevel = namedParamJdbcTemp.queryForList(sql, sumTemp);
					for(Map<String,Object> effectMap : sumTempEffectLevel){
						String effect_cube_id = CommonUtils.valueOf(effectMap.get("cube_id"));
						effectData.put(effect_cube_id, i);
						effectCubes.add(effect_cube_id);
					}
				}
				// Rebuild the filter with this level's cubes and continue downward.
				// NOTE(review): if a level produced no cubes at all, effectCubes is empty
				// and the next iteration's in(:cubeIds) would fail rather than break
				// cleanly — confirm upstream data makes this impossible.
				args.put("cubeIds", effectCubes);
				i++;
			}
			// Attach the computed depth to each paged row.
			for(Map<String,Object> resultMap : resultList){
				cube_id = CommonUtils.valueOf(resultMap.get("cube_id"));
				resultMap.put("effect_level", effectData.get(cube_id));
			}
		}
		return resultList;
	}
	
	/**
	 * Counts the downstream ("effect") tables of a cube — the count companion to
	 * getTableEffectList, using the same two lookup strategies.
	 */
	private Object getTableEffectCount(String cube_id,String show_direct_down) {
		NamedParameterJdbcTemplate namedTemplate = new NamedParameterJdbcTemplate(dmJdbcTemplate);
		if(StringUtils.equals(show_direct_down, "1")){
			// Direct children only; summary-temp tables are replaced by their children.
			String relSql = "select cubes.cube_id,cubes.cube_code,cubes.object_type from t_meta_cube_relation rel,t_meta_cubes cubes where rel.child_id = cubes.cube_id and rel.parent_id = ?";
			List<String> directCubes = new ArrayList<String>();
			List<String> summaryTempCubes = new ArrayList<String>();
			for(Map<String,Object> row : dmJdbcTemplate.queryForList(relSql,cube_id)){
				String childId = CommonUtils.valueOf(row.get("cube_id"));
				String childType = CommonUtils.valueOf(row.get("object_type"));
				if(StringUtils.equals(childType, ConstantUtil.SUMMARYTEMP_OBJECT_TYPE)){
					summaryTempCubes.add(childId);
				}else{
					directCubes.add(childId);
				}
			}
			if(!summaryTempCubes.isEmpty()){
				String tempSql = "select cubes.cube_id,cubes.cube_code,cubes.object_type from t_meta_cube_relation rel,t_meta_cubes cubes where rel.child_id = cubes.cube_id and rel.parent_id in(:cubeIds)";
				Map<String, Object> tempArgs = new HashMap<>();
				tempArgs.put("cubeIds",summaryTempCubes);
				for(Map<String,Object> row : namedTemplate.queryForList(tempSql, tempArgs)){
					directCubes.add(CommonUtils.valueOf(row.get("cube_id")));
				}
			}
			if(directCubes.isEmpty()){
				return 0;
			}
			Map<String, Object> countArgs = new HashMap<>();
			countArgs.put("cubeIds",directCubes);
			return namedTemplate.queryForMap("select count(1) count from t_meta_cubes where cube_id in(:cubeIds)", countArgs).get("count");
		}
		// Full downstream closure from the getChildList() DB function (root excluded).
		String[] childIds = CommonUtils.valueOf(dmJdbcTemplate.queryForMap("select getChildList(?) as childs",cube_id).get("childs")).split(",");
		Set<String> distinctChildren = new HashSet<String>();
		for (String childId : childIds) {
			if (StringUtils.isNotBlank(childId) && !StringUtils.equals(childId, cube_id)) {
				distinctChildren.add(childId);
			}
		}
		if(distinctChildren.isEmpty()){
			return 0;
		}
		Map<String, Object> countArgs = new HashMap<>();
		countArgs.put("cubeIds", new ArrayList<String>(distinctChildren));
		return namedTemplate.queryForMap("select count(1) count from t_meta_cubes where object_type !='24' and cube_id in(:cubeIds)", countArgs).get("count");
	}

	private Object mapToObj(Map<String, Object> map, Class<?> clz) {
		// Map -> JSON string -> target type round trip via fastjson.
		String json = com.alibaba.fastjson.JSONObject.toJSONString(map);
		return com.alibaba.fastjson.JSONObject.parseObject(json, clz);
	}
	
	private static <T> Predicate<T> distinctByKey(Function<? super T, Object> keyExtractor) {
		// Stateful predicate for Stream.filter: passes only the first element seen
		// per extracted key (Set.add returns true exactly once per key).
		Set<Object> seen = ConcurrentHashMap.newKeySet();
		return element -> seen.add(keyExtractor.apply(element));
	}
	
	/**
	 * Base SELECT for table (cube) detail: joins the collected table metadata with
	 * its project/business-field structure, normalizes object_type codes and display
	 * names, and defaults raw_size to 0. Ends with "where 1 = 1" so callers can
	 * append further "and ..." predicates.
	 * @return the SQL buffer, ready for additional filter clauses
	 */
	private StringBuffer getCubeSql() {
		StringBuffer sql = new StringBuffer();
		sql.append("select t1.table_id as cube_id,");
		sql.append("      t1.table_code as cube_code,");
		// Fall back to the table code when no display name is set.
		sql.append("      case when t1.table_name is null or t1.table_name = '' then t1.table_code else t1.table_name end as cube_name,");
		sql.append("      t1.create_user as oper_user,");
		sql.append("      t2.project_id,");
		sql.append("      t2.project_name,");
		sql.append("      t2.project_code,");
		sql.append("      t2.busin_id as business_field_id,");
		sql.append("      DATE_FORMAT(create_time, '%Y-%m-%d %H:%i:%s') as create_time,");
		// All tag-table variants (37-40, 42, 43) are folded to the canonical code '42'.
		sql.append("	  case");
		sql.append("	     when t1.object_type in ('37','38','39','40','42','43') then '42'");
		sql.append("      else t1.object_type end as object_type,");
		sql.append("	  case when t1.object_type = '10' then '维度'");
		sql.append("	       when t1.object_type in ('11', '50') then '物理表'");
		sql.append("	       when t1.object_type = '51' then '索引'");
		sql.append("	       when t1.object_type = '16' then '维度逻辑表'");
		sql.append("	       when t1.object_type = '17' then '事实逻辑表'");
		sql.append("	       when t1.object_type = '18' then '汇总逻辑表'");
		sql.append("	       when t1.object_type in('37','38','39','40','42','43') then '标签表'");
		sql.append("	   end as object_name,");
		sql.append("       t1.db_type as source_type,");
		sql.append("       t1.ds_id,");
		sql.append("       t1.row_num as num_rows,");
		sql.append("       case when t1.raw_size is null then 0 else t1.raw_size end as raw_size");
		sql.append(" from t_asset_meta_tab_collect t1 left join t_asset_meta_struct t2");
		sql.append("    on t1.table_id = t2.table_id");
		sql.append("  where 1 = 1");
		return sql;
	}
	
	/**
	 * Paged query over process instances (t_ds_process_instance — presumably a
	 * DolphinScheduler table; confirm). Maps the numeric state to a Chinese status
	 * label and formats the run duration as "Nd Nh Nm Ns" ('-' when end_time is null).
	 * Bind order: name LIKE pattern, limit, offset.
	 * @return the SQL text with three positional parameters
	 */
	private String getTaskSql() {
		StringBuffer sql = new StringBuffer();
		sql.append("select name,");
		sql.append("	   case when state = 0 then '提交成功'");
		sql.append("            when state = 1 then '正在执行'");
		sql.append("            when state = 2 then '准备暂停'");
		sql.append("            when state = 3 then '暂停'");
		sql.append("            when state = 4 then '准备停止'");
		sql.append("            when state = 5 then '停止'");
		sql.append("            when state = 6 then '失败'");
		sql.append("            when state = 7 then '成功'");
		sql.append("            when state = 8 then '需要容错'");
		sql.append("            when state = 9 then 'Kill'");
		sql.append("            when state = 10 then '等待线程'");
		sql.append("            when state = 11 then '等待依赖完成'");
		sql.append("            when state = 12 then '延迟执行'");
		sql.append("            when state = 13 then '强制成功' end as status,");
		sql.append("	   date_format(start_time,'%Y-%m-%d %H:%i:%s') as start_time,");
		sql.append("	   date_format(end_time,'%Y-%m-%d %H:%i:%s') as end_time,");
		// Duration pieces (days/hours/minutes/seconds) are emitted only when non-zero;
		// ifnull(...) yields '-' when the timediff is null (still running).
		sql.append("	  ifnull(concat(");
		sql.append("	    if(floor(hour(timediff(end_time,start_time))/24)=0,'',concat(floor(hour(timediff(end_time,start_time))/24),'d ')),");
		sql.append("		if(mod(hour(timediff(end_time,start_time)),24)=0,'',concat(mod(hour(timediff(end_time,start_time)),24), 'h ')),");
		sql.append("		if(minute(timediff(end_time,start_time))=0,'',concat(minute(timediff(end_time,start_time)),'m ')),");
		sql.append("		if(second(timediff(end_time,start_time))=0,'',concat(second(timediff(end_time,start_time)),'s'))),'-') as duration");
		sql.append(" from t_ds_process_instance");
		sql.append(" where name like ?");
		sql.append(" order by schedule_time desc");
		sql.append(" limit ? offset ? ");
		return sql.toString();
	}
	
	/**
	 * Query for Elasticsearch index fields: top-level fields (sub_fields_code is null)
	 * left-joined to their sub-fields, with all sub-fields of a parent packed into one
	 * "child" string — 'id@code@type@analyzer' entries joined by '&' via group_concat.
	 * Binds the same index id twice (parent subquery, then sub-field subquery).
	 * @return the SQL text with two positional parameters
	 */
	private String getEsColumnSql() {
		StringBuffer sql = new StringBuffer();
		sql.append("select fields_id as element_id,");
		sql.append("       fields_code as element_code,");
		sql.append("       fields_type as element_type,");
		sql.append("       analyzer,");
		// Placeholder governance columns to match the column shape of other sources.
		sql.append("       '--' as rel_standards,'--' as info_classify,'--' as safe_level,1 as data_hot,");
		sql.append("       group_concat(concat_ws('@',sub_fields_id,sub_fields_code,sub_type,sub_analyzer) separator '&') child");
		sql.append("  from (select p.fields_id,p.fields_code, p.fields_type,p.analyzer,c.fields_id as sub_fields_id,c.sub_fields_code, c.fields_type as sub_type,c.analyzer as sub_analyzer");
		// p = top-level fields, qualified as "type.fields_code".
		sql.append("          from (select fields_id,concat_ws('.',type,fields_code) as fields_code,fields_type,analyzer");
		sql.append("                  from t_asset_es_fields_collect p");
		sql.append("                 where sub_fields_code is null");
		sql.append("                   and index_id = ?) p");
		// c = sub-fields, joined back to their parent by the qualified field code.
		sql.append("          left join (select fields_id,concat_ws('.',type,fields_code) as fields_code,sub_fields_code,fields_type,analyzer");
		sql.append("                      from t_asset_es_fields_collect");
		sql.append("                     where sub_fields_code is not null");
		sql.append("                       and index_id = ?) c");
		sql.append("            on p.fields_code = c.fields_code) t");
		sql.append(" group by fields_id,fields_code,fields_type,analyzer");
		sql.append(" order by child desc");
		return sql.toString();
	}
}