package avicit.bdp.dcs.datasource.service;

import avicit.bdp.common.datasource.BaseDataSource;
import avicit.bdp.common.dto.StorageResourceConf;
import avicit.bdp.common.service.dto.DataSourceDTO;
import avicit.bdp.common.service.service.DataSourceService;
import avicit.bdp.common.utils.CheckParaUtils;
import avicit.bdp.common.utils.database.ColumnInfo;
import avicit.bdp.common.utils.database.DBUtils;
import avicit.bdp.common.utils.database.IotDbSessionUtils;
import avicit.bdp.common.utils.enums.JobType;
import avicit.bdp.common.utils.uploads.FileAdapterUtils;
import avicit.bdp.core.constant.Constants;
import avicit.bdp.dcs.datasource.dto.JobDatasource;
import avicit.bdp.dcs.datasource.service.ds.BaseDS;
import avicit.bdp.dcs.datasource.service.ds.DSFactory;
import avicit.bdp.dcs.job.dto.JobDTO;
import avicit.bdp.dcs.job.service.DcsJobService;
import avicit.bdp.dcs.task.dto.TaskDTO;
import avicit.bdp.dcs.task.service.TaskService;
import avicit.bdp.dcs.utils.DcsErrorCode;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.AdminClientConfig;
import org.apache.kafka.clients.admin.ConsumerGroupDescription;
import org.apache.kafka.clients.admin.ConsumerGroupListing;
import org.apache.kafka.clients.admin.MemberAssignment;
import org.apache.kafka.clients.admin.MemberDescription;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.PartitionInfo;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;

/**
 * @金航数码科技有限责任公司
 * @作者：developer
 * @邮箱：developer@avic-digital.com
 * @创建时间： 2020-10-26 08:55
 * @类说明：
 * @修改记录：zhuhy通过抽象优化数据源获取类
 */
@Service
@Transactional(rollbackFor = Exception.class)
public class DsDatasourceService {

	private static final Logger logger = LoggerFactory.getLogger(DsDatasourceService.class);

	@Autowired
	private TaskService taskService;
	@Autowired
	private DcsJobService jobService;
	@Autowired
	private BaseDbService baseDbService;
	@Autowired
	private DataSourceService dataSourceService;
	@Autowired
	private DataSourceUtils dataSourceUtils;

	private static final String ID = "id";
	private static final String NAME = "name";
	private static final String DATASOURCE_TYPE = "dataSourceType";
	// Marker key for datasources that are built into the system (1) vs user-created (0).
	private static final String SYSTEM_BUILT = "systemBuilt";
	// Timeout (seconds) for Kafka admin-client futures.
	private static final int TIME_OUT = 10;

	/**
	 * Returns the datasource list for a job type given by its numeric code.
	 * Falls back to {@link JobType#SYNC} when {@code jobTypeCode} is null and
	 * returns an empty list when the code does not map to a known job type.
	 *
	 * @param jobTypeCode numeric job type code (nullable)
	 * @param projectId   project the datasources belong to
	 * @return datasource descriptors (id/name/type/createdBy/systemBuilt maps)
	 */
	public List<Map<String, Object>> getDatasourceList(Integer jobTypeCode, String projectId) {
		List<Map<String, Object>> list = new ArrayList<>();
		if (jobTypeCode == null) {
			// Default to structured (SYNC) collection when no type is supplied.
			jobTypeCode = JobType.SYNC.getCode();
		}

		JobType eJobType = JobType.valueOf(jobTypeCode);
		if (eJobType == null) {
			logger.info("错误的jobType:{}", jobTypeCode);
		} else {
			list = getDatasourceList(eJobType, projectId);
		}

		logger.info("jobType:{}, size:{}", jobTypeCode, list.size());
		return list;
	}

	/**
	 * Returns the datasources of a project that support the given job type,
	 * de-duplicated by physical connection (jdbcUrl + user + password + other).
	 * For {@link JobType#SEMISTRUCTURE} a built-in "file management" datasource
	 * is appended as well.
	 *
	 * @param jobType   job type to filter by
	 * @param projectId project the datasources belong to
	 * @return datasource descriptors (id/name/type/createdBy/systemBuilt maps)
	 */
	public List<Map<String, Object>> getDatasourceList(JobType jobType, String projectId) {
		List<Map<String, Object>> list = new ArrayList<>();
		List<DataSourceDTO> dataSourceList = dataSourceService.getProjectDataSourceList(projectId, null, null);
		if (CollectionUtils.isNotEmpty(dataSourceList)) {
			// Tracks connections we have already emitted, keyed by the physical connection string.
			Map<String, DataSourceDTO> seen = new HashMap<>(16);
			for (DataSourceDTO dataSourceDTO : dataSourceList) {
				String dsType = dataSourceDTO.getDatasourceType();
				BaseDS baseDS = DSFactory.getDatasource(dataSourceDTO);
				if (baseDS == null) {
					logger.error("错误的dsType:{}", dsType);
					continue;
				}

				if (!baseDS.isSupportJobType(jobType)) {
					continue;
				}

				if (!baseDS.checkParams(dataSourceDTO)) {
					continue;
				}
				BaseDataSource baseDataSource = DBUtils.getBaseDataSource(dataSourceDTO);
				String key = baseDataSource.getJdbcUrl() + baseDataSource.getUser()
						+ baseDataSource.getPassword() + baseDataSource.getOther();
				if (seen.get(key) == null) {
					Map<String, Object> dataSourceMap = new HashMap<>(8);
					dataSourceMap.put(ID, dataSourceDTO.getId());
					dataSourceMap.put(NAME, dataSourceDTO.getDatasourceName());
					dataSourceMap.put(DATASOURCE_TYPE, dsType);
					dataSourceMap.put("createdBy", dataSourceDTO.getCreatedBy());
					dataSourceMap.put(SYSTEM_BUILT, 0);
					list.add(dataSourceMap);
					// BUGFIX: record the key so later duplicates are skipped. This put was
					// previously commented out, which made the dedup check above a no-op.
					seen.put(key, dataSourceDTO);
				}
			}
		}

		// File collection jobs additionally get the built-in file-management datasource.
		if (jobType == JobType.SEMISTRUCTURE) {
			try {
				list.add(getDefaultFileDataSource(projectId));
			} catch (Exception e) {
				// Log with stack trace instead of swallowing the failure detail.
				logger.error("获取默认内置的文件管理的数据源失败", e);
			}
		}

		// BUGFIX: guard against a null list — the isNotEmpty() check above implies it can
		// be null, but the original called dataSourceList.size() here unconditionally.
		int sourceCount = dataSourceList == null ? 0 : dataSourceList.size();
		logger.info("dataSourceList.size:{}, list.size:{}, jobType:{}", sourceCount, list.size(), jobType);
		if (list.isEmpty()) {
			logger.error("获取数据源报错,数据源为空");
		}
		return list;
	}

	/**
	 * Builds the built-in "file management" datasource descriptor for a project.
	 * The datasource type is derived from the project's storage resource type
	 * (2 = HDFS, 3 = OOS; any other type yields an empty type string).
	 *
	 * @param projectId project whose storage configuration is looked up
	 * @return descriptor map (id/name/type/createdBy/systemBuilt)
	 */
	private Map<String, Object> getDefaultFileDataSource(String projectId) {
		StorageResourceConf conf = FileAdapterUtils.getStorageResourceByProjectId(projectId);
		String dsType = "";
		if (conf.getType() == 2) {
			dsType = Constants.DB_TYPE_HDFS;
		} else if (conf.getType() == 3) {
			dsType = Constants.DB_TYPE_OOS;
		}

		Map<String, Object> dataSourceMap = new HashMap<>(4);
		dataSourceMap.put(ID, conf.getId());
		dataSourceMap.put(NAME, "文件管理");
		dataSourceMap.put(DATASOURCE_TYPE, dsType);
		dataSourceMap.put("createdBy", "1");
		dataSourceMap.put(SYSTEM_BUILT, 1);
		return dataSourceMap;
	}

	/**
	 * Returns the tables of a datasource keyed by its current schema.
	 *
	 * @param datasourceId datasource ID
	 * @return single-entry map: current schema name -> sorted table names
	 * @throws Exception if the datasource cannot be queried
	 */
	public Map<String, List<String>> getTableListByDatasourceId(String datasourceId) throws Exception {
		Map<String, List<String>> map = new HashMap<>(4);
		List<String> tableList = baseDbService.getTableList(datasourceId);

		// Sort table names ascending for stable display.
		Collections.sort(tableList);
		String currentSchema = baseDbService.getCurrentSchema(datasourceId);
		map.put(currentSchema, tableList);
		return map;
	}

	/**
	 * Returns the sorted table names of a datasource.
	 *
	 * @param datasourceId datasource ID
	 * @return table names in ascending order
	 * @throws Exception if the datasource cannot be queried
	 */
	public List<String> getTablesByDatasourceId(String datasourceId) throws Exception {
		List<String> tableList = baseDbService.getTableList(datasourceId);

		// Sort table names ascending for stable display.
		Collections.sort(tableList);
		return tableList;
	}

	/**
	 * Resolves a datasource ID to its {@link BaseDataSource} connection descriptor.
	 *
	 * @param datasourceId datasource ID
	 * @return BaseDataSource built from the stored configuration
	 */
	public BaseDataSource getDatasource(String datasourceId) {
		DataSourceDTO dataSourceDTO = dataSourceService.queryDataSourceById(datasourceId);
		return DBUtils.getBaseDataSource(dataSourceDTO);
	}

	/**
	 * Returns the primary-key columns of the source table of a task.
	 *
	 * @param taskId task ID
	 * @return primary-key column metadata
	 * @throws Exception if the task/job is missing or the table cannot be inspected
	 */
	public List<ColumnInfo> getPrimaryKeyList(String taskId) throws Exception {
		TaskDTO taskDTO = taskService.selectByPrimaryKey(taskId);
		CheckParaUtils.checkDbObjectValidity(taskDTO, logger, String.format(DcsErrorCode.TASK_NULL.getDesc(), taskId));

		JobDTO jobDTO = jobService.selectByPrimaryKey(taskDTO.getJobId());
		CheckParaUtils.checkDbObjectValidity(jobDTO, logger, String.format(DcsErrorCode.JOB_NULL.getDesc(), taskDTO.getJobId()));

		return baseDbService.getPrimaryKeyList(jobDTO.getSrcDatasourceId(), taskDTO.getSrcTableName());
	}

	/**
	 * Returns the date/time-typed columns of the source table of a task.
	 *
	 * @param taskId task ID
	 * @return date/time column metadata
	 * @throws Exception if the task/job is missing or the table cannot be inspected
	 */
	public List<ColumnInfo> getDateTimeColumnList(String taskId) throws Exception {
		TaskDTO taskDTO = taskService.selectByPrimaryKey(taskId);
		CheckParaUtils.checkDbObjectValidity(taskDTO, logger, String.format(DcsErrorCode.TASK_NULL.getDesc(), taskId));

		JobDTO jobDTO = jobService.selectByPrimaryKey(taskDTO.getJobId());
		CheckParaUtils.checkDbObjectValidity(jobDTO, logger, String.format(DcsErrorCode.JOB_NULL.getDesc(), taskDTO.getJobId()));

		return baseDbService.getDateTimeColumnList(jobDTO.getSrcDatasourceId(), taskDTO.getSrcTableName());
	}

	/**
	 * Lists the IoTDB devices of a job's destination datasource.
	 *
	 * @param jobId job ID
	 * @return IoTDB device names
	 * @throws Exception if the job is missing or the IoTDB session fails
	 */
	public List<String> getDeviceList(String jobId) throws Exception {
		JobDTO jobDTO = jobService.selectByPrimaryKey(jobId);
		CheckParaUtils.checkDbObjectValidity(jobDTO, logger, String.format(DcsErrorCode.JOB_NULL.getDesc(), jobId));

		JobDatasource dstDatasource = dataSourceUtils.convertDatasource(jobDTO.getDstDatasourceId());
		IotDbSessionUtils.initIotdbSessionPool(dstDatasource.getJdbcUrl(), dstDatasource.getJdbcUsername(), dstDatasource.getEncryptJdbcPassword());
		return IotDbSessionUtils.getDeviceList();
	}

	/**
	 * Returns the Kafka topics and their partition ids for a datasource,
	 * sorted by topic name.
	 *
	 * @param datasourceId datasource ID
	 * @return ordered map: topic name -> partition-id strings
	 * @throws Exception if the datasource cannot be resolved
	 */
	public Map<String, List<String>> getKafkaTopicsAndPartitionByDataSourceId(String datasourceId) throws Exception {
		// Resolve the datasource configuration.
		JobDatasource datasource = dataSourceUtils.convertDatasource(datasourceId);
		Map<String, List<String>> kafkaInfo = getKafkaInfo(datasource);
		// Sort the result by topic name, preserving that order in a LinkedHashMap.
		return kafkaInfo.entrySet().stream()
				.sorted(Map.Entry.comparingByKey())
				.collect(Collectors.toMap(
						Map.Entry::getKey,
						Map.Entry::getValue,
						(oldVal, newVal) -> oldVal,
						LinkedHashMap::new
				));
	}

	/**
	 * Queries a Kafka cluster for all topics and their partition ids.
	 *
	 * @param datasource datasource whose jdbcUrl holds the bootstrap servers
	 * @return map: topic name -> partition-id strings
	 */
	private Map<String, List<String>> getKafkaInfo(JobDatasource datasource) {
		Properties properties = new Properties();
		properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, datasource.getJdbcUrl());
		properties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
		properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
		Map<String, List<String>> map = new HashMap<>(16);
		// BUGFIX: close the consumer when done — the original leaked it.
		try (KafkaConsumer<String, String> kafkaConsumer = new KafkaConsumer<>(properties)) {
			for (Map.Entry<String, List<PartitionInfo>> entry : kafkaConsumer.listTopics().entrySet()) {
				List<String> partitions = new ArrayList<>();
				for (PartitionInfo partitionInfo : entry.getValue()) {
					partitions.add(String.valueOf(partitionInfo.partition()));
				}
				map.put(entry.getKey(), partitions);
			}
		}
		return map;
	}

	/**
	 * Returns the active consumer-group ids that currently have a member
	 * assigned to the given topic. Note: describeConsumerGroups only reports
	 * members of active groups, so inactive groups are not found here.
	 *
	 * @param datasourceId datasource ID (bootstrap servers in its jdbcUrl)
	 * @param topic        topic name to match against member assignments
	 * @return consumer-group ids consuming the topic (each id at most once)
	 */
	public List<String> getConsumerGroupByTopic(String datasourceId, String topic) {
		// Group ids that have at least one member assigned to the topic.
		List<String> consumerGroupIds = new ArrayList<>();
		// All consumer-group ids known to the cluster.
		List<String> allGroupIds = new ArrayList<>();

		try {
			// step1: resolve the datasource configuration.
			JobDatasource datasource = dataSourceUtils.convertDatasource(datasourceId);

			// step2: list every consumer group on the cluster.
			Properties props = new Properties();
			props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, datasource.getJdbcUrl());
			// BUGFIX: close the AdminClient when done — the original leaked it.
			try (AdminClient client = AdminClient.create(props)) {
				for (ConsumerGroupListing consumerGroupListing : client.listConsumerGroups().valid().get(TIME_OUT, TimeUnit.SECONDS)) {
					// BUGFIX: was System.out.println — use the class logger.
					logger.debug("found consumer group: {}", consumerGroupListing.groupId());
					allGroupIds.add(consumerGroupListing.groupId());
				}

				// step3: describe the groups and keep those with a member assigned to the topic
				// (only active groups expose member assignments).
				Map<String, ConsumerGroupDescription> allGroupDetails =
						client.describeConsumerGroups(allGroupIds).all().get(TIME_OUT, TimeUnit.SECONDS);
				for (Map.Entry<String, ConsumerGroupDescription> entry : allGroupDetails.entrySet()) {
					String groupId = entry.getKey();
					ConsumerGroupDescription description = entry.getValue();
					// BUGFIX: stop after the first match so a group with several matching
					// partitions/members is not added multiple times.
					members:
					for (MemberDescription memberDescription : description.members()) {
						MemberAssignment assignment = memberDescription.assignment();
						for (TopicPartition topicPartition : assignment.topicPartitions()) {
							if (topicPartition.topic().equals(topic)) {
								consumerGroupIds.add(groupId);
								break members;
							}
						}
					}
				}
			}
		} catch (Exception e) {
			// BUGFIX: keep the original exception as the cause instead of flattening
			// it into the message (preserves the stack trace for diagnosis).
			throw new RuntimeException("获取Kafka中消费者组失败,errMsg=" + e.getMessage(), e);
		}

		return consumerGroupIds;
	}
}
