package com.openlogic.logic.service.datasource.impl;

import java.sql.Connection;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.stream.Collectors;
import java.util.stream.Stream;

import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.noear.solon.annotation.Component;
import org.noear.solon.annotation.Inject;
import org.noear.solon.cloud.CloudClient;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.fasterxml.jackson.databind.node.ObjectNode;
import com.mybatisflex.core.query.QueryWrapper;
import com.openlogic.common.core.constants.CacheConstant;
import com.openlogic.common.core.exception.logic.RuleException;
import com.openlogic.common.core.web.domain.model.LoginUser;
import com.openlogic.common.plugins.db.api.dto.BaseDataSourceParamDto;
import com.openlogic.common.plugins.db.api.utils.DataSourceUtils;
import com.openlogic.common.plugins.db.api.utils.JSONUtils;
import com.openlogic.common.plugins.spi.constants.CommonConstants;
import com.openlogic.common.plugins.spi.enums.DbType;
import com.openlogic.common.redis.service.RedisService;
import com.openlogic.common.security.utils.SecurityUtil;
import com.openlogic.datasource.api.entity.DataColumnEntity;
import com.openlogic.datasource.api.entity.DataSourceEntity;
import com.openlogic.datasource.api.entity.DataTableEntity;
import com.openlogic.datasource.api.entity.table.Tables;
import com.openlogic.datasource.api.enums.DataCategory;
import com.openlogic.datasource.api.enums.DataLevel;
import com.openlogic.datasource.api.enums.DataStatus;
import com.openlogic.datasource.api.enums.SyncStatus;
import com.openlogic.datasource.api.event.DataCacheEvent;
import com.openlogic.datasource.api.event.DataCacheEventModel;
import com.openlogic.datasource.api.event.DataDeleteEvent;
import com.openlogic.datasource.api.event.DataDeleteEventModel;
import com.openlogic.datasource.api.param.BaseConnectionParam;
import com.openlogic.datasource.api.param.ConnectionParam;
import com.openlogic.datasource.api.query.DataColumnQuery;
import com.openlogic.datasource.api.query.DataSourceQuery;
import com.openlogic.datasource.api.vo.DataTreeVo;
import com.openlogic.logic.async.AsyncTask;
import com.openlogic.logic.mapper.datasource.DataColumnMapper;
import com.openlogic.logic.mapper.datasource.DataSourceMapper;
import com.openlogic.logic.mapper.datasource.DataTableMapper;
import com.openlogic.logic.service.datasource.DataSourceService;

import cn.hutool.core.util.StrUtil;

/**
 * @ClassName: DataSourceServiceImpl
 * @Description: 数据源Service业务层实现
 * @author weiyongpeng
 * @date 2024年11月6日 下午8:49:29
 */
@Component
public class DataSourceServiceImpl implements DataSourceService {

	private static final Logger log = LoggerFactory.getLogger(DataSourceServiceImpl.class);

	@Inject
	private RedisService redisService;

	@Inject
    private AsyncTask asyncTask;

	@Inject
	private DataSourceMapper dataSourceMapper;

	@Inject
	private DataTableMapper dataTableMapper;

	@Inject
	private DataColumnMapper dataColumnMapper;

	private DataCacheEvent dataCacheEvent = new DataCacheEvent();

	private DataDeleteEvent dataDeleteEvent = new DataDeleteEvent();

	/**
	 * Groups every supported connector/datasource type name by category.
	 * EXCEL and API types map to their own category keys; all remaining
	 * types are treated as relational databases (RDB).
	 *
	 * @return map of category key to the datasource type names in that category
	 */
	@Override
	public Map<String, List<String>> getConnectorTypeAll() {
		return DataSourceUtils.getDatasourceTypes().stream()
				.collect(Collectors.groupingBy(typeName -> {
					if (typeName.equalsIgnoreCase(DataCategory.EXCEL.getKey())) {
						return DataCategory.EXCEL.getKey();
					}
					if (typeName.equalsIgnoreCase(DataCategory.API.getKey())) {
						return DataCategory.API.getKey();
					}
					return DataCategory.RDB.getKey();
				}));
	}

	/**
	 * Creates a new datasource: validates the request, rejects duplicate
	 * names, verifies connectivity, persists the entity and finally publishes
	 * a cache-refresh event so subsequent reads see the new datasource.
	 *
	 * @param dataSourceParam creation request payload
	 * @throws RuleException when validation, connectivity or persistence fails
	 */
	@Override
	public void createDataSource(BaseDataSourceParamDto dataSourceParam) {
		LoginUser loginUser = SecurityUtil.getLoginUser();
		DataSourceUtils.checkDatasourceParam(dataSourceParam);
		// Reject duplicate datasource names up front.
		if (checkName(dataSourceParam.getName())) {
			throw new RuleException("数据源名称已存在");
		}

		// Verify the connection actually works before persisting anything.
		ConnectionParam connectionParam = DataSourceUtils.buildConnectionParams(dataSourceParam);
		checkConnection(dataSourceParam.getType(), connectionParam);

		// Both audit columns carry the same operator id on creation.
		String operatorId = String.valueOf(loginUser.getSysUser().getUserId());

		// Assemble the entity to persist.
		DataSourceEntity entity = new DataSourceEntity();
		entity.setId(String.valueOf(CloudClient.id().generate()));
		entity.setName(dataSourceParam.getName().trim());
		entity.setNote(dataSourceParam.getDesc());
		entity.setDbType(dataSourceParam.getType().getDb());
		entity.setStatus(DataStatus.ENABLE.getKey());
		entity.setIsSync(SyncStatus.NotSync.getKey());
		entity.setCreateBy(operatorId);
		entity.setUpdateBy(operatorId);
		entity.setCategory(dataSourceParam.getCategory());
		entity.setConnectionParams(connectionParam);

		try {
			dataSourceMapper.insert(entity);
		} catch (Exception ex) {
			log.error("Create datasource error.", ex);
			throw new RuleException("数据源创建失败");
		}
		// Publish a cache refresh so queries can be served from Redis.
		dataCacheEvent.publishCacheEvent(new DataCacheEventModel(CacheConstant.METADATA_SOURCE_KEY));
	}

	/**
	 * Opens (and immediately closes) a connection to verify that the given
	 * parameters are valid for the database type.
	 *
	 * @param type            database type to test against
	 * @param connectionParam connection settings to test
	 * @return {@code Boolean.TRUE} when the connection succeeds
	 * @throws RuleException when a connection cannot be established
	 */
	@Override
	public Boolean checkConnection(DbType type, ConnectionParam connectionParam) {
		try (Connection conn = DataSourceUtils.getConnection(type, connectionParam)) {
			if (conn != null) {
				return Boolean.TRUE;
			}
			throw new RuleException("测试数据源连接失败");
		} catch (Exception e) {
			// Prefer the root cause's message when one is available.
			Throwable cause = e.getCause();
			String message = (cause != null && cause.getMessage() != null) ? cause.getMessage() : e.getMessage();
			log.error("datasource test connection error, dbType:{}, connectionParam:{}, message:{}.", type,
					connectionParam, message);
			throw new RuleException("测试数据源连接失败");
		}
	}

	/**
	 * Checks whether a datasource with the given (trimmed) name already exists.
	 *
	 * @param name candidate datasource name; must be non-null
	 * @return {@code true} when at least one datasource already uses the name
	 */
	private boolean checkName(String name) {
		QueryWrapper byName = QueryWrapper.create().where(Tables.dataSource.name.eq(name.trim()));
		return CollectionUtils.isNotEmpty(dataSourceMapper.selectListByQuery(byName));
	}

	/**
	 * Updates an existing datasource: validates the request, enforces name
	 * uniqueness only when the name actually changed, keeps the stored
	 * password when the request leaves it blank, re-tests connectivity, then
	 * persists and publishes a cache-refresh event.
	 *
	 * @param id              id of the datasource to update
	 * @param dataSourceParam updated datasource attributes
	 * @throws RuleException when the datasource is missing, the new name is
	 *                       taken, the connection test fails, or persistence fails
	 */
	@Override
	public void updateDataSource(String id, BaseDataSourceParamDto dataSourceParam) {
		LoginUser loginUser = SecurityUtil.getLoginUser();
		DataSourceUtils.checkDatasourceParam(dataSourceParam);
		// Make sure the datasource exists before touching anything.
		DataSourceEntity dataSource = dataSourceMapper.selectOneById(id);
		if (dataSource == null) {
			throw new RuleException("数据源不存在");
		}
		// Bug fix: the original compared the stored name against itself
		// (dataSource.getName().trim() vs dataSource.getName()), so the
		// duplicate-name check effectively never ran. Compare the NEW name
		// from the request instead, and only check uniqueness when it changed.
		String newName = dataSourceParam.getName().trim();
		if (!newName.equals(dataSource.getName()) && checkName(newName)) {
			throw new RuleException("数据源名称已存在");
		}

		// If the request omits the password, keep the previously stored one.
		BaseConnectionParam connectionParam = (BaseConnectionParam) DataSourceUtils
				.buildConnectionParams(dataSourceParam);
		if (StringUtils.isBlank(connectionParam.getPassword())) {
			ConnectionParam oldConnectionParams = dataSource.getConnectionParams();
			ObjectNode oldParams = JSONUtils.parseObject(JSONUtils.toJsonString(oldConnectionParams));
			connectionParam.setPassword(oldParams.path(CommonConstants.PASSWORD).asText());
		}
		checkConnection(DbType.getDbType(dataSource.getDbType()), connectionParam);

		// Apply the updated attributes.
		dataSource.setName(newName);
		dataSource.setNote(dataSourceParam.getDesc());
		dataSource.setDbType(dataSourceParam.getType().getDb());
		dataSource.setStatus(DataStatus.ENABLE.getKey());
		dataSource.setIsSync(SyncStatus.NotSync.getKey());
		// Consistent with createDataSource: operator id comes from SysUser.
		dataSource.setUpdateBy(String.valueOf(loginUser.getSysUser().getUserId()));
		dataSource.setConnectionParams(connectionParam);
		try {
			dataSourceMapper.update(dataSource);
		} catch (Exception ex) {
			// Corrected copy-pasted "create" messages — this is an update.
			log.error("Update datasource error.", ex);
			throw new RuleException("数据源更新失败");
		}
		// Publish a cache refresh so subsequent reads see the new state.
		dataCacheEvent.publishCacheEvent(new DataCacheEventModel(CacheConstant.METADATA_SOURCE_KEY));
	}

	/**
	 * Looks up a datasource by id, preferring the Redis cache and falling
	 * back to a database lookup when the id is not found in the cache.
	 *
	 * @param id datasource id
	 * @return the cached match, otherwise the database result (possibly null)
	 */
	@Override
	public DataSourceEntity getDataSourceById(String id) {
		List<DataSourceEntity> cached = redisService.getCacheList(CacheConstant.METADATA_SOURCE_KEY);
		if (cached != null) {
			for (DataSourceEntity source : cached) {
				if (source.getId().equals(id)) {
					return source;
				}
			}
		}
		return dataSourceMapper.selectOneById(id);
	}

	/**
	 * Lists datasources, preferring the Redis cache and falling back to a
	 * database query when the cache yields nothing. An optional exact-name
	 * filter is applied in both paths.
	 *
	 * @param dataSourceQuery query object holding an optional name filter
	 * @return matching datasources (possibly empty)
	 */
	@Override
	public List<DataSourceEntity> getDataSourceList(DataSourceQuery dataSourceQuery) {
		String name = dataSourceQuery.getName();
		List<DataSourceEntity> sourceList = redisService.getCacheList(CacheConstant.METADATA_SOURCE_KEY);
		Stream<DataSourceEntity> stream = Optional.ofNullable(sourceList).orElseGet(ArrayList::new).stream();
		if (StringUtils.isNotBlank(name)) {
			stream = stream.filter(s -> s.getName().equals(name));
		}
		List<DataSourceEntity> result = stream.collect(Collectors.toList());
		if (CollectionUtils.isNotEmpty(result)) {
			return result;
		}
		// Database fallback. Bug fix: the original unconditionally called
		// dataSourceQuery.getName().trim() here, which threw a
		// NullPointerException for queries without a name filter; only add
		// the name predicate when a name was actually supplied.
		QueryWrapper query = QueryWrapper.create();
		if (StringUtils.isNotBlank(name)) {
			query.where(Tables.dataSource.name.eq(name.trim()));
		}
		return dataSourceMapper.selectListByQuery(query);
	}

	/**
	 * Deletes a single datasource by id, then publishes events to refresh the
	 * datasource cache and cascade-delete the datasource's tables and columns.
	 *
	 * @param id datasource id to delete
	 * @throws RuleException when the delete (or event publication) fails
	 */
	@Override
	public void deleteDataSourceById(String id) {
		try {
			dataSourceMapper.deleteById(id);
			// Refresh the datasource cache.
			dataCacheEvent.publishCacheEvent(new DataCacheEventModel(CacheConstant.METADATA_SOURCE_KEY));
			// Cascade-delete the datasource's tables and columns via event.
			dataDeleteEvent.publishDeleteEvent(new DataDeleteEventModel(id));
		} catch (Exception e) {
			// SLF4J fix: pass the throwable as the trailing argument (no "{}"
			// placeholder) so the full stack trace is logged.
			log.error("删除失败, id:{}", id, e);
			throw new RuleException("单条删除操作失败!");
		}
	}

	/**
	 * Deletes a batch of datasources by id, then publishes events to refresh
	 * the datasource cache and cascade-delete their tables and columns.
	 *
	 * @param ids datasource ids to delete
	 * @throws RuleException when the batch delete (or event publication) fails
	 */
	@Override
	public void deleteDataSourceBatch(List<String> ids) {
		try {
			dataSourceMapper.deleteBatchByIds(ids);
			// Refresh the datasource cache.
			dataCacheEvent.publishCacheEvent(new DataCacheEventModel(CacheConstant.METADATA_SOURCE_KEY));
			// Cascade-delete the datasources' tables and columns via event.
			dataDeleteEvent.publishDeleteEvent(new DataDeleteEventModel(ids));
		} catch (Exception e) {
			// SLF4J fix: pass the throwable as the trailing argument (no "{}"
			// placeholder) so the full stack trace is logged.
			log.error("删除失败, ids:{}", ids, e);
			throw new RuleException("批量删除操作失败!");
		}
	}

	/**
	 * Triggers an asynchronous metadata sync for the given datasource.
	 * Rejects the request when a sync is already in flight (while resetting
	 * the flag so a stuck sync can be retried), verifies connectivity, flips
	 * the status to "in sync" and hands the work to the async task runner.
	 *
	 * @param id datasource id to sync
	 * @throws RuleException when a sync is already running or the connection fails
	 */
	@Override
	public void syncMetadata(String id) {
		// NOTE(review): selectOneById may return null for an unknown id; the
		// next statement would then throw a NullPointerException — confirm
		// callers always pass a valid id, or add a null check.
		DataSourceEntity dataSourceEntity = dataSourceMapper.selectOneById(id);
		if (SyncStatus.InSync.getKey().equals(dataSourceEntity.getIsSync())) {
			// A sync is already running: reset the flag (presumably so a
			// stuck sync can be retried on the next call — TODO confirm) and
			// reject this request.
			dataSourceEntity.setIsSync(SyncStatus.NotSync.getKey());
			dataSourceMapper.update(dataSourceEntity);
			throw new RuleException("元数据同步中");
		}
		// Verify connectivity before flipping the status to "in sync".
		ConnectionParam connectionParams = dataSourceEntity.getConnectionParams();
		Boolean flag = this.checkConnection(DbType.getDbType(dataSourceEntity.getDbType()), connectionParams);
		if (flag) {
			dataSourceEntity.setIsSync(SyncStatus.InSync.getKey());
			dataSourceMapper.update(dataSourceEntity);
			// Run the actual sync asynchronously.
	        asyncTask.doTask(dataSourceEntity);
		} else {
			// Defensive branch: checkConnection (above) either returns TRUE
			// or throws, so this path appears unreachable in practice.
			dataSourceEntity.setIsSync(SyncStatus.NotSync.getKey());
			dataSourceMapper.update(dataSourceEntity);
			throw new RuleException("连接异常,请检查连接配置!");
		}
	}

	/**
	 * Rebuilds the Redis metadata caches (sources, tables, columns) from the
	 * database. Each cache key is evicted first, then repopulated only when
	 * the database actually returned rows.
	 */
	@Override
	public void refreshMetadata() {
		// Sources are cached as a flat list.
		evictCacheKey(CacheConstant.METADATA_SOURCE_KEY);
		List<DataSourceEntity> sourceEntityList = dataSourceMapper.selectListByQuery(QueryWrapper.create());
		if (CollectionUtils.isNotEmpty(sourceEntityList)) {
			redisService.setCacheList(CacheConstant.METADATA_SOURCE_KEY, sourceEntityList);
		}

		// Tables are cached as a map keyed by owning datasource id.
		evictCacheKey(CacheConstant.METADATA_TABLE_KEY);
		List<DataTableEntity> tableEntityList = dataTableMapper.selectListByQuery(QueryWrapper.create());
		if (CollectionUtils.isNotEmpty(tableEntityList)) {
			Map<String, List<DataTableEntity>> tableListMap = tableEntityList.stream()
					.collect(Collectors.groupingBy(DataTableEntity::getSourceId));
			redisService.setCacheMap(CacheConstant.METADATA_TABLE_KEY, tableListMap);
		}

		// Columns are cached as a map keyed by owning table id.
		evictCacheKey(CacheConstant.METADATA_COLUMN_KEY);
		List<DataColumnEntity> columnEntityList = dataColumnMapper.selectListByQuery(QueryWrapper.create());
		if (CollectionUtils.isNotEmpty(columnEntityList)) {
			Map<String, List<DataColumnEntity>> columnListMap = columnEntityList.stream()
					.collect(Collectors.groupingBy(DataColumnEntity::getTableId));
			redisService.setCacheMap(CacheConstant.METADATA_COLUMN_KEY, columnListMap);
		}
	}

	/**
	 * Deletes the given Redis key when it exists. Extracted from three
	 * copy-pasted evict sequences in refreshMetadata; Boolean.TRUE.equals
	 * also guards against a null return from hasKey (the original unboxed it).
	 */
	private void evictCacheKey(String key) {
		if (Boolean.TRUE.equals(redisService.hasKey(key))) {
			redisService.deleteObject(key);
		}
	}

	/**
	 * Builds a metadata tree grouped by datasource category (RDB / EXCEL /
	 * API): one transient node per enabled datasource, grouped by category
	 * key, with the children of same-category nodes merged into one flat
	 * list per key.
	 *
	 * @param level               deepest {@link DataLevel} to expand down to
	 * @param metadataColumnQuery optional filters (source id/name, table id)
	 * @return map of category key to merged child tree nodes
	 */
	@Override
	public Map<String, List<DataTreeVo>> getDataMetadataTree(String level, DataColumnQuery metadataColumnQuery) {
		List<DataSourceEntity> sourceList = redisService.getCacheList(CacheConstant.METADATA_SOURCE_KEY);
		// Cache miss falls back to the database (enabled sources only).
		Stream<DataSourceEntity> stream = Optional.ofNullable(sourceList)
				.orElseGet(() -> dataSourceMapper.selectListByQuery(
						QueryWrapper.create().where(DataSourceEntity::getStatus).eq((DataStatus.ENABLE.getKey()))))
				.stream();
		// Source id takes precedence over source name when both are supplied.
		if (StrUtil.isNotBlank(metadataColumnQuery.getSourceId())) {
			stream = stream.filter(s -> metadataColumnQuery.getSourceId().equals(s.getId()));
		} else if (StrUtil.isNotBlank(metadataColumnQuery.getSourceName())) {
			stream = stream.filter(s -> metadataColumnQuery.getSourceName().equals(s.getName()));
		}
		// One node per enabled source; the node only carries its category key
		// (as type) and its expanded children, and is discarded after merging.
		Map<String, List<DataTreeVo>> list = stream.filter(s -> DataStatus.ENABLE.getKey().equals(s.getStatus()))
				.map(m -> {
					DataTreeVo tree = new DataTreeVo();
					tree.setType(DataCategory.getCategory(m.getCategory()).getKey());
					tree.setChildren(
							getDataBaseChildrens(m.getId(), level, m.getCategory(), metadataColumnQuery.getTableId()));
					return tree;
				}).collect(Collectors.groupingBy(DataTreeVo::getType));

		// Merge the children of all nodes that share a category key.
		Map<String, List<DataTreeVo>> mergedList = new HashMap<>();
		list.forEach((key, value) -> {
			List<DataTreeVo> combinedChildren = value.stream().flatMap(dataTreeVo -> dataTreeVo.getChildren().stream())
					.collect(Collectors.toList());
			mergedList.put(key, combinedChildren);
		});

		return mergedList;
	}

	/**
	 * Builds the datasource tree shown in the SQL console: one DATABASE-level
	 * node per enabled datasource, expanded down to column children.
	 *
	 * @param dataColumnQuery optional filters (source name or db type, table id)
	 * @return tree nodes for every matching enabled datasource
	 */
	@Override
	public List<DataTreeVo> getSqlConsoleDataTree(DataColumnQuery dataColumnQuery) {
		List<DataSourceEntity> cached = redisService.getCacheList(CacheConstant.METADATA_SOURCE_KEY);
		// Cache miss falls back to the database (enabled sources only).
		List<DataSourceEntity> sources = Optional.ofNullable(cached)
				.orElseGet(() -> dataSourceMapper.selectListByQuery(
						QueryWrapper.create().where(DataSourceEntity::getStatus).eq((DataStatus.ENABLE.getKey()))));
		// Source name takes precedence over db type when both are supplied.
		Stream<DataSourceEntity> stream = sources.stream();
		if (StrUtil.isNotBlank(dataColumnQuery.getSourceName())) {
			stream = stream.filter(s -> dataColumnQuery.getSourceName().equals(s.getName()));
		} else if (StrUtil.isNotBlank(dataColumnQuery.getDbType())) {
			stream = stream.filter(s -> DbType.ofName(dataColumnQuery.getDbType()).getDb().equals(s.getDbType()));
		}
		return stream.filter(s -> DataStatus.ENABLE.getKey().equals(s.getStatus()))
				.map(source -> {
					DataTreeVo node = new DataTreeVo();
					node.setId(source.getId());
					node.setType(DataLevel.DATABASE.getKey());
					node.setLabel(source.getName());
					node.setName(source.getName());
					node.setChildren(getTableChildrens(source.getId(), DataLevel.COLUMN.getKey(),
							DataCategory.RDB.getLevel().toString(), dataColumnQuery.getTableId()));
					return node;
				}).collect(Collectors.toList());
	}

	/**
	 * Builds a datasource tree keyed by database-type description: one node
	 * per enabled datasource labelled with its db-type description, with
	 * database/table children below it.
	 *
	 * @param dataColumnQuery optional filters (source name, table id)
	 * @return tree nodes for every matching enabled datasource
	 */
	@Override
	public List<DataTreeVo> getDataSourceTree(DataColumnQuery dataColumnQuery) {
		List<DataSourceEntity> sourceList = redisService.getCacheList(CacheConstant.METADATA_SOURCE_KEY);
		// Cache miss falls back to the database (enabled sources only).
		Stream<DataSourceEntity> stream = Optional.ofNullable(sourceList)
				.orElseGet(() -> dataSourceMapper.selectListByQuery(
						QueryWrapper.create().where(DataSourceEntity::getStatus).eq((DataStatus.ENABLE.getKey()))))
				.stream();

		if (StrUtil.isNotBlank(dataColumnQuery.getSourceName())) {
			stream = stream.filter(s -> dataColumnQuery.getSourceName().equals(s.getName()));
		}
		List<DataTreeVo> list = stream.filter(s -> DataStatus.ENABLE.getKey().equals(s.getStatus())).map(m -> {
			DataTreeVo tree = new DataTreeVo();
			// Hoisted: the original resolved DbType.getDbType(...) three
			// times per element for the same value.
			String dbTypeDesc = DbType.getDbType(m.getDbType()).getDesc();
			tree.setId(m.getId());
			tree.setType(dbTypeDesc);
			tree.setLabel(dbTypeDesc);
			tree.setName(dbTypeDesc);
			tree.setChildren(getDataBaseChildrens(m.getId(), DataLevel.DATABASE.getKey(), m.getCategory(),
					dataColumnQuery.getTableId()));
			return tree;
		}).collect(Collectors.toList());
		return list;
	}

	/**
	 * Builds the database-level tree nodes for a datasource, recursing into
	 * table (and, depending on {@code level}, column) children.
	 *
	 * @param id       datasource id to expand
	 * @param level    deepest {@link DataLevel} to expand down to
	 * @param category datasource category (RDB level value, or EXCEL/API key)
	 * @param tableId  optional table filter passed down to table expansion
	 * @return child tree nodes for the datasource
	 */
	private List<DataTreeVo> getDataBaseChildrens(String id, String level, String category, String tableId) {
		List<DataSourceEntity> sourceList = redisService.getCacheList(CacheConstant.METADATA_SOURCE_KEY);
		// Cache miss falls back to the database (enabled sources only).
		Stream<DataSourceEntity> stream = Optional.ofNullable(sourceList)
				.orElseGet(() -> dataSourceMapper.selectListByQuery(
						QueryWrapper.create().where(DataSourceEntity::getStatus).eq((DataStatus.ENABLE.getKey()))))
				.stream();
		if (StrUtil.isNotBlank(category)) {
			stream = stream.filter(s -> category.equals(s.getCategory()) && id.equals(s.getId()));
		}
		List<DataTreeVo> children = stream.filter(s -> DataStatus.ENABLE.getKey().equals(s.getStatus())).map(m -> {
			DataTreeVo tree = new DataTreeVo();
			tree.setId(m.getId());
			tree.setDbType(m.getDbType());
			// Null-safe comparison: the original called category.equals(...)
			// and threw a NullPointerException when category was null (the
			// blank-check above only guards the filter, not this branch).
			// Mirrors the null handling in getTableChildrens.
			if (String.valueOf(DataCategory.RDB.getLevel()).equals(category)) {
				tree.setType(DataLevel.DATABASE.getKey());
			} else if (category != null) {
				tree.setType(DataCategory.getCategory(category).getKey() + "-source");
			}
			tree.setLabel(m.getName());
			tree.setName(m.getName());
			if (DataLevel.getLevel(level).getLevel() >= DataLevel.TABLE.getLevel()) {
				tree.setChildren(getTableChildrens(m.getId(), level, category, tableId));
			}
			return tree;
		}).collect(Collectors.toList());
		return children;
	}

	/**
	 * Builds the table-level tree nodes for a datasource, recursing into
	 * column children when the requested level allows.
	 *
	 * @param id       datasource id whose tables are expanded
	 * @param level    deepest {@link DataLevel} to expand down to
	 * @param category datasource category (RDB level value, or other keys)
	 * @param tableId  when non-blank, restrict the result to this table id
	 * @return child tree nodes for the datasource's tables
	 */
	@SuppressWarnings("unchecked")
	private List<DataTreeVo> getTableChildrens(String id, String level, String category, String tableId) {
		List<DataTableEntity> tableList = (List<DataTableEntity>) redisService
				.getCacheMapValue(CacheConstant.METADATA_TABLE_KEY, id, DataTableEntity.class);

		// NOTE(review): the cache-miss fallback loads ALL tables, not only
		// those belonging to this datasource — confirm this is intended.
		Stream<DataTableEntity> stream = Optional.ofNullable(tableList)
				.orElseGet(() -> dataTableMapper.selectListByQuery(QueryWrapper.create())).stream();
		if (StrUtil.isNotBlank(tableId)) {
			stream = stream.filter(s -> tableId.equals(s.getId()));
		}
		List<DataTreeVo> children = stream.map(m -> {
			DataTreeVo tree = new DataTreeVo();
			tree.setId(m.getId());
			// A null category leaves the node's type unset (as before).
			if (category != null && category.equals(String.valueOf(DataCategory.RDB.getLevel()))) {
				tree.setType(DataLevel.TABLE.getKey());
			} else if (category != null) {
				tree.setType(DataCategory.getCategory(category).getKey() + "-table");
			}
			tree.setName(m.getTableComment());
			tree.setCode(m.getTableName());
			// Display the comment when present, otherwise the raw table name.
			tree.setLabel(StrUtil.isBlank(m.getTableComment()) ? m.getTableName() : m.getTableComment());
			if (DataLevel.getLevel(level).getLevel() >= DataLevel.COLUMN.getLevel()) {
				tree.setChildren(getColumnChildrens(m.getId()));
			}
			return tree;
		}).collect(Collectors.toList());
		return children;
	}

	/**
	 * Builds the column-level tree nodes for a table.
	 *
	 * @param id table id whose columns are expanded
	 * @return child tree nodes for the table's columns
	 */
	@SuppressWarnings("unchecked")
	private List<DataTreeVo> getColumnChildrens(String id) {
		List<DataColumnEntity> columnList = (List<DataColumnEntity>) redisService
				.getCacheMapValue(CacheConstant.METADATA_COLUMN_KEY, id, DataColumnEntity.class);
		// NOTE(review): the cache-miss fallback loads ALL columns, not only
		// those belonging to this table — confirm this is intended.
		Stream<DataColumnEntity> stream = Optional.ofNullable(columnList)
				.orElseGet(() -> dataColumnMapper.selectListByQuery(QueryWrapper.create())).stream();
		List<DataTreeVo> children = stream.map(m -> {
			DataTreeVo tree = new DataTreeVo();
			tree.setId(m.getId());
			tree.setType(DataLevel.COLUMN.getKey());
			tree.setName(m.getColumnComment());
			tree.setCode(m.getColumnName());
			// Display the comment when present, otherwise the column name.
			tree.setLabel(StrUtil.isBlank(m.getColumnComment()) ? m.getColumnName() : m.getColumnComment());
			return tree;
		}).collect(Collectors.toList());
		return children;
	}

	/**
	 * Updates the status flag of a datasource and, when a row was actually
	 * changed, publishes a cache-refresh event.
	 *
	 * @param id     datasource id
	 * @param status new status value
	 */
	@Override
	public void updateDataSourceStatus(String id, String status) {
		DataSourceEntity patch = new DataSourceEntity();
		patch.setId(id);
		patch.setStatus(status);
		QueryWrapper byId = QueryWrapper.create().where(Tables.dataSource.id.eq(id));
		int affected = dataSourceMapper.updateByQuery(patch, byId);
		if (affected > 0) {
			// Refresh the datasource cache only when something changed.
			dataCacheEvent.publishCacheEvent(new DataCacheEventModel(CacheConstant.METADATA_SOURCE_KEY));
		}
	}

}
