package cn.hexcloud.dga.lineage.service.impl;

import cn.hexcloud.dga.common.util.SqlParser;
import cn.hexcloud.dga.ds.bean.TDsTaskDefinition;
import cn.hexcloud.dga.ds.service.TDsTaskDefinitionService;
import cn.hexcloud.dga.ds.service.TDsTaskInstanceService;
import cn.hexcloud.dga.lineage.bean.GovernanceLineageTable;
import cn.hexcloud.dga.lineage.mapper.GovernanceLineageTableMapper;
import cn.hexcloud.dga.lineage.service.GovernanceLineageTableService;
import cn.hexcloud.dga.meta.bean.TableMetaInfo;
import cn.hexcloud.dga.meta.service.TableMetaInfoService;
import com.baomidou.dynamic.datasource.annotation.DS;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import lombok.Getter;
import lombok.Setter;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.hive.ql.lib.Dispatcher;
import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.HiveParser;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import java.util.*;
import java.util.stream.Collectors;

/**
 * <p>
 * Table-lineage service implementation: derives upstream/downstream table
 * relationships for an assessment date by parsing the SQL of scheduled
 * task definitions and persisting one row per table with lineage.
 * </p>
 *
 * @author jiangdan7
 * @since 2023-09-06
 */
@Service
@DS("dga")
public class GovernanceLineageTableServiceImpl extends ServiceImpl<GovernanceLineageTableMapper, GovernanceLineageTable> implements GovernanceLineageTableService {

    @Autowired
    TDsTaskInstanceService tDsTaskInstanceService;

    @Autowired
    TDsTaskDefinitionService tDsTaskDefinitionService;

    @Autowired
    TableMetaInfoService tableMetaInfoService;

    /**
     * Builds and persists table lineage for the given assessment date.
     * Steps:
     * 1. load all known tables from table_meta_info;
     * 2. resolve the task definition attached to each table for this date;
     * 3. extract the SQL from each definition;
     * 4. parse each SQL into map1 (table -&gt; set of source tables) and
     *    map2 (table -&gt; set of sink tables);
     * 5. iterate all tables, looking up map1/map2 to assemble the
     *    List&lt;GovernanceLineageTable&gt;;
     * 6. batch-save the list.
     *
     * @param assessDate assessment date key; existing lineage rows for this
     *                   date are removed first, making the method idempotent
     */
    @Override
    public void initLineage(String assessDate) {
        // Clear any previously generated lineage rows for this date (safe re-run).
        remove(new QueryWrapper<GovernanceLineageTable>().eq("governance_date", assessDate));
        // All known Hive tables from table_meta_info, as "schema.table".
        List<TableMetaInfo> tableMetaInfoList = tableMetaInfoService.getTableMetaInfoList();
        List<String> tableNameWithSchemaList = tableMetaInfoList.stream()
                .map(a -> a.getSchemaName() + "." + a.getTableName())
                .collect(Collectors.toList());
        // HashSet snapshot for O(1) membership checks while filtering parsed
        // references; the original List.contains made the filter O(tables * refs).
        Set<String> knownTableSet = new HashSet<>(tableNameWithSchemaList);
        Map<String, TDsTaskDefinition> tdsTaskDefinitionMap = tDsTaskDefinitionService.getTableDefinitionMapByDt(assessDate, tableNameWithSchemaList);
        // map1: table -> tables it reads from; map2: table -> tables that read it.
        Map<String, Set<String>> sourceTableMap = new HashMap<>(tableMetaInfoList.size());
        Map<String, Set<String>> sinkTableMap = new HashMap<>(tableMetaInfoList.size());
        for (Map.Entry<String, TDsTaskDefinition> entry : tdsTaskDefinitionMap.entrySet()) {
            String key = entry.getKey();
            String sql = entry.getValue().getSql();
            // Definitions without SQL contribute no lineage.
            if (sql == null) {
                continue;
            }
            TableRefDispatcher tableRefDispatcher = new TableRefDispatcher();
            // Unqualified table names in the SQL default to the owning table's schema.
            tableRefDispatcher.setDefaultSchemaName(key.split("\\.")[0]);
            SqlParser.sqlParse(sql, tableRefDispatcher);
            Set<String> tableRefSet = tableRefDispatcher.getTableRefSet();
            // A table is not its own lineage parent.
            tableRefSet.remove(key);
            // Drop aliases / temp names that are not registered tables.
            tableRefSet = tableRefSet.stream().filter(knownTableSet::contains).collect(Collectors.toSet());
            if (tableRefSet.isEmpty()) {
                continue;
            }
            // Upstream mapping for this table.
            sourceTableMap.put(key, tableRefSet);
            // Reverse (downstream) mapping for every referenced table.
            for (String table : tableRefSet) {
                sinkTableMap.computeIfAbsent(table, t -> new HashSet<>()).add(key);
            }
        }
        List<GovernanceLineageTable> governanceLineageTables = new ArrayList<>();
        for (String tableName : tableNameWithSchemaList) {
            Set<String> sources = sourceTableMap.get(tableName);
            Set<String> sinks = sinkTableMap.get(tableName);
            // Skip tables that appear in no lineage relationship at all.
            if (sources == null && sinks == null) {
                continue;
            }
            GovernanceLineageTable governanceLineageTable = new GovernanceLineageTable();
            String[] split = tableName.split("\\.");
            governanceLineageTable.setSchemaName(split[0]);
            governanceLineageTable.setTableName(split[1]);
            // StringUtils.join(null, ",") yields null, preserving "no data" semantics.
            governanceLineageTable.setSourceTables(StringUtils.join(sources, ","));
            governanceLineageTable.setSinkTables(StringUtils.join(sinks, ","));
            governanceLineageTable.setCreateTime(new Date());
            governanceLineageTable.setGovernanceDate(assessDate);
            governanceLineageTables.add(governanceLineageTable);
        }
        saveBatch(governanceLineageTables, 500);
    }

    /**
     * Returns the most recent lineage row for a "schema.table" name, enriched
     * with the table comment from the latest metadata snapshot.
     *
     * @param tableNameWithSchema fully qualified name, e.g. "dwd.orders"
     * @return latest lineage row, or {@code null} if none exists
     */
    @Override
    public GovernanceLineageTable getOne(String tableNameWithSchema) {
        String[] split = tableNameWithSchema.split("\\.");
        String schemaName = split[0];
        String tableName = split[1];
        GovernanceLineageTable one = getOne(new QueryWrapper<GovernanceLineageTable>()
                .eq("schema_name", schemaName)
                .eq("table_name", tableName)
                .orderByDesc("governance_date")
                .last("limit 1"));
        TableMetaInfo tableMetaInfo = tableMetaInfoService.getOne(new QueryWrapper<TableMetaInfo>()
                .eq("schema_name", schemaName)
                .eq("table_name", tableName)
                .orderByDesc("assess_date")
                .last("limit 1"));
        // Guard both sides: the original dereferenced tableMetaInfo without a
        // null check and threw an NPE when the metadata row was missing.
        if (one != null && tableMetaInfo != null) {
            one.setComment(tableMetaInfo.getTableComment());
        }
        return one;
    }

    /**
     * Hive AST visitor that collects every table referenced (TOK_TABREF) by a
     * parsed statement, qualifying unqualified names with the configured
     * default schema.
     */
    private static class TableRefDispatcher implements Dispatcher {

        // All "schema.table" names referenced by the parsed statement.
        @Getter
        HashSet<String> tableRefSet = new HashSet<>();
        // Schema to prepend when the SQL references a table without one.
        @Setter
        String defaultSchemaName = null;

        @Override
        public Object dispatch(Node nd, Stack<Node> stack, Object... nodeOutputs) throws SemanticException {
            ASTNode astNode = (ASTNode) nd;
            if (astNode.getType() == HiveParser.TOK_TABREF) {
                ASTNode tableNameAllNode = (ASTNode) astNode.getChild(0);
                String tableName;
                if (tableNameAllNode.getChildCount() == 1) {
                    // Unqualified reference: assume the task's own schema.
                    tableName = defaultSchemaName + "." + tableNameAllNode.getChild(0).getText();
                } else {
                    tableName = tableNameAllNode.getChild(0).getText() + "." + tableNameAllNode.getChild(1).getText();
                }
                tableRefSet.add(tableName);
            }
            return null;
        }
    }
}
