package com.central.scheduler.api.sqlparser;

import com.alibaba.druid.sql.SQLUtils;
import com.alibaba.druid.sql.ast.SQLStatement;
import com.alibaba.druid.sql.ast.statement.*;
import com.alibaba.druid.sql.visitor.SchemaStatVisitor;
import com.alibaba.druid.stat.TableStat;
import com.alibaba.druid.util.JdbcConstants;
import com.central.common.entity.DataRelated;
import com.central.common.entity.FlinkSqlTaskContent;
import com.central.common.entity.FlinkTable;
import com.central.common.entity.SqlTaskContent;
import com.central.common.enums.PrivilegeType;

import java.util.*;

/**
 * @author Tindy
 * @date 2022/1/23
 * @describe
 */
/**
 * Parses Flink SQL task content with Alibaba Druid and extracts data-lineage
 * information: which tables a statement reads from (SELECT), which table it
 * writes to (INSERT/CREATE), and the privileges required on each.
 *
 * <p>Thread-safe singleton; obtain via {@link #getInstance()}.
 */
public class FlinkSqlParser implements BaseSqlParser {

    /** Initialization-on-demand holder: lazy AND thread-safe, unlike the
     *  previous unsynchronized check-then-act on a static field. */
    private static final class Holder {
        private static final FlinkSqlParser INSTANCE = new FlinkSqlParser();
    }

    public static FlinkSqlParser getInstance() {
        return Holder.INSTANCE;
    }

    /**
     * Builds one {@link DataRelated} lineage entry per parsed SQL statement.
     *
     * @param sqlTaskContent carries the SQL text and the logical-name -> {@link FlinkTable} map
     * @return one lineage entry per statement, in statement order
     * @throws IllegalArgumentException if a statement is neither SELECT nor INSERT,
     *         or if a referenced table is absent from the table map
     */
    public List<DataRelated> dataRelatedParser(FlinkSqlTaskContent sqlTaskContent) {
        List<SQLStatement> sqlStatements =
                SQLUtils.parseStatements(sqlTaskContent.getSql().toLowerCase(), JdbcConstants.HIVE);
        Map<String, FlinkTable> tableMap = sqlTaskContent.getTableMap();
        List<DataRelated> result = new ArrayList<>(sqlStatements.size());
        for (SQLStatement sqlStatement : sqlStatements) {
            // Both supported statement kinds take the same path; anything else is rejected.
            if (!(sqlStatement instanceof SQLSelectStatement)
                    && !(sqlStatement instanceof SQLInsertStatement)) {
                throw new IllegalArgumentException("sql类型错误，数据血缘解析失败,sql语句仅支持 select，insert into 2种");
            }
            DataRelated dataRelated = new DataRelated();
            getFromTo(sqlStatement, dataRelated, tableMap);
            result.add(dataRelated);
        }
        return result;
    }

    /**
     * Walks one statement with Druid's {@link SchemaStatVisitor} and records, on
     * {@code dataRelated}, the target table (insert/create usage) and source
     * tables (select usage) plus the privilege each access requires.
     *
     * @param sqlStatement statement to analyze
     * @param dataRelated  accumulator for source/target tables and privileges (mutated)
     * @param tableMap     logical table name -> {@link FlinkTable} definition
     */
    public void getFromTo(SQLStatement sqlStatement, DataRelated dataRelated, Map<String, FlinkTable> tableMap) {
        SchemaStatVisitor schemaStatVisitor = SQLUtils.createSchemaStatVisitor(JdbcConstants.HIVE);
        sqlStatement.accept(schemaStatVisitor);
        Map<TableStat.Name, TableStat> tables = schemaStatVisitor.getTables();
        if (Objects.isNull(tables)) {
            return;
        }
        tables.forEach((name, tableStat) -> {
            if (tableStat.getCreateCount() > 0 || tableStat.getInsertCount() > 0) {
                String tabName = qualifiedTableName(name.getName(), tableMap);
                dataRelated.setTargetTable(tabName);
                addPrivilege(dataRelated, tabName, PrivilegeType.INSERT);
            } else if (tableStat.getSelectCount() > 0) {
                String tabName = qualifiedTableName(name.getName(), tableMap);
                dataRelated.getSourceTables().add(tabName);
                addPrivilege(dataRelated, tabName, PrivilegeType.SELECT);
            }
        });
    }

    /**
     * Resolves a logical table name to its fully-qualified, lower-cased form
     * {@code datasourceId.dbName.tableName}.
     *
     * @throws IllegalArgumentException if the table is not registered in the map
     *         (previously this surfaced as a context-free NullPointerException)
     */
    private static String qualifiedTableName(String logicalName, Map<String, FlinkTable> tableMap) {
        FlinkTable table = tableMap.get(logicalName);
        if (table == null) {
            throw new IllegalArgumentException("数据血缘解析失败，未找到表定义: " + logicalName);
        }
        return (table.getDatasourceId() + "." + table.getDbName() + "." + table.getTableName()).toLowerCase();
    }

    /** Registers one required privilege for a table, creating the set on first use. */
    private static void addPrivilege(DataRelated dataRelated, String tabName, PrivilegeType type) {
        dataRelated.getNeedPriveleges()
                .computeIfAbsent(tabName, k -> new HashSet<>())
                .add(type);
    }

    /**
     * Lineage extraction for plain (non-Flink) SQL tasks is not implemented here.
     * Returns {@code null} — preserved for callers that treat null as
     * "no lineage available"; TODO(review): consider an empty list instead.
     */
    @Override
    public List<DataRelated> dataRelatedParser(SqlTaskContent sqlTaskContent) {
        return null;
    }
}

