package com.duoec.dw.lineage.service.impl;

import com.alibaba.druid.sql.ast.SQLStatement;
import com.alibaba.druid.sql.dialect.hive.parser.HiveStatementParser;
import com.alibaba.druid.sql.parser.ParserException;
import com.duoec.dw.lineage.core.utils.LineageMergeUtils;
import com.duoec.dw.lineage.core.utils.ScriptFormatUtils;
import com.duoec.dw.lineage.exceptions.LineageAnalyseException;
import com.duoec.dw.lineage.service.LineageAnalyseService;
import com.duoec.dw.lineage.service.LineageTableService;
import com.duoec.dw.lineage.core.utils.AnalyserUtils;
import com.duoec.dw.lineage.core.utils.ScriptInfoUtils;
import com.duoec.dw.lineage.dto.DropTableLineageDto;
import com.duoec.dw.lineage.dto.ScriptLineageDto;
import com.duoec.dw.lineage.dto.SqlScriptDto;
import com.duoec.dw.lineage.dto.TableLineageDto;
import com.duoec.dw.lineage.service.HiveAnalyser;
import com.google.common.collect.Lists;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.util.CollectionUtils;

import java.util.LinkedList;
import java.util.List;
import java.util.stream.Collectors;

/**
 * @author xuwenzhen
 */
@Service
public class LineageAnalyseServiceDruidImpl implements LineageAnalyseService {
    private static final Logger logger = LoggerFactory.getLogger(LineageAnalyseServiceDruidImpl.class);

    /** Database name that marks a table as temporary. */
    public static final String STR_TMP = "tmp";

    /** tableType value for a regular (non-temporary) table. */
    private static final int TABLE_TYPE_NORMAL = 0;
    /** tableType value for a temporary table (detected via database name). */
    private static final int TABLE_TYPE_TEMP = 1;
    /** tableType value assigned to DROP TABLE statements. */
    private static final int TABLE_TYPE_DROPPED = 2;

    @Autowired
    private HiveAnalyser hiveAnalyser;

    /**
     * Parses a script and produces its lineage information.
     *
     * @param script the raw script text
     * @return lineage information extracted from the script
     */
    @Override
    public ScriptLineageDto analyseScript(String script) {
        return ScriptFormatUtils.wrap(script, sql -> doAnalyse(script, sql));
    }

    /**
     * Core analysis: parses the formatted script into statements, derives one
     * lineage DTO per statement, and merges/tracks them by statement type.
     *
     * @param orgScript the original (unformatted) script, used for var/jar extraction
     * @param fmtScript the formatted script fed to the Druid parser
     * @return the aggregated lineage result; if a statement fails to analyse, the
     *         partial result is returned with the error message in its errorList
     */
    private ScriptLineageDto doAnalyse(String orgScript, String fmtScript) {
        ScriptLineageDto scriptLineageDto = new ScriptLineageDto();
        LinkedList<TableLineageDto> lineages = Lists.newLinkedList();
        scriptLineageDto.setLineages(lineages);

        List<SQLStatement> statementList = parseStatements(fmtScript);

        SqlScriptDto scriptInfo = ScriptInfoUtils.analyse(orgScript);
        for (SQLStatement stm : statementList) {
            TableLineageDto dto;
            try {
                dto = hiveAnalyser.analyseStatement(stm);
            } catch (LineageAnalyseException e) {
                // Abort on the first statement that fails to analyse and surface the message.
                scriptLineageDto.setErrorList(Lists.newArrayList(e.getMessage()));
                return scriptLineageDto;
            }
            if (dto == null) {
                continue;
            }
            dto.setScript(scriptInfo);
            if (STR_TMP.equals(dto.getDatabaseName())) {
                // The database name alone identifies this as a temporary table.
                dto.setTableType(TABLE_TYPE_TEMP);
            }
            handleStatement(dto, lineages);
        }

        // Remove valid lineage tables from the temp-table registry so that repeated
        // calls on the same thread do not interfere with each other.
        lineages.forEach(lineage -> LineageTableService.removeTableDto(lineage.getFullTableName()));

        warnOnMultipleTargets(lineages);

        scriptLineageDto.setVars(AnalyserUtils.analyseVars(orgScript));
        scriptLineageDto.setJars(AnalyserUtils.analyseJars(orgScript));
        return scriptLineageDto;
    }

    /**
     * Parses the formatted script into SQL statements, logging and rethrowing
     * any parser failure with the offending script attached for diagnosis.
     */
    private List<SQLStatement> parseStatements(String fmtScript) {
        try {
            return new HiveStatementParser(fmtScript).parseStatementList();
        } catch (ParserException | LineageAnalyseException e) {
            logger.error("解析失败：script: {}", fmtScript, e);
            throw e;
        }
    }

    /**
     * Routes one analysed statement into the merged lineage list and keeps the
     * temp-table registry in sync for CREATE/DROP statements.
     */
    private void handleStatement(TableLineageDto dto, LinkedList<TableLineageDto> lineages) {
        switch (dto.getStatementType()) {
            case INSERT_TABLE_DATA:
            case SELECT_TABLE:
                // A script may operate on the same table several times; merge those entries.
                LineageMergeUtils.addOrMerge(lineages, dto);
                break;
            case CREATE_TABLE:
                LineageMergeUtils.addOrMerge(lineages, dto);
                LineageTableService.addTableDto(dto);
                break;
            case DROP_TABLE:
                if (dto instanceof DropTableLineageDto) {
                    dto.setTableType(TABLE_TYPE_DROPPED);
                    ((DropTableLineageDto) dto)
                            .getDropList()
                            .forEach(table -> LineageTableService.removeTableDto(table.toString()));
                }
                break;
            default:
                break;
        }
    }

    /**
     * Logs a warning when a script writes into more than one non-temporary table.
     *
     * TODO strip temporary tables out of the lineage graph. NOTE(review): the
     * original note said temp tables are tableType=2, but the code marks temp
     * tables with 1 and dropped tables with 2 — confirm which value is intended.
     */
    private void warnOnMultipleTargets(List<TableLineageDto> lineages) {
        if (lineages.size() <= 1) {
            return;
        }
        List<TableLineageDto> lineageList = lineages.stream()
                // Constant-first equals: null-safe if getTableType() was never set.
                .filter(lineage -> Integer.valueOf(TABLE_TYPE_NORMAL).equals(lineage.getTableType()))
                .collect(Collectors.toList());
        if (lineageList.size() > 1) {
            String tableNames = lineageList.stream()
                    .map(TableLineageDto::getFullTableName)
                    .collect(Collectors.joining(", "));
            logger.warn("一个脚本包含多个表数据写入: {}", tableNames);
        }
    }

    /**
     * Parses a single Hive SQL statement and produces its lineage data.
     *
     * @param hql a script expected to contain exactly one statement
     * @return the lineage data, or {@code null} when the statement yields none
     * @throws LineageAnalyseException if the script does not contain exactly one statement
     */
    @Override
    public TableLineageDto analyseSingleHiveSql(String hql) {
        List<TableLineageDto> tables = ScriptFormatUtils.wrap(hql, sql -> {
            HiveStatementParser statementParser = new HiveStatementParser(sql);
            List<SQLStatement> statementList = statementParser.parseStatementList();
            if (statementList == null || statementList.size() != 1) {
                throw new LineageAnalyseException("当前脚本包含语句数量不等于1！");
            }
            TableLineageDto tableLineageDto = hiveAnalyser.analyseStatement(statementList.get(0));
            if (tableLineageDto == null) {
                return null;
            }
            // Merge/optimise global dependencies together with column-level dependencies.
            AnalyserUtils.mergeDependency(tableLineageDto);
            return Lists.newArrayList(tableLineageDto);
        });
        return CollectionUtils.isEmpty(tables) ? null : tables.get(0);
    }

    /**
     * Setter used for tests and manual wiring.
     */
    public void setHiveAnalyser(HiveAnalyser hiveAnalyser) {
        this.hiveAnalyser = hiveAnalyser;
    }
}
