/*
 *
 *  Licensed to the Apache Software Foundation (ASF) under one or more
 *  contributor license agreements.  See the NOTICE file distributed with
 *  this work for additional information regarding copyright ownership.
 *  The ASF licenses this file to You under the Apache License, Version 2.0
 *  (the "License"); you may not use this file except in compliance with
 *  the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 *
 */

package t20250228_sqlparse.lineage;

import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import lombok.extern.slf4j.Slf4j;
import org.apache.calcite.plan.RelOptTable;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rel.logical.LogicalAggregate;
import org.apache.calcite.rel.logical.LogicalFilter;
import org.apache.calcite.rel.logical.LogicalProject;
import org.apache.calcite.rel.logical.LogicalTableScan;
import org.apache.calcite.rel.metadata.JaninoRelMetadataProvider;
import org.apache.calcite.rel.metadata.RelColumnOrigin;
import org.apache.calcite.rel.metadata.RelMetadataQuery;
import org.apache.calcite.rel.metadata.RelMetadataQueryBase;
import org.apache.calcite.rel.type.RelDataTypeField;
import org.apache.calcite.rex.RexInputRef;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.sql.SqlIdentifier;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.parser.SqlParseException;
import org.apache.calcite.sql.parser.SqlParser;
import org.apache.calcite.sql.util.SqlBasicVisitor;
import org.apache.commons.collections.CollectionUtils;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.sql.parser.impl.FlinkSqlParserImpl;
import org.apache.flink.table.api.TableColumn;
import org.apache.flink.table.api.TableException;
import org.apache.flink.table.api.TableSchema;
import org.apache.flink.table.api.ValidationException;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.table.api.bridge.java.internal.StreamTableEnvironmentImpl;
import org.apache.flink.table.catalog.CatalogTable;
import org.apache.flink.table.catalog.ObjectIdentifier;
import org.apache.flink.table.catalog.ResolvedCatalogTable;
import org.apache.flink.table.operations.CatalogSinkModifyOperation;
import org.apache.flink.table.operations.Operation;
import org.apache.flink.table.operations.ddl.CreateTableOperation;
import org.apache.flink.table.planner.operations.PlannerQueryOperation;
import org.apache.flink.table.planner.plan.metadata.FlinkDefaultRelMetadataProvider;
import org.apache.flink.table.planner.plan.schema.TableSourceTable;

import java.util.*;
import java.util.concurrent.ConcurrentHashMap;

@Slf4j
public class LineageContext {

    /** Regex matching whitespace used to normalize SQL text before statement-type detection. */
    public static final String SQL_EMPTY_STR = "[\\s\\t\\n\\r]";

    // Table environment all statements are parsed/executed against; not thread-safe.
    private final StreamTableEnvironmentImpl tableEnv;

    // Connector options (the CREATE TABLE "WITH" clause) of every table created through
    // this context, keyed by the table's fully-qualified identifier.
    // Populated in analyzeLineage, consumed by tableOptionMapAddLevelIndex.
    private final Map<ObjectIdentifier, Map<String, String>> tableOptionMap;

    // NOTE(review): the cast assumes callers always pass the planner's
    // StreamTableEnvironmentImpl; any other implementation would throw ClassCastException.
    public LineageContext(StreamTableEnvironment tableEnv) {
        this.tableEnv = (StreamTableEnvironmentImpl)tableEnv;
        this.tableOptionMap = new HashMap<>();
    }

    /**
     * Executes/analyzes the given SQL statements in order and returns the column-level
     * lineage extracted from every INSERT statement.
     *
     * <p>Non-INSERT statements (other than SELECT and PRINT, which are skipped) are
     * executed directly against {@code tableEnv} so later statements can resolve the
     * catalog objects they create. For CREATE statements the table's connector options
     * are additionally recorded in {@code tableOptionMap} for later layout/option output.
     *
     * @param sqls statements to process, in execution order
     * @return lineage edges (source column → sink column) of all INSERT statements
     */
    public List<LineageRel> analyzeLineage(List<SingleSql> sqls) {

        List<LineageRel> lineageRelList = new ArrayList<>();
        /*
         * Since TableEnvironment is not thread-safe, add this sentence to solve it. Otherwise, NullPointerException
         * will appear when org.apache.calcite.rel.metadata.RelMetadataQuery.<init>
         * http://apache-flink.370.s1.nabble.com/flink1-11-0-sqlQuery-NullPointException-td5466.html
         */
        RelMetadataQueryBase.THREAD_PROVIDERS
                .set(JaninoRelMetadataProvider.of(FlinkDefaultRelMetadataProvider.INSTANCE()));

        for (SingleSql ssql : sqls) {
            String sql = ssql.getSqlSource();
            log.info("analyze sql:{}", sql);
            SqlType operationType = getOperationType(sql);
            if (operationType == SqlType.INSERT) {
                // 1. Generate original relNode tree
                Tuple2<String, RelNode> parsed = parseStatement(sql);
                String sinkTable = parsed.getField(0);
                RelNode oriRelNode = parsed.getField(1);

                // 2. Build lineage based from RelMetadataQuery
                lineageRelList.addAll(buildFiledLineageResult(sinkTable, oriRelNode));

            } else if (operationType != SqlType.SELECT && operationType != SqlType.PRINT) {
                // DDL / auxiliary statements: execute so subsequent statements resolve.
                tableEnv.executeSql(sql);
                if (operationType == SqlType.CREATE) {
                    Operation operation = tableEnv.getParser().parse(sql).get(0);
                    if (operation instanceof CreateTableOperation) {
                        CreateTableOperation createTableOperation = (CreateTableOperation) operation;
                        CatalogTable catalogTable = createTableOperation.getCatalogTable();
                        // Remember the connector options of the created table; they are
                        // attached to the lineage output later.
                        tableOptionMap.put(
                                createTableOperation.getTableIdentifier(),
                                catalogTable.getOptions());
                    }
                }
            }
        }
        return lineageRelList;
    }

    /**
     * Parses a single INSERT statement and returns the sink table's summary name
     * together with the Calcite RelNode tree of its query part.
     *
     * @param sql a single INSERT statement
     * @return tuple of (sink table identifier as summary string, root RelNode of the query)
     * @throws TableException if {@code sql} contains more than one statement or is not an INSERT
     */
    private Tuple2<String, RelNode> parseStatement(String sql) {
        List<Operation> operations = tableEnv.getParser().parse(sql);

        if (operations.size() != 1) {
            throw new TableException("Unsupported SQL query! only accepts a single SQL statement.");
        }
        Operation operation = operations.get(0);
        if (operation instanceof CatalogSinkModifyOperation) {
            CatalogSinkModifyOperation sinkOperation = (CatalogSinkModifyOperation) operation;
            // NOTE(review): unchecked cast — assumes the planner always produces a
            // PlannerQueryOperation child for a catalog sink; any other child type
            // would surface here as a ClassCastException.
            PlannerQueryOperation queryOperation = (PlannerQueryOperation) sinkOperation.getChild();
            RelNode relNode = queryOperation.getCalciteTree();

            // Recursively walk the inputs (debug field-flow tracing into sourceFieldMap).
            reverRelNode(relNode);

            return new Tuple2<>(sinkOperation.getTableIdentifier().asSummaryString(), relNode);
        } else {
            throw new TableException("Only insert is supported now.");
        }
    }

    // Appears unused within this class — kept for binary/source compatibility.
    // TODO(review): remove if confirmed dead.
    List<String> level = new ArrayList<>();
    // For every source-table column name: the chain of (relNode id, field name) hops the
    // column takes while being forwarded/renamed up the plan. Seeded by table scans,
    // extended by projections in reverRelNode.
    Map<String, List<Tuple2<Integer, String>>> sourceFieldMap = new HashMap<>();

    /**
     * Bottom-up debug walk over the RelNode tree that traces, for each LogicalProject,
     * which input field every projected output field came from, recording the hops in
     * {@link #sourceFieldMap}. Aggregate and filter handling are still TODO.
     *
     * <p>Fixes over the previous version: the node-type checks now form one
     * if/else-if chain (a stray {@code "} if"} previously restarted the chain), and
     * tracing goes through the class logger instead of {@code System.out}.
     *
     * @param relNode current node of the plan; no-op when {@code null}
     */
    private void reverRelNode(RelNode relNode) {

        if (relNode == null) {
            return;
        }

        // Depth-first: visit inputs first so table scans seed sourceFieldMap before
        // the projections above them are inspected.
        for (RelNode input : relNode.getInputs()) {
            reverRelNode(input);
        }

        int currentNodeId = relNode.getId();
        log.debug("reverRelNode:[{}] relNode:{}", currentNodeId, relNode.getClass());
        List<RelDataTypeField> fieldList = relNode.getRowType().getFieldList();
        if (relNode instanceof LogicalProject) {
            LogicalProject project = (LogicalProject) relNode;
            if (relNode.getInputs().size() != 1) {
                log.warn("LogicalProject with {} inputs, expected 1", relNode.getInputs().size());
            }
            RelNode input0 = relNode.getInputs().get(0);
            int sourceNodeId = input0.getId();
            List<RelDataTypeField> inputFieldList = input0.getRowType().getFieldList();

            for (int i = 0; i < project.getProjects().size(); i++) {
                RexNode rexNode = project.getProjects().get(i);
                // Only plain column references are traced; computed expressions are skipped.
                if (rexNode instanceof RexInputRef) {
                    int index = ((RexInputRef) rexNode).getIndex();
                    RelDataTypeField inputField = inputFieldList.get(index);

                    // Extend every source-column chain whose recorded hops include the
                    // referenced input field of this node's (single) input; chains that
                    // do not reach it are left untouched.
                    for (List<Tuple2<Integer, String>> chain : sourceFieldMap.values()) {
                        boolean isUsed = false;
                        for (Tuple2<Integer, String> hop : chain) {
                            if (hop.f0 == sourceNodeId && hop.f1.equals(inputField.getName())) {
                                isUsed = true;
                                break;
                            }
                        }
                        if (isUsed) {
                            chain.add(new Tuple2<>(currentNodeId, fieldList.get(i).getName()));
                        }
                    }

                    log.debug("  reverRelNode:{}[{}] => {}[{}]",
                            fieldList.get(i).getName(), currentNodeId, inputField.getName(), sourceNodeId);
                }
            }
        } else if (relNode instanceof LogicalAggregate) {
            // TODO trace fields through aggregate groupings/calls
        } else if (relNode instanceof LogicalTableScan) {
            // Source-table fields each start a new hop chain keyed by the column name.
            // NOTE(review): columns with the same name from different scans overwrite
            // each other here — acceptable for debug tracing only.
            for (RelDataTypeField relDataTypeField : relNode.getRowType().getFieldList()) {
                List<Tuple2<Integer, String>> chain = new ArrayList<>();
                chain.add(new Tuple2<>(currentNodeId, relDataTypeField.getName()));
                sourceFieldMap.put(relDataTypeField.getName(), chain);
            }
        } else if (relNode instanceof LogicalFilter) {
            // TODO filters forward all fields unchanged; nothing recorded yet
        }
        // Other node types (joins, unions, ...) are not traced yet.
    }

    // TODO(review): unimplemented and currently unreferenced; presumably intended to
    // prune unused columns from the plan ("column clipping" / pruning). When implemented,
    // rename to lowerCamelCase (columnClipping) per Java conventions.
    private void ColumnClipping(RelNode relNode) {


    }

    /**
     * Check the size of query and sink fields match.
     *
     * @param sinkTable     sink table name, used only in the error message
     * @param relNode       root of the query's relational tree
     * @param sinkFieldList column names of the sink table
     * @throws ValidationException when the field counts differ
     */
    private void validateSchema(String sinkTable, RelNode relNode, List<String> sinkFieldList) {
        final List<String> queryFieldList = relNode.getRowType().getFieldNames();
        if (queryFieldList.size() == sinkFieldList.size()) {
            return;
        }
        throw new ValidationException(String.format(
                "Column types of query result and sink for %s do not match.\n"
                        + "Query schema: %s\n"
                        + "Sink schema:  %s",
                sinkTable, queryFieldList, sinkFieldList));
    }

    /**
     * Derives column-level lineage for one INSERT statement using Calcite's
     * {@code RelMetadataQuery#getColumnOrigins} metadata.
     *
     * <p>(Note: "Filed" in the method name is a typo for "Field"; kept for caller compatibility.)
     *
     * @param sinkTable  fully qualified sink table name
     * @param optRelNode relational algebra tree of the INSERT INTO query
     * @return one {@code LineageRel} per (source column, target column) origin pair
     * @throws ValidationException when query and sink field counts differ
     */
    private List<LineageRel> buildFiledLineageResult(String sinkTable, RelNode optRelNode) {
        // target columns
        List<String> targetColumnList =
                tableEnv.from(sinkTable).getResolvedSchema().getColumnNames();

        // check the size of query and sink fields match
        validateSchema(sinkTable, optRelNode, targetColumnList);

        RelMetadataQuery metadataQuery = optRelNode.getCluster().getMetadataQuery();
        List<LineageRel> resultList = new ArrayList<>();


        for (int index = 0; index < targetColumnList.size(); index++) {
            String targetColumn = targetColumnList.get(index);

            // All origin columns feeding this output position (may be several for
            // expressions combining multiple columns, or empty for constants).
            Set<RelColumnOrigin> relColumnOriginSet = metadataQuery.getColumnOrigins(optRelNode, index);

            if (CollectionUtils.isNotEmpty(relColumnOriginSet)) {
                for (RelColumnOrigin relColumnOrigin : relColumnOriginSet) {
                    // table
                    RelOptTable table = relColumnOrigin.getOriginTable();
                    String sourceTable = String.join(".", table.getQualifiedName());

                    // field
                    // NOTE(review): assumes every origin table is a TableSourceTable —
                    // a different RelOptTable implementation would throw CCE here.
                    int ordinal = relColumnOrigin.getOriginColumnOrdinal();
                    ResolvedCatalogTable resolvedCatalogTable = ((TableSourceTable) table).catalogTable();

                    List<String> columnNames = resolvedCatalogTable.getResolvedSchema().getColumnNames();
                    String sourceColumn = columnNames.get(ordinal);

                    // add record
                    resultList.add(LineageRel.build(
                            sourceTable, sourceColumn,
                            sinkTable, targetColumn,
                            relColumnOrigin.getTransform()));
                }
            }
        }
        return resultList;
    }

    /**
     * Assembles the lineage-graph JSON consumed by the front end.
     *
     * <p>Each sink column becomes a {@code targetField} entry with its source columns
     * under {@code refFields}. Tables are laid out in columns: a table's {@code level}
     * is its distance from the final sink tables (level 0, right-most) and {@code index}
     * is its position within that level. Example output:
     * <pre>
     * [
     *   {
     *     "refFields": [],
     *     "targetField": {
     *       "fieldName": "dws.dws_comm_shop_linkshop_da.cal_date",
     *       "final": false, "index": 0, "level": 0
     *     }
     *   },
     *   {
     *     "refFields": [
     *       { "fieldName": "dim.dim_pub_shop_label_info_da.shop_brand_code",
     *         "final": true, "index": 1, "level": 1 },
     *       { "fieldName": "dim.dim_pub_shop_base_info_da.brand_code",
     *         "final": true, "index": 1, "level": 1 }
     *     ],
     *     "targetField": {
     *       "fieldName": "dws.dws_comm_shop_linkshop_da.brand_code",
     *       "final": false, "index": 0, "level": 0
     *     }
     *   }
     * ]
     * </pre>
     *
     * @param sourceList raw lineage edges from {@code analyzeLineage}
     * @return JSON object with keys {@code lineage} (field graph) and {@code options}
     *         (per-table connector options with layout coordinates)
     */
    public JSONObject buildLineage(List<LineageRel> sourceList) {

        // <target column, source columns>
        Map<Field, List<Field>> fieldMap = new HashMap<>();
        // <table, (in-degree, out-degree)>
        Map<String, Tuple2<Integer, Integer>> duMap = new HashMap<>();
        // directly connected (source, target) table pairs
        Set<Tuple2<String, String>> table2table = new HashSet<>();
        // <table, level>; levels propagate backwards from the sinks (level 0)
        Map<String, Integer> levelMap = new HashMap<>();
        // tables not yet assigned a level
        Set<String> noMarkLevel = new HashSet<>();

        for (LineageRel rel : sourceList) {
            Field targetField = new Field(rel.getTargetCatalog(), rel.getTargetDatabase(), rel.getTargetTable(),
                rel.getTargetColumn(), -1, 0, false);
            Field sourceField = new Field(rel.getSourceCatalog(), rel.getSourceDatabase(), rel.getSourceTable(),
                rel.getSourceColumn(), -1, 0, false);

            table2table.add(Tuple2.of(rel.getSourceTablePath(), rel.getTargetTablePath()));

            noMarkLevel.add(rel.getSourceTablePath());
            noMarkLevel.add(rel.getTargetTablePath());

            fieldMap.computeIfAbsent(targetField, k -> new ArrayList<>()).add(sourceField);

            // out-degree of the source table +1
            duMap.compute(rel.getSourceTablePath(), (k, v) -> {
                if (v == null) {
                    return new Tuple2<>(0, 1);
                }
                v.f1++;
                return v;
            });

            // in-degree of the target table +1
            duMap.compute(rel.getTargetTablePath(), (k, v) -> {
                if (v == null) {
                    return new Tuple2<>(1, 0);
                }
                v.f0++;
                return v;
            });
        }

        // Tables with out-degree 0 are final sinks; they sit at level 0 (right-most).
        for (Map.Entry<String, Tuple2<Integer, Integer>> duEntry : duMap.entrySet()) {
            if (duEntry.getValue().f1 == 0) {
                levelMap.put(duEntry.getKey(), 0);
                noMarkLevel.remove(duEntry.getKey());
            }
        }

        // Propagate levels backwards: a source table sits one level left of its target.
        // Repeated passes until every table is marked; if a full pass marks nothing new
        // (cycle or subgraph disconnected from any sink) we stop instead of spinning
        // forever as the previous unbounded while-loop could.
        int maxLevel = 0;
        boolean progressed = true;
        while (!noMarkLevel.isEmpty() && progressed) {
            progressed = false;
            for (Tuple2<String, String> edge : table2table) {
                Integer targetLevel = levelMap.get(edge.f1);
                if (targetLevel == null) {
                    continue;
                }
                int currentLevel = targetLevel + 1;
                levelMap.put(edge.f0, currentLevel);
                if (noMarkLevel.remove(edge.f0)) {
                    progressed = true;
                }
                if (currentLevel > maxLevel) {
                    maxLevel = currentLevel;
                }
            }
        }
        if (!noMarkLevel.isEmpty()) {
            log.warn("could not assign a level to tables {}; lineage graph may contain a cycle", noMarkLevel);
        }

        JSONArray lineage = new JSONArray();
        // next free horizontal index per level
        Map<Integer, Integer> levelIndexMap = new HashMap<>();
        // <table, index>
        Map<String, Integer> tableIndexMap = new HashMap<>();
        for (int i = 0; i <= maxLevel; i++) {
            levelIndexMap.put(i, 0);
        }

        // Source fields on the left-most level; re-emitted below as standalone nodes.
        Set<Field> levelMaxFields = new HashSet<>();
        for (Map.Entry<Field, List<Field>> fieldListEntry : fieldMap.entrySet()) {
            Field target = fieldListEntry.getKey();
            target.setLevel(levelMap.get(target.getTablePath()));

            if (target.getLevel() == maxLevel) {
                target.setOuterLayer(true);
            }
            if (target.getIndex() == -1) {
                target.setIndex(assignTableIndex(target.getTablePath(), target.getLevel(),
                        tableIndexMap, levelIndexMap));
            }

            List<Field> source = fieldListEntry.getValue();
            for (Field field : source) {
                field.setLevel(levelMap.get(field.getTablePath()));
                if (field.getLevel() == maxLevel) {
                    field.setOuterLayer(true);
                    levelMaxFields.add(field);
                }
                if (field.getIndex() == -1) {
                    field.setIndex(assignTableIndex(field.getTablePath(), field.getLevel(),
                            tableIndexMap, levelIndexMap));
                }
            }

            JSONArray sourceArray = new JSONArray();
            sourceArray.addAll(source);

            JSONObject jsonObject = new JSONObject();
            jsonObject.put("refFields", sourceArray);
            jsonObject.put("targetField", target);
            lineage.add(jsonObject);
        }

        // To simplify front-end parsing, declare the left-most source fields once
        // more as entries of their own with no refFields.
        for (Field levelMaxField : levelMaxFields) {
            JSONObject jsonObject = new JSONObject();
            jsonObject.put("refFields", new JSONArray());
            jsonObject.put("targetField", levelMaxField.clone());
            lineage.add(jsonObject);
        }

        List<Option> options = tableOptionMapAddLevelIndex(tableIndexMap, levelMap);

        JSONObject result = new JSONObject();
        result.put("lineage", lineage);
        result.put("options", options);

        log.info("lineage size:{}", lineage.size());
        return result;

    }

    /**
     * Returns the horizontal index of {@code tablePath} on {@code level}, assigning the
     * next free slot of that level on first use and bumping the level's counter.
     */
    private static int assignTableIndex(String tablePath, int level,
            Map<String, Integer> tableIndexMap, Map<Integer, Integer> levelIndexMap) {
        Integer tableIndex = tableIndexMap.get(tablePath);
        if (tableIndex == null) {
            tableIndex = levelIndexMap.get(level);
            tableIndexMap.put(tablePath, tableIndex);
            levelIndexMap.put(level, tableIndex + 1);
        }
        return tableIndex;
    }
    /**
     * Attaches the computed layout coordinates (level/index) to the connector options
     * of every table recorded during CREATE TABLE handling.
     *
     * @param tableIndexMap map from table path to horizontal index within its level
     * @param tableLevelMap map from table path to level (distance from sink)
     * @return one {@code Option} per table in {@code tableOptionMap}
     */
    private List<Option> tableOptionMapAddLevelIndex(Map<String, Integer> tableIndexMap, Map<String, Integer> tableLevelMap) {
        List<Option> list = new ArrayList<>();
        for (Map.Entry<ObjectIdentifier, Map<String, String>> entry : tableOptionMap.entrySet()) {
            Option.OptionBuilder builder = Option.builder();
            // NOTE(review): lookups key on ObjectIdentifier.asSummaryString() while the
            // maps are keyed by LineageRel table paths — confirm both formats match, and
            // that Option.level/index tolerate the null returned for tables absent from
            // the maps (e.g. created but never referenced by an INSERT).
            Option option = builder.flinkTablePath(getDatabaseTable(entry.getKey()))
                .tableOptions(entry.getValue())
                .level(tableLevelMap.get(entry.getKey().asSummaryString()))
                .index(tableIndexMap.get(entry.getKey().asSummaryString()))
                .build();
            list.add(option);
        }
        return list;
    }

    /**
     * Detects the statement type of the given SQL by collapsing whitespace and
     * matching the upper-cased text against the known {@link SqlType} patterns.
     *
     * @param sql raw SQL text
     * @return the first matching type, or {@link SqlType#UNKNOWN} when none matches
     */
    public static SqlType getOperationType(String sql) {
        // Locale.ROOT keeps keyword upper-casing locale-independent (e.g. the
        // Turkish dotless-i would otherwise break "insert" -> "INSERT").
        String sqlTrim = sql.replaceAll(SQL_EMPTY_STR, " ").trim().toUpperCase(Locale.ROOT);
        return Arrays.stream(SqlType.values())
                .filter(sqlType -> sqlType.match(sqlTrim))
                .findFirst()
                .orElse(SqlType.UNKNOWN);
    }


    /** Returns {@code "database.table"} for the given identifier (catalog name omitted). */
    private static String getDatabaseTable(ObjectIdentifier identifier) {
        return String.join(".", identifier.getDatabaseName(), identifier.getObjectName());
    }
}
