package org.example;

import com.linkedin.common.UrnArray;
import com.linkedin.common.urn.DatasetFieldUrn;
import com.linkedin.common.urn.DatasetUrn;
import com.linkedin.common.urn.Urn;
import com.linkedin.common.urn.UrnUtils;
import com.linkedin.dataset.*;
import com.linkedin.metadata.aspect.patch.builder.UpstreamLineagePatchBuilder;
import com.linkedin.mxe.MetadataChangeProposal;
import datahub.client.MetadataWriteResponse;
import datahub.client.rest.RestEmitter;
import datahub.event.MetadataChangeProposalWrapper;
import org.apache.calcite.plan.RelOptTable;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rel.RelVisitor;
import org.apache.calcite.rel.core.Project;
import org.apache.calcite.rel.metadata.RelColumnOrigin;
import org.apache.calcite.rel.metadata.RelMetadataQuery;
import org.apache.calcite.rex.RexCall;
import org.apache.calcite.rex.RexInputRef;
import org.apache.calcite.rex.RexNode;
import org.apache.commons.collections.CollectionUtils;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.ReadableConfig;
import org.apache.flink.table.api.*;
import org.apache.flink.table.api.internal.TableEnvironmentImpl;
import org.apache.flink.table.api.internal.TableImpl;
import org.apache.flink.table.catalog.*;
import org.apache.flink.table.catalog.exceptions.TableNotExistException;
import org.apache.flink.table.catalog.exceptions.CatalogException;
import org.apache.flink.table.delegation.InternalPlan;
import org.apache.flink.table.module.ModuleManager;
import org.apache.flink.table.operations.*;
import org.apache.flink.table.planner.calcite.FlinkContext;
import org.apache.flink.table.planner.calcite.FlinkRelBuilder;
import org.apache.flink.table.planner.calcite.RexFactory;
import org.apache.flink.table.planner.delegation.PlannerBase;
import org.apache.flink.table.planner.operations.PlannerQueryOperation;
import org.apache.flink.table.planner.plan.optimize.program.FlinkChainedProgram;
import org.apache.flink.table.planner.plan.optimize.program.FlinkStreamProgram;
import org.apache.flink.table.planner.plan.optimize.program.StreamOptimizeContext;
import org.apache.flink.table.planner.plan.trait.MiniBatchInterval;
import org.example.model.ColumnLineage;
import org.example.model.TableLineage;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.net.URISyntaxException;
import java.util.*;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;

public class FlinkDDLParser {

    private static final Logger LOG = LoggerFactory.getLogger(FlinkDDLParser.class);

    // Shared table environment, recreated on every parseSql() call.
    // NOTE(review): static mutable state — parseSql is NOT safe for concurrent use.
    private static TableEnvironmentImpl tableEnv;

    /**
     * Splits a SQL script on {@code ';'}, executes every CREATE statement against a
     * fresh streaming TableEnvironment, and extracts column-level lineage for every
     * INSERT statement.
     *
     * @param sql semicolon-separated SQL script (CREATE TABLE ... ; INSERT ... ;)
     * @return one TableLineage per INSERT statement; whatever was collected so far
     *     if a parser/catalog error occurs mid-script
     */
    public static List<TableLineage> parseSql(String sql) {
        List<TableLineage> tableLineages = new ArrayList<>();
        EnvironmentSettings settings = EnvironmentSettings.newInstance().inStreamingMode().build();
        tableEnv = (TableEnvironmentImpl) TableEnvironment.create(settings);

        try {
            FlinkDDLParser parser = new FlinkDDLParser();

            for (String statement : sql.split(";")) {
                String trimmed = statement.trim();
                if (trimmed.isEmpty()) {
                    continue;
                }
                // Match keywords case-insensitively: the original matched "CREATE"
                // only in upper case but "insert" only in lower case, silently
                // skipping statements written in the other case.
                String upper = trimmed.toUpperCase(Locale.ROOT);
                if (upper.contains("CREATE")) {
                    tableEnv.executeSql(trimmed);
                }
                if (upper.contains("INSERT")) {
                    tableLineages.add(parser.parseFieldLineage(trimmed));
                }
            }
        } catch (SqlParserException | CatalogException e) {
            // Best-effort: log (with stack trace) and return partial results.
            LOG.error("Failed to parse SQL script", e);
        }

        return tableLineages;
    }

    /**
     * Parses a single INSERT statement and derives column-level lineage for its
     * sink table.
     *
     * @param sql a single INSERT statement
     * @return the sink table's lineage with per-column source mappings
     */
    public TableLineage parseFieldLineage(String sql) {
        // 1. Parse the statement into the sink table name and the original RelNode tree.
        Tuple2<String, RelNode> parsed = parseStatement(sql);
        String sinkTable = parsed.getField(0);
        RelNode oriRelNode = parsed.getField(1);

        // 2. Run the planner's optimizer. NOTE(review): the optimized tree is
        // currently unused — lineage below is computed from the ORIGINAL tree
        // (the pre-fix code also passed oriRelNode twice). Confirm whether the
        // optimized tree should be used before removing this call.
        PlannerBase plannerBase = (PlannerBase) tableEnv.getPlanner();
        plannerBase.optimize(oriRelNode);

        // 3. Build lineage from RelMetadataQuery column origins.
        return buildFieldLineageResult(sinkTable, oriRelNode);
    }

    /**
     * Parses {@code sql} into exactly one operation and extracts the sink table's
     * object name plus the Calcite plan of its query.
     *
     * @throws TableException if the input holds more than one statement or the
     *     statement is not an INSERT (SinkModifyOperation)
     */
    private Tuple2<String, RelNode> parseStatement(String sql) {
        List<Operation> operations = tableEnv.getParser().parse(sql);

        if (operations.size() != 1) {
            throw new TableException(
                    "Unsupported SQL query! only accepts a single SQL statement.");
        }
        Operation operation = operations.get(0);
        if (!(operation instanceof SinkModifyOperation)) {
            throw new TableException("Only insert is supported now.");
        }
        SinkModifyOperation sinkOperation = (SinkModifyOperation) operation;
        PlannerQueryOperation queryOperation = (PlannerQueryOperation) sinkOperation.getChild();
        return new Tuple2<>(
                sinkOperation.getContextResolvedTable().getIdentifier().getObjectName(),
                queryOperation.getCalciteTree());
    }

    /**
     * Builds the sink table's lineage: for each sink column, resolves its origin
     * table and column via {@link RelMetadataQuery#getColumnOrigins}.
     *
     * @param sinkTable object name of the sink table
     * @param relNode   Calcite tree of the INSERT's query
     * @return the sink table's TableLineage populated with per-column sources
     * @throws TableException if the sink table is not present in the catalog
     */
    private TableLineage buildFieldLineageResult(String sinkTable, RelNode relNode) {
        // Target (sink) columns, in schema order.
        List<String> targetColumnList = tableEnv.from(sinkTable)
                .getResolvedSchema()
                .getColumnNames();

        RelMetadataQuery metadataQuery = relNode.getCluster().getMetadataQuery();

        // Index every catalog table by name so column origins can be resolved back
        // to a TableLineage carrying the table's connector options.
        Map<String, TableLineage> tableMap = new HashMap<>();
        for (String tableName : tableEnv.listTables()) {
            TableImpl tableImpl = (TableImpl) tableEnv.from(tableName);
            SourceQueryOperation sourceOp = (SourceQueryOperation) tableImpl.getQueryOperation();
            Map<String, String> options =
                    sourceOp.getContextResolvedTable().getResolvedTable().getOptions();

            TableLineage tableLineage = new TableLineage();
            // Fix: each entry carries its OWN name; the original stamped every
            // table (sources included) with the sink table's name.
            tableLineage.setTableName(tableName);
            tableLineage.setOptions(options);
            tableMap.put(tableName, tableLineage);
        }

        TableLineage sinkTableLineage = tableMap.get(sinkTable);
        if (sinkTableLineage == null) {
            // Guard: the original would NPE below if the sink was not listed.
            throw new TableException("Sink table not found in catalog: " + sinkTable);
        }

        for (int index = 0; index < targetColumnList.size(); index++) {
            String targetColumn = targetColumnList.get(index);
            List<ColumnLineage> lineageInfoList = new ArrayList<>();
            sinkTableLineage.addColumnLineages(targetColumn, lineageInfoList);

            Set<RelColumnOrigin> origins = metadataQuery.getColumnOrigins(relNode, index);
            if (CollectionUtils.isNotEmpty(origins)) {
                for (RelColumnOrigin origin : origins) {
                    // Use the unqualified (last) name segment so the lookup matches
                    // the keys produced by tableEnv.listTables().
                    RelOptTable table = origin.getOriginTable();
                    List<String> names = table.getQualifiedName();
                    String sourceTable = names.get(names.size() - 1);

                    int ordinal = origin.getOriginColumnOrdinal();
                    String sourceColumn = table.getRowType().getFieldNames().get(ordinal);

                    ColumnLineage columnLineage = new ColumnLineage();
                    columnLineage.setSourceTable(tableMap.get(sourceTable));
                    columnLineage.setSourceColumn(sourceColumn);
                    lineageInfoList.add(columnLineage);
                }
            }
        }

        return sinkTableLineage;
    }

    /**
     * Debug helper: walks a RelNode tree and logs Project expressions
     * (direct field references and computed RexCall expressions).
     * Currently unreferenced by the lineage flow.
     */
    private static class TransformationExtractor extends RelVisitor {
        @Override
        public void visit(RelNode node, int ordinal, RelNode parent) {
            LOG.debug("Visiting node: {}", node);
            if (node instanceof Project) {
                Project project = (Project) node;
                List<String> fieldNames = project.getRowType().getFieldNames();
                for (int i = 0; i < project.getProjects().size(); i++) {
                    RexNode expr = project.getProjects().get(i);
                    String fieldName = fieldNames.get(i);

                    if (expr instanceof RexInputRef) {
                        RexInputRef inputRef = (RexInputRef) expr;
                        String sourceField = project.getInput()
                                .getRowType()
                                .getFieldNames()
                                .get(inputRef.getIndex());
                        LOG.debug("{} <- {}", fieldName, sourceField);
                    } else if (expr instanceof RexCall) {
                        LOG.debug("{} <- expression {}", fieldName, expr);
                    }
                }
            }

            super.visit(node, ordinal, parent);
        }
    }
}
