package com.archgeek.calcite;

import org.apache.calcite.adapter.csv.CsvSchema;
import org.apache.calcite.adapter.csv.CsvTable;
import org.apache.calcite.config.CalciteConnectionConfigImpl;
import org.apache.calcite.config.CalciteConnectionProperty;
import org.apache.calcite.jdbc.CalciteSchema;
import org.apache.calcite.jdbc.JavaTypeFactoryImpl;
import org.apache.calcite.plan.ConventionTraitDef;
import org.apache.calcite.plan.RelOptCluster;
import org.apache.calcite.plan.RelOptTable;
import org.apache.calcite.plan.RelOptUtil;
import org.apache.calcite.plan.hep.HepPlanner;
import org.apache.calcite.plan.hep.HepProgramBuilder;
import org.apache.calcite.prepare.CalciteCatalogReader;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rel.RelRoot;
import org.apache.calcite.rel.rules.CoreRules;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rex.RexBuilder;
import org.apache.calcite.schema.SchemaPlus;
import org.apache.calcite.sql.SqlExplainFormat;
import org.apache.calcite.sql.SqlExplainLevel;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.calcite.sql.parser.SqlParseException;
import org.apache.calcite.sql.parser.SqlParser;
import org.apache.calcite.sql.parser.impl.SqlParserImpl;
import org.apache.calcite.sql.validate.SqlValidator;
import org.apache.calcite.sql.validate.SqlValidatorUtil;
import org.apache.calcite.sql.validate.SqlValidatorWithHints;
import org.apache.calcite.sql2rel.SqlToRelConverter;
import org.apache.calcite.sql2rel.StandardConvertletTable;
import org.apache.calcite.tools.Frameworks;
import org.checkerframework.checker.nullness.qual.Nullable;

import java.io.File;
import java.util.List;
import java.util.Properties;

/**
 * Demonstrates the end-to-end Apache Calcite query pipeline over two
 * CSV-backed tables: parse SQL → validate → convert to a relational
 * (logical) plan → optimize with a {@link HepPlanner}, printing each
 * intermediate representation to stdout.
 *
 * @author pizhihui
 */
public class C1_PrintPlan {

    /**
     * Runs the full parse/validate/convert/optimize pipeline for a
     * hard-coded join query and prints the parsed {@link SqlNode}, the
     * validated {@link SqlNode}, the logical plan, and the plan after
     * rule-based optimization.
     *
     * @throws SqlParseException if the hard-coded SQL cannot be parsed
     */
    public void doRun() throws SqlParseException {

        /*
         * ================ Build the schema: register two CSV-backed tables
         * ("orders" and "consumers") located at the classpath root.
         */
        SchemaPlus schema = Frameworks.createRootSchema(true);
        java.net.URL resourceRoot = getClass().getClassLoader().getResource("");
        if (resourceRoot == null) {
            // Fail fast with a clear message instead of an opaque NPE when the
            // classpath root cannot be resolved (e.g. when run from a jar).
            throw new IllegalStateException(
                    "classpath root resource not found; cannot locate CSV table files");
        }
        File file = new File(resourceRoot.getPath());

        CsvSchema csvSchema = new CsvSchema(file, CsvTable.Flavor.SCANNABLE);
        schema.add("orders", csvSchema.getTable("orders"));
        schema.add("consumers", csvSchema.getTable("consumers"));

        System.out.println(schema.getTableNames());

        /*
         * The query to plan: join orders to consumers, filter on amount,
         * sort by id, and limit the result to 5 rows.
         */
        String sql =
                "select o.id,o.goods,o.price,o.amount,c.firstname,c.lastname " +
                        "from orders as o " +
                        "left outer join consumers c on o.user_id = c.id " +
                        "where o.amount > 30 " +
                        "order by o.id " +
                        "limit 5 ";

        /*
         * Parse the SQL text into a SqlNode tree. Identifiers are treated
         * case-insensitively to match the catalog reader configured below.
         */
        SqlParser.Config config = SqlParser.config()
                .withCaseSensitive(false)
                .withParserFactory(SqlParserImpl.FACTORY);
        SqlParser sqlParser = SqlParser.create(sql, config);
        SqlNode sqlNodeParsed = sqlParser.parseQuery();
        System.out.println("[parsed sqlNode]");
        System.out.println(sqlNodeParsed);

        // The connection config must use the same case sensitivity as the
        // parser, otherwise table/column lookups during validation fail.
        JavaTypeFactoryImpl typeFactory = new JavaTypeFactoryImpl();
        Properties properties = new Properties();
        properties.setProperty(CalciteConnectionProperty.CASE_SENSITIVE.camelName(), "false");

        // Hoisted: CalciteSchema.from(schema) was previously computed twice.
        CalciteSchema calciteSchema = CalciteSchema.from(schema);
        CalciteCatalogReader catalogReader = new CalciteCatalogReader(
                calciteSchema,
                calciteSchema.path(null),
                typeFactory,
                new CalciteConnectionConfigImpl(properties));

        SqlValidatorWithHints validator = SqlValidatorUtil.newValidator(
                SqlStdOperatorTable.instance(),
                catalogReader,
                typeFactory,
                SqlValidator.Config.DEFAULT
        );
        SqlNode sqlNodeValidated = validator.validate(sqlNodeParsed);
        System.out.println();
        System.out.println("[validated sqlNode]");
        System.out.println(sqlNodeValidated);

        /*
         * Convert the validated AST to a relational plan, then run a single
         * heuristic rule (push filters into joins). Note: the HepPlanner
         * output here is still a tree of logical rel nodes — it is an
         * optimized logical plan, not a true physical plan.
         */
        RexBuilder rexBuilder = new RexBuilder(typeFactory);
        HepProgramBuilder hepProgramBuilder = new HepProgramBuilder();
        hepProgramBuilder.addRuleInstance(CoreRules.FILTER_INTO_JOIN);

        HepPlanner hepPlanner = new HepPlanner(hepProgramBuilder.build());
        hepPlanner.addRelTraitDef(ConventionTraitDef.INSTANCE);

        RelOptCluster relOptCluster = RelOptCluster.create(hepPlanner, rexBuilder);
        SqlToRelConverter sqlToRelConverter = new SqlToRelConverter(
                new RelOptTable.ViewExpander() {
                    // Stub expander: this demo query references no views, so
                    // Calcite never invokes this; returning null is a
                    // placeholder, not a valid expansion.
                    @Override
                    public RelRoot expandView(RelDataType rowType, String queryString, List<String> schemaPath, @Nullable List<String> viewPath) {
                        return null;
                    }
                },
                validator,
                catalogReader,
                relOptCluster,
                StandardConvertletTable.INSTANCE,
                SqlToRelConverter.config()
        );
        RelRoot logicalPlan = sqlToRelConverter.convertQuery(sqlNodeValidated, false, false);

        System.out.println();
        System.out.println(RelOptUtil.dumpPlan("[Logical Plan]", logicalPlan.rel, SqlExplainFormat.TEXT, SqlExplainLevel.NON_COST_ATTRIBUTES));

        hepPlanner.setRoot(logicalPlan.rel);
        RelNode phyPlan = hepPlanner.findBestExp();
        // Fixed typo in the printed header: "paln" -> "plan".
        System.out.println(RelOptUtil.dumpPlan("[Physical plan]", phyPlan, SqlExplainFormat.TEXT, SqlExplainLevel.NON_COST_ATTRIBUTES));
    }

    /**
     * Entry point: runs the demo pipeline once.
     *
     * @param args unused
     * @throws SqlParseException if the demo SQL fails to parse
     */
    public static void main(String[] args) throws SqlParseException {
        C1_PrintPlan run = new C1_PrintPlan();
        run.doRun();
    }

}
