package com.hive;

import com.hive.jdbc.impl.HIveJdbcServiceImpl;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Properties;
import org.apache.calcite.adapter.java.ReflectiveSchema;
import org.apache.calcite.adapter.jdbc.JdbcSchema;
import org.apache.calcite.avatica.com.fasterxml.jackson.databind.node.JsonNodeFactory;
import org.apache.calcite.avatica.com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.calcite.config.CalciteConnectionConfigImpl;
import org.apache.calcite.config.CalciteConnectionProperty;
import org.apache.calcite.config.Lex;
import org.apache.calcite.jdbc.CalciteConnection;
import org.apache.calcite.jdbc.CalcitePrepare;
import org.apache.calcite.prepare.CalciteCatalogReader;
import org.apache.calcite.rel.type.RelDataTypeSystem;
import org.apache.calcite.schema.Schema;
import org.apache.calcite.schema.SchemaPlus;
import org.apache.calcite.server.CalciteServerStatement;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.calcite.sql.parser.SqlParseException;
import org.apache.calcite.sql.parser.SqlParser;
import org.apache.calcite.sql.type.SqlTypeFactoryImpl;
import org.apache.calcite.sql.validate.SqlValidator;
import org.apache.calcite.sql.validate.SqlValidatorUtil;
import org.apache.calcite.tools.FrameworkConfig;
import org.apache.calcite.tools.Frameworks;
import org.apache.hadoop.hbase.util.Triple;
import org.datanucleus.store.rdbms.datasource.dbcp.BasicDataSource;

/**
 * Demonstrates parsing and validating a SQL statement with Apache Calcite
 * against the catalog of a live Hive JDBC connection.
 *
 * @author qq
 * @date 2023/7/27 10:27
 * @since JDK 1.8
 */
public class CalciteHiveDemo {

  public static void main(String[] args) throws SqlParseException {
    String sql = "select * from hive_1.testtable1 ";
    valid(sql);
  }

  /** Reflective test schema exposing a single-row {@code rdf} table. */
  public static class TestSchema {

    public final Triple[] rdf = {new Triple("s", "p", "o")};

  }

  /**
   * Parses {@code sql} with MySQL lexical rules and validates it (table names,
   * column names, function names and column types) against the catalog obtained
   * from a Hive JDBC statement's Calcite prepare context.
   *
   * @param sql the SQL query text to parse and validate
   * @throws SqlParseException if the SQL text cannot be parsed
   * @throws IllegalStateException if the Calcite prepare context cannot be
   *     obtained from the Hive JDBC statement
   */
  public static void valid(String sql) throws SqlParseException {

    // Root schema with the reflective test schema registered under "T".
    SchemaPlus schemaPlus = Frameworks.createRootSchema(true);
    schemaPlus.add("T", new ReflectiveSchema(new TestSchema()));

    FrameworkConfig frameworkConfig =
        Frameworks.newConfigBuilder().defaultSchema(schemaPlus).build();

    // Case-insensitive parser config; its caseSensitive flag is propagated to
    // the catalog reader below so name resolution matches parsing behaviour.
    SqlParser.Config parserConfig =
        SqlParser.configBuilder(frameworkConfig.getParserConfig())
            .setCaseSensitive(false)
            .build();

    // Parse with MySQL lexical conventions.
    SqlParser.Config mysqlConfig = SqlParser.configBuilder().setLex(Lex.MYSQL).build();
    SqlNode sqlNode = SqlParser.create(sql, mysqlConfig).parseQuery(sql);

    // Obtain the Calcite prepare context from a live Hive JDBC statement.
    // Fail fast with the cause preserved instead of swallowing the exception:
    // every step below requires the context, and continuing with null would
    // surface only as an opaque NullPointerException.
    // TODO(review): the Statement is never closed (resource leak); confirm
    // whether the prepare context remains valid after closing it before
    // switching to try-with-resources.
    CalcitePrepare.Context prepareContext;
    try {
      Statement stat = new HIveJdbcServiceImpl().getStatement();
      CalciteServerStatement cstat = stat.unwrap(CalciteServerStatement.class);
      prepareContext = cstat.createPrepareContext();
    } catch (SQLException e) {
      throw new IllegalStateException(
          "Unable to obtain Calcite prepare context from Hive JDBC statement", e);
    }

    // Case sensitivity must match the parser config, otherwise tables
    // may not be found during validation.
    Properties properties = new Properties();
    properties.setProperty(
        CalciteConnectionProperty.CASE_SENSITIVE.camelName(),
        String.valueOf(parserConfig.caseSensitive()));

    SqlTypeFactoryImpl factory = new SqlTypeFactoryImpl(RelDataTypeSystem.DEFAULT);
    CalciteCatalogReader calciteCatalogReader =
        new CalciteCatalogReader(
            prepareContext.getRootSchema(),
            prepareContext.getDefaultSchemaPath(),
            factory,
            new CalciteConnectionConfigImpl(properties));

    // Validate table names, column names, function names and column types.
    SqlValidator validator =
        SqlValidatorUtil.newValidator(
            SqlStdOperatorTable.instance(),
            calciteCatalogReader,
            factory,
            SqlValidator.Config.DEFAULT);
    validator.validate(sqlNode);
  }
}
