package com.apex.flink.transform;

import com.apex.flink.FlinkEnvironment;
import com.apex.flink.batch.FlinkBatchTransform;
import com.apex.flink.utils.ConfigKeyName;
import com.apex.flink.utils.TableUtil;
import com.apex.flink.stream.FlinkStreamTransform;
import com.typesafe.config.Config;
import org.apache.commons.lang3.StringUtils;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.table.api.SqlParserException;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.BatchTableEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.table.catalog.hive.HiveCatalog;
import org.apache.flink.types.Row;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.List;
import java.util.Objects;

/**
 * Entry point for executing Flink SQL in both batch and stream pipelines.
 *
 * <p>In stream mode the user may either supply a full SQL script (parsed into
 * individual commands by {@code SqlCommandParser}) or a single result-set query
 * via the plugin config; the final SELECT result is handed downstream as a
 * {@code DataStream<Row>}.
 */
public class Sql implements FlinkBatchTransform<Row, Row>, FlinkStreamTransform<Row, Row> {

    private static final Logger LOG = LoggerFactory.getLogger(Sql.class);

    /** Name under which the Hive catalog is registered when the hive dialect is enabled. */
    private static final String HIVE_CATALOG = "hive";
    private static final String DEFAULT_HIVE_DATABASE = "default";
    private static final String DEFAULT_HIVE_CONF_DIR = "/etc/hive/conf";

    /** User-defined result-set SQL (single query), read from the plugin config. */
    private String sql;
    /** Plugin configuration handle. */
    private Config config;
    /** Stream SQL execution environment; assigned in {@link #processStream}. */
    private StreamTableEnvironment tEnv;
    /** Final query result produced by a SELECT statement, if any. */
    private Table table = null;
    // Hive settings are per-instance (they were mutable statics before, which let
    // concurrently prepared instances clobber each other's configuration).
    private String hiveDatabase = DEFAULT_HIVE_DATABASE;
    private String hiveConfDir = DEFAULT_HIVE_CONF_DIR;

    /**
     * Batch execution: runs the configured result-set SQL, if any.
     *
     * @param env  execution context
     * @param data upstream result set (unused; the query defines the output)
     * @return the query result as a {@code DataSet<Row>}, or {@code null} when no SQL is configured
     */
    @Override
    public DataSet<Row> processBatch(FlinkEnvironment env, DataSet<Row> data) {
        BatchTableEnvironment tableEnvironment = env.getBatchTableEnvironment();
        DataSet<Row> dataSet = null;
        if (StringUtils.isNotBlank(sql)) {
            Table result = tableEnvironment.sqlQuery(sql);
            dataSet = TableUtil.tableToDataSet(tableEnvironment, result);
        }
        return dataSet;
    }

    /**
     * Stream execution: runs the user SQL script if present, otherwise the single
     * configured result-set query.
     *
     * @param env        execution context
     * @param dataStream upstream result set (unused; the script/query defines the output)
     * @return the final SELECT result as a {@code DataStream<Row>}, or {@code null}
     *         when the script contains no SELECT and no fallback query is configured
     */
    @Override
    public DataStream<Row> processStream(FlinkEnvironment env, DataStream<Row> dataStream) {
        tEnv = env.getStreamTableEnvironment();
        // Full SQL script supplied by the user (may be empty).
        List<String> sqlAll = env.getSqlAll();
        DataStream<Row> result = null;
        if (!sqlAll.isEmpty()) {
            // Execute every parsed command in script order.
            for (SqlCommandParser.SqlCommandCall call : SqlCommandParser.parse(sqlAll)) {
                LOG.info("Executing statement:\n{}", call.operands[0]);
                callCommand(call);
            }
            // A SELECT in the script populates 'table'; expose it downstream.
            if (table != null) {
                result = TableUtil.tableToDataStream(tEnv, table, false);
            }
        } else if (StringUtils.isNotBlank(sql)) {
            // No script: fall back to the single configured result-set query.
            LOG.info("Executing query:\n{}", sql);
            table = tEnv.sqlQuery(sql);
            result = TableUtil.tableToDataStream(tEnv, table, false);
        }
        return result;
    }

    /**
     * Reads the plugin configuration: result-set SQL, Hive conf dir and database.
     *
     * @param plugin execution environment (unused here; required by the plugin contract)
     */
    @Override
    public void prepare(Object plugin) {
        if (config.hasPath(ConfigKeyName.FLINK_RUN_SQL)) {
            sql = config.getString(ConfigKeyName.FLINK_RUN_SQL);
        }
        hiveConfDir = config.hasPath(ConfigKeyName.HIVE_CONF_DIR)
                ? config.getString(ConfigKeyName.HIVE_CONF_DIR)
                : DEFAULT_HIVE_CONF_DIR;
        hiveDatabase = config.hasPath(ConfigKeyName.HIVE_DATABASE)
                ? config.getString(ConfigKeyName.HIVE_DATABASE)
                : DEFAULT_HIVE_DATABASE;
    }

    /**
     * @return the plugin configuration
     */
    @Override
    public Config getConfig() {
        return config;
    }

    /**
     * @param config the plugin configuration to use
     */
    @Override
    public void setConfig(Config config) {
        this.config = config;
    }

    // --------------------------------------------------------------------------------------------

    /**
     * Dispatches a single parsed SQL command to the appropriate executor.
     *
     * @param cmdCall parsed command with its operands
     * @throws RuntimeException for unsupported commands or parse failures
     */
    private void callCommand(SqlCommandParser.SqlCommandCall cmdCall) {
        switch (cmdCall.command) {
            case SET:
                callSet(cmdCall);
                break;
            case SHOW:
            case DROP_TABLE:
                // Commands whose result is printed to stdout by Flink.
                executeAndPrint(cmdCall.operands[0]);
                break;
            case USE:
            case CREATE_FUNCTION:
            case CREATE_DATABASE:
            case CREATE_CATALOG:
            case CREATE_TABLE:
            case CREATE_VIEW:
            case INSERT_INTO:
            case INSERT_OVERWRITE:
                // All DDL/DML statements share the same sqlUpdate path.
                executeUpdate(cmdCall.operands[0]);
                break;
            case SELECT_TABLE:
                // Remember the SELECT result so processStream can hand it downstream.
                table = callSelect(cmdCall);
                break;
            default:
                throw new RuntimeException("Unsupported command: " + cmdCall.command);
        }
    }

    /**
     * Applies a SET statement to the table environment configuration. Setting any
     * key to the value {@code "hive"} additionally registers and activates the
     * Hive catalog (hive dialect support).
     *
     * @param cmdCall parsed SET command; operands are {key, value}
     */
    private void callSet(SqlCommandParser.SqlCommandCall cmdCall) {
        String key = cmdCall.operands[0];
        String value = cmdCall.operands[1];
        LOG.info("set {}={};", key, value);
        // Constant-first equals avoids an NPE when the operand is null.
        if ("hive".equals(value)) {
            HiveCatalog hiveCatalog = new HiveCatalog(HIVE_CATALOG, hiveDatabase, hiveConfDir);
            tEnv.registerCatalog(HIVE_CATALOG, hiveCatalog);
            tEnv.useCatalog(HIVE_CATALOG);
        }
        tEnv.getConfig().getConfiguration().setString(key, value);
    }

    /**
     * Executes a DDL/DML statement via {@code sqlUpdate}.
     *
     * @param stmt the statement text
     * @throws RuntimeException wrapping the parser error when the statement is invalid
     */
    private void executeUpdate(String stmt) {
        try {
            tEnv.sqlUpdate(stmt);
        } catch (SqlParserException e) {
            throw new RuntimeException("SQL parse failed:\n" + stmt + "\n", e);
        }
    }

    /**
     * Executes a statement whose result should be printed (SHOW, DROP TABLE).
     *
     * @param stmt the statement text
     * @throws RuntimeException wrapping the parser error when the statement is invalid
     */
    private void executeAndPrint(String stmt) {
        try {
            tEnv.executeSql(stmt).print();
        } catch (SqlParserException e) {
            throw new RuntimeException("SQL parse failed:\n" + stmt + "\n", e);
        }
    }

    /**
     * Executes a stand-alone SELECT and returns its result table.
     *
     * @param cmdCall parsed SELECT command; operand 0 is the query text
     * @return the query result
     * @throws RuntimeException wrapping the parser error when the query is invalid
     */
    private Table callSelect(SqlCommandParser.SqlCommandCall cmdCall) {
        String dml = cmdCall.operands[0];
        try {
            return tEnv.sqlQuery(dml);
        } catch (SqlParserException e) {
            throw new RuntimeException("SQL parse failed:\n" + dml + "\n", e);
        }
    }
}
