package com.hub.realtime.flinkshims.core;

import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.apache.flink.configuration.ConfigOption;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.table.api.*;
import org.apache.flink.table.api.config.ExecutionConfigOptions;
import org.apache.flink.table.api.config.OptimizerConfigOptions;
import org.apache.flink.table.api.config.TableConfigOptions;

import java.lang.reflect.Field;
import java.util.*;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.function.Function;
import java.util.stream.Collectors;

@Slf4j
public class FlinkSqlExecutor {

    /**
     * All known table/SQL config options keyed by option key. Built once via
     * reflection and shared by every call to {@link #tableConfigOptions()},
     * instead of re-scanning the option classes on each SET statement.
     */
    private static final Map<String, ConfigOption<?>> TABLE_CONFIG_OPTIONS = buildTableConfigOptions();

    /**
     * Returns every table/SQL {@link ConfigOption} declared on
     * {@link ExecutionConfigOptions}, {@link OptimizerConfigOptions} and
     * {@link TableConfigOptions}, keyed by option key.
     *
     * @param <T> value type the caller expects; the cast is unchecked because the
     *            underlying options carry heterogeneous value types
     * @return a fresh mutable map of option key to option
     */
    @SuppressWarnings("unchecked")
    public static <T> Map<String, ConfigOption<T>> tableConfigOptions() {
        // Hand out a copy so callers cannot mutate the shared cache.
        return new HashMap<>((Map<String, ConfigOption<T>>) (Map<?, ?>) TABLE_CONFIG_OPTIONS);
    }

    /** Scans the well-known *ConfigOptions holder classes exactly once. */
    private static Map<String, ConfigOption<?>> buildTableConfigOptions() {
        Map<String, ConfigOption<?>> configOptions = new HashMap<>();
        List<Class<?>> configClasses = Arrays.asList(
                ExecutionConfigOptions.class,
                OptimizerConfigOptions.class,
                TableConfigOptions.class);
        configClasses.forEach(c -> configOptions.putAll(extractConfig(c)));
        return configOptions;
    }

    /**
     * Extracts every static {@link ConfigOption} field declared on {@code clazz}.
     *
     * @param clazz holder class whose public static fields are ConfigOptions
     * @return option key to option for each readable field
     */
    private static Map<String, ConfigOption<?>> extractConfig(Class<?> clazz) {
        Map<String, ConfigOption<?>> configOptions = new HashMap<>();
        for (Field field : clazz.getDeclaredFields()) {
            if (ConfigOption.class.isAssignableFrom(field.getType())) {
                try {
                    // Static field: pass null as the instance argument.
                    ConfigOption<?> configOption = (ConfigOption<?>) field.get(null);
                    configOptions.put(configOption.key(), configOption);
                } catch (Exception ex) {
                    // Best effort: skip unreadable fields but keep the rest.
                    log.error("Fail to get ConfigOption", ex);
                }
            }
        }
        return configOptions;
    }

    /**
     * Parses and executes a (possibly multi-statement) Flink SQL script against
     * the given {@link TableEnvironment}. DDL/catalog/config statements are
     * executed immediately; INSERT statements are collected into one
     * {@link StatementSet} so they can later run as a single job.
     *
     * @param sql      the raw SQL script
     * @param context  the table environment to execute against
     * @param function optional log sink; when {@code null}, messages go to slf4j
     * @return a StatementSet holding all INSERT statements (not yet executed),
     *         or {@code null} when the script contains no INSERT
     * @throws IllegalArgumentException on unknown commands, invalid SET keys or
     *                                  dialects, or unsupported SELECT statements
     */
    public static StatementSet executeSql(String sql, TableEnvironment context, Function<String, String> function) {
        List<String> insertStatements = new ArrayList<>();
        // TODO: if the front end uses a HiveCatalog, it must be registered here.
        SqlCommandParser.parseSQL(sql).forEach(call -> {
            // First operand is the primary argument of most commands (may be absent).
            String args = null;
            if (call.getOperands() != null && call.getOperands().length > 0) {
                args = call.getOperands()[0];
            }
            SqlCommand command = call.getCommand();
            switch (command) {
                case USE:
                    context.useDatabase(args);
                    logInfo(command.name().concat(": ").concat(args), function);
                    break;
                case USE_CATALOG:
                    context.useCatalog(args);
                    logInfo(command.name().concat(": ").concat(args), function);
                    break;
                case SHOW_CATALOGS:
                    logInfo(StringUtils.join(context.listCatalogs(), "\n"), function);
                    break;
                case SHOW_CURRENT_CATALOG:
                    logInfo(command.name() + ": " + context.getCurrentCatalog(), function);
                    break;
                case SHOW_DATABASES:
                    logInfo(command.name().concat(": ")
                            .concat(StringUtils.join(context.listDatabases(), "\n")), function);
                    break;
                case SHOW_CURRENT_DATABASE:
                    logInfo(command.name() + ": " + context.getCurrentDatabase(), function);
                    break;
                case SHOW_TABLES: {
                    // Hide Flink's auto-generated unnamed tables from the listing.
                    String tables = Arrays.stream(context.listTables())
                            .filter(t -> !t.startsWith("UnnamedTable"))
                            .collect(Collectors.joining("\n"));
                    logInfo(command.name().concat(": ").concat(tables), function);
                    break;
                }
                case SHOW_FUNCTIONS:
                    logInfo(command.name().concat(": ")
                            .concat(StringUtils.join(context.listUserDefinedFunctions(), "\n")), function);
                    break;
                case SHOW_MODULES:
                    logInfo(command.name().concat(": ")
                            .concat(StringUtils.join(context.listModules(), "\n")), function);
                    break;
                case SET: {
                    if (!tableConfigOptions().containsKey(args)) {
                        throw new IllegalArgumentException(args.concat(
                                " is not a valid table/sql config, please check link: https://ci.apache.org/projects/flink/flink-docs-release-1.10/dev/table/config.html"));
                    }
                    // Guard against "SET key" with no value (was an unchecked [1] access).
                    if (call.getOperands().length < 2) {
                        throw new IllegalArgumentException("SET requires a value for key: ".concat(args));
                    }
                    String value = call.getOperands()[1];
                    if (TableConfigOptions.TABLE_SQL_DIALECT.key().equalsIgnoreCase(args)) {
                        try {
                            context.getConfig().setSqlDialect(SqlDialect.valueOf(value.toUpperCase(Locale.ROOT)));
                        } catch (Exception ex) {
                            // Preserve the cause so the bad dialect is diagnosable.
                            throw new IllegalArgumentException(value.concat(" is not a valid dialect"), ex);
                        }
                    } else {
                        context.getConfig().getConfiguration().setString(args, value);
                    }
                    logInfo(command.name().concat(": ").concat(args).concat(" --> ").concat(value), null);
                    break;
                }
                case RESET:
                    try {
                        // Configuration exposes no removal API here, so reach into
                        // its backing map reflectively.
                        Field confDataField = Configuration.class.getDeclaredField("confData");
                        confDataField.setAccessible(true);
                        @SuppressWarnings("unchecked")
                        Map<String, Object> confData =
                                (Map<String, Object>) confDataField.get(context.getConfig().getConfiguration());
                        synchronized (confData) {
                            if ("ALL".equalsIgnoreCase(args)) {
                                confData.clear();
                            } else {
                                confData.remove(args);
                            }
                        }
                        logInfo(command.name().concat(": ").concat(args), null);
                    } catch (Exception ex) {
                        throw new IllegalArgumentException(ex);
                    }
                    break;
                case DESC:
                case DESCRIBE: {
                    TableSchema schema = context.scan(args).getSchema();
                    StringBuilder builder = new StringBuilder("Column\tType\n");
                    for (int i = 0; i < schema.getFieldCount(); i++) {
                        builder.append(schema.getFieldName(i).get())
                                .append('\t')
                                .append(schema.getFieldDataType(i).get())
                                .append('\n');
                    }
                    logInfo(builder.toString(), function);
                    break;
                }
                case EXPLAIN: {
                    TableResult tableResult = context.executeSql(call.getOriginSql());
                    // valueOf tolerates a null field ("null") instead of throwing NPE.
                    String plan = String.valueOf(tableResult.collect().next().getField(0));
                    logInfo(plan, function);
                    break;
                }
                case INSERT_INTO:
                case INSERT_OVERWRITE:
                    // Batch inserts into one StatementSet so they run as a single job.
                    insertStatements.add(call.getOriginSql());
                    break;
                case SELECT:
                    throw new IllegalArgumentException("[Flinkserver] Unsupported SELECT in current version.");
                case CREATE_FUNCTION:
                case DROP_FUNCTION:
                case ALTER_FUNCTION:
                case CREATE_CATALOG:
                case DROP_CATALOG:
                case CREATE_TABLE:
                case DROP_TABLE:
                case ALTER_TABLE:
                case CREATE_VIEW:
                case DROP_VIEW:
                case CREATE_DATABASE:
                case DROP_DATABASE:
                case ALTER_DATABASE:
                    // DDL statements execute eagerly; their TableResult carries no payload.
                    context.executeSql(call.getOriginSql());
                    logInfo(command.name().concat(": ").concat(args), null);
                    break;
                default:
                    throw new IllegalArgumentException("[Flinkserver] Unsupported command: ".concat(command.name()));
            }
        });
        StatementSet statementSet = null;
        if (!insertStatements.isEmpty()) {
            statementSet = context.createStatementSet();
            for (String insertSql : insertStatements) {
                log.info("=====插入语句：{}", insertSql);
                statementSet.addInsertSql(insertSql);
            }
        }
        logInfo("\n\n\n==============flinkSql==============\n\n".concat(sql)
                .concat("\n\n============================\n\n\n"), null);
        return statementSet;
    }

    /**
     * Routes a log line: through {@code function} when supplied, otherwise to slf4j.
     *
     * @param logStr   message to emit
     * @param function optional sink; its return value is ignored
     */
    private static void logInfo(String logStr, Function<String, String> function) {
        if (function == null) {
            log.info(logStr);
        } else {
            function.apply(logStr);
        }
    }
}
