package com.sui.bigdata.flink.sql.core.exec;

import com.sui.bigdata.flink.sql.core.parser.CreateTmpTableParser;
import com.sui.bigdata.flink.sql.core.parser.InsertSqlParser;
import com.sui.bigdata.flink.sql.core.parser.SqlTree;
import com.sui.bigdata.flink.sql.core.side.SideSqlExec;
import com.sui.bigdata.flink.sql.core.side.SideTableInfo;
import com.sui.bigdata.flink.sql.core.sink.IStreamSinkGener;
import com.sui.bigdata.flink.sql.core.util.ClassUtil;
import com.sui.bigdata.flink.sql.core.util.DtStringUtil;
import com.sui.bigdata.flink.sql.core.util.FlinkUtil;
import com.sui.bigdata.flink.sql.sink.console.ConsoleSink;
import com.sui.bigdata.flink.sql.sink.console.table.ConsoleTableInfo;
import com.sui.bigdata.flink.sql.sink.kafka.KafkaSink;
import com.sui.bigdata.flink.sql.sink.kafka.table.KafkaSinkTableInfo;
import org.apache.calcite.config.Lex;
import org.apache.calcite.sql.SqlInsert;
import org.apache.calcite.sql.SqlNode;
import org.apache.flink.table.api.*;
import org.apache.flink.table.api.java.StreamTableEnvironment;
import org.apache.flink.table.sinks.TableSink;
import org.apache.flink.table.types.DataType;

import java.util.Locale;
import java.util.Map;
import java.util.Properties;
import java.util.UUID;

/**
 * @description: executes parsed INSERT/SELECT statements, mapping columns by name when inserting into a sink table
 * @author: maqi
 * @create: 2019/08/15 11:09
 */
public class FlinkSqlExec {

    /** Shared Calcite parser configuration (MySQL lexical rules), reused for every re-parse. */
    private static final org.apache.calcite.sql.parser.SqlParser.Config config = org.apache.calcite.sql.parser.SqlParser
            .configBuilder()
            .setLex(Lex.MYSQL)
            .build();

    /**
     * Executes every INSERT statement of a parsed SQL job.
     * <p>
     * For each target table: if it is a registered temporary table, the SELECT part of the
     * INSERT is extracted and registered through {@code sideSqlExec}; otherwise, if any source
     * table of the statement is a dimension (side) table, the statement is delegated to the
     * side-table executor; else it is submitted directly to the table environment.
     *
     * @param tableEnv           Flink stream table environment the statements run in
     * @param sqlTree            parsed SQL tree, used to look up temporary tables
     * @param sideTableMap       registered dimension (side) tables, keyed by table name
     * @param registerTableCache already-registered tables, keyed by table name
     * @param sideSqlExec        executor handling statements that join side tables
     * @param result             one parsed INSERT statement (targets, sources, exec SQL)
     * @param envParallelism     parallelism of the execution environment
     * @throws Exception if parsing or statement registration/execution fails
     */
    public static void insertSqlExe(StreamTableEnvironment tableEnv,
                                    SqlTree sqlTree,
                                    Map<String, SideTableInfo> sideTableMap,
                                    Map<String, Table> registerTableCache,
                                    SideSqlExec sideSqlExec,
                                    InsertSqlParser.SqlParseResult result,
                                    int envParallelism) throws Exception {
        for (String tableName : result.getTargetTableList()) {
            if (sqlTree.getTmpTableMap().containsKey(tableName)) {
                CreateTmpTableParser.SqlParserResult tmp = sqlTree.getTmpTableMap().get(tableName);
                // Strip back-quotes (outside string literals) so Calcite can re-parse the statement.
                String realSql = DtStringUtil.replaceIgnoreQuota(result.getExecSql(), "`", "");
                SqlNode sqlNode = org.apache.calcite.sql.parser.SqlParser.create(realSql, config).parseStmt();
                // Only the SELECT source of "INSERT INTO tmp SELECT ..." feeds the temporary table.
                String tmpSql = ((SqlInsert) sqlNode).getSource().toString();
                tmp.setExecSql(tmpSql);
                sideSqlExec.registerTmpTable(tmp, sideTableMap, tableEnv, registerTableCache, envParallelism);
            } else if (referencesSideTable(result, sideTableMap)) {
                // Re-evaluated per target table (the original sticky flag mis-routed later
                // statements once any earlier one had touched a side table).
                // sql-dimensional table contains the dimension table of execution
                sideSqlExec.exec(result.getExecSql(), null, null, sideTableMap, tableEnv, registerTableCache, envParallelism);
            } else {
                tableEnv.sqlUpdate(result.getExecSql());
            }
        }
    }

    /**
     * Executes a parsed SELECT statement, routing its result to a sink.
     * <p>
     * Statements joining a dimension (side) table go through {@code sideSqlExec}; otherwise the
     * result is written to a console sink or a Kafka sink, selected via
     * {@link FlinkUtil#getSelectSink}.
     *
     * @param tableEnv           Flink stream table environment the statement runs in
     * @param properties         job properties (sink selection, Kafka bootstrap/topic)
     * @param jobName            name of the running job, forwarded to the sink
     * @param sideTableMap       registered dimension (side) tables, keyed by table name
     * @param registerTableCache already-registered tables, keyed by table name
     * @param sideSqlExec        executor handling statements that join side tables
     * @param result             one parsed SELECT statement (sources, exec SQL)
     * @param envParallelism     parallelism of the execution environment
     * @throws Exception if statement execution or sink registration fails
     */
    public static void selectSqlExe(StreamTableEnvironment tableEnv,
                                    Properties properties,
                                    String jobName,
                                    Map<String, SideTableInfo> sideTableMap,
                                    Map<String, Table> registerTableCache,
                                    SideSqlExec sideSqlExec,
                                    InsertSqlParser.SqlParseResult result,
                                    int envParallelism) throws Exception {
        if (referencesSideTable(result, sideTableMap)) {
            // sql-dimensional table contains the dimension table of execution
            sideSqlExec.exec(result.getExecSql(), properties, jobName, sideTableMap, tableEnv, registerTableCache, envParallelism);
        } else if ("console".equals(FlinkUtil.getSelectSink(properties))) {
            registerConsoleSink(tableEnv, result.getExecSql());
        } else {
            registerKafkaSink(tableEnv, properties, jobName, result.getExecSql());
        }
    }

    /**
     * Returns {@code true} when any source table of the parsed statement is a registered
     * dimension (side) table.
     */
    private static boolean referencesSideTable(InsertSqlParser.SqlParseResult result,
                                               Map<String, SideTableInfo> sideTableMap) {
        for (String sourceTable : result.getSourceTableList()) {
            if (sideTableMap.containsKey(sourceTable)) {
                return true;
            }
        }
        return false;
    }

    /**
     * Runs the given query and writes its result into a freshly registered console sink.
     *
     * @param tableEnv Flink stream table environment
     * @param sql      SELECT statement producing the rows to print
     */
    public static void registerConsoleSink(StreamTableEnvironment tableEnv, String sql) {
        Table table = tableEnv.sqlQuery(sql);
        TableSchema tableSchema = table.getSchema();
        IStreamSinkGener streamSinkGener = new ConsoleSink();
        ConsoleTableInfo consoleTableInfo = new ConsoleTableInfo();
        TableSink tableSink = (TableSink) streamSinkGener.genStreamSink(consoleTableInfo);

        // Random suffix avoids name clashes when several queries register console sinks.
        String consoleTableName = "Console" + UUID.randomUUID();
        tableEnv.registerTableSink(consoleTableName, tableSchema.getFieldNames(), tableSchema.getFieldTypes(), tableSink);
        table.insertInto(consoleTableName);
    }

    /**
     * Runs the given query and writes its result into a freshly registered Kafka sink.
     * Sink field names/types are derived from the query's schema; bootstrap servers and
     * topic come from the job properties.
     *
     * @param tableEnv   Flink stream table environment
     * @param properties job properties carrying Kafka bootstrap servers and topic
     * @param jobName    name of the running job, stored on the sink table info
     * @param sql        SELECT statement producing the rows to publish
     */
    public static void registerKafkaSink(StreamTableEnvironment tableEnv, Properties properties, String jobName, String sql) {
        Table table = tableEnv.sqlQuery(sql);
        TableSchema tableSchema = table.getSchema();
        IStreamSinkGener streamSinkGener = new KafkaSink();

        KafkaSinkTableInfo kafkaSinkTableInfo = new KafkaSinkTableInfo();
        kafkaSinkTableInfo.setFields(tableSchema.getFieldNames());
        kafkaSinkTableInfo.setFieldTypes(getDataType(tableSchema.getFieldNames(), tableSchema.getFieldDataTypes()));
        // Resolve each lower-cased type name to its Java class for the sink's serializer.
        for (int i = 0; i < kafkaSinkTableInfo.getFieldTypes().length; i++) {
            kafkaSinkTableInfo.addFieldClass(ClassUtil.stringConvertClass(kafkaSinkTableInfo.getFieldTypes()[i]));
        }
        kafkaSinkTableInfo.setFieldClasses(kafkaSinkTableInfo.getFieldClassList().toArray(new Class[kafkaSinkTableInfo.getFieldClassList().size()]));

        kafkaSinkTableInfo.setJobName(jobName);
        kafkaSinkTableInfo.setBootstrapServers(FlinkUtil.getSelectKafkaBootstrap(properties));
        kafkaSinkTableInfo.setTopic(FlinkUtil.getSelectKafkaTopic(properties));

        TableSink tableSink = (TableSink) streamSinkGener.genStreamSink(kafkaSinkTableInfo);

        // Random suffix avoids name clashes when several queries register Kafka sinks.
        String kafkaTableName = "Kafka" + UUID.randomUUID();
        tableEnv.registerTableSink(kafkaTableName, tableSchema.getFieldNames(), tableSchema.getFieldTypes(), tableSink);
        table.insertInto(kafkaTableName);
    }

    /**
     * Converts Flink {@link DataType}s to their lower-cased string names, one per field.
     * Uses {@link Locale#ROOT} so the result is locale-independent (e.g. the Turkish
     * dotless-i would otherwise corrupt type names such as "INT").
     *
     * @param fieldName field names; only its length is used for sizing the result
     * @param dataTypes data types aligned with {@code fieldName}
     * @return lower-cased string form of each data type
     */
    public static String[] getDataType(String[] fieldName, DataType[] dataTypes) {
        String[] fieldType = new String[fieldName.length];
        for (int i = 0; i < fieldName.length; i++) {
            fieldType[i] = dataTypes[i].toString().toLowerCase(Locale.ROOT);
        }
        return fieldType;
    }
}

