package com.apex.core;

import com.alibaba.fastjson.JSONObject;
import com.apex.env.Execution;
import com.apex.flink.FlinkBaseSink;
import com.apex.flink.FlinkBaseSource;
import com.apex.flink.FlinkBaseTransform;
import com.apex.flink.FlinkEnvironment;
import com.apex.flink.batch.FlinkBatchExecution;
import com.apex.flink.stream.FlinkStreamExecution;
import com.apex.flink.utils.ConfigKeyName;
import com.apex.flink.utils.SchemaUtil;
import com.apex.utils.CliUtils;
import com.apex.utils.PluginType;
import com.apex.utils.RegisteredPlugin;
import com.apex.utils.TableSchema;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import org.apache.commons.lang3.StringUtils;
import org.apache.flink.api.java.typeutils.RowTypeInfo;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.List;
import java.util.Map;

/**
 * mvn clean package -Dmaven.test.skip=true
 */

public class RunFlinkEngine {
    private static final Logger logger = LoggerFactory.getLogger(RunFlinkEngine.class);

    /**
     * Entry point: parses the job config file passed on the command line, builds the
     * Flink execution environment, registers source/transform/sink plugins, and starts
     * the job in either streaming or batch mode.
     *
     * @param args args[0] is expected to be the path to the job's *.json config file
     */
    public static void main(String[] args) {
        if (args.length < 1){
            logger.error("*.json not find please check your config file then retry");
            // Fix: exit non-zero so callers/schedulers see the failure
            // (the original exited with status 0, signalling success).
            System.exit(1);
        }
        try{
            String configFile = CliUtils.parseCli(args);
            FlinkEnvironment environment = new FlinkEnvironment();
            // Parameterized logging instead of string concatenation.
            logger.info("user input config file : \n{}", configFile);

            Config config = ConfigFactory.parseFile(new File(configFile));

            String environmentEngine = config.getString(ConfigKeyName.JOB_ENGINE);

            // If a JSON table schema is configured, register its row type, table name
            // and raw schema JSON on the environment.
            if (config.hasPath(ConfigKeyName.FLINK_TABLE_SCHEMA)){
                Map<String,JSONObject> schemaMap = TableSchema.parseTableSchema(configFile);
                // Fix: `assert` is a no-op unless the JVM runs with -ea; fail explicitly.
                if (schemaMap == null || schemaMap.isEmpty()){
                    throw new IllegalStateException(
                            "table schema configured but could not be parsed from " + configFile);
                }
                // Preserves the original lambda's semantics: when several tables are
                // present, the last key in iteration order wins. (A local variable
                // replaces the former mutable static field, which existed only because
                // a lambda cannot assign a local.)
                String tableName = "";
                for (String name : schemaMap.keySet()){
                    tableName = name;
                }
                RowTypeInfo rowTypeInfo = SchemaUtil.getTypeInformation(schemaMap.get(tableName));
                environment.setRowTypeInfo(rowTypeInfo);
                environment.setQueryTable(tableName);
                environment.setSchemaJson(schemaMap.get(tableName).toJSONString());
            }

            String sourcePlugin = config.getString(ConfigKeyName.JOB_SOURCE_PLUGIN_NAME);
            String transformPlugin = config.getString(ConfigKeyName.JOB_TRANSFORM_PLUGIN_NAME);
            String sinkPlugin = config.getString(ConfigKeyName.JOB_SINK_PLUGIN_NAME);
            boolean isStreaming = config.getBoolean(ConfigKeyName.JOB_ENGINE_RUN_MODE);

            // Streaming jobs may reference a user-defined SQL script; load it if set.
            if (isStreaming && config.hasPath(ConfigKeyName.FLINK_RUN_SQLFilePath)){
                String sqlFilePath = config.getString(ConfigKeyName.FLINK_RUN_SQLFilePath);
                if (!StringUtils.isBlank(sqlFilePath)){
                    String workSpace = config.getString(ConfigKeyName.FLINK_RUN_WORKSPACE);
                    // Paths.get joins path segments portably instead of manual "/" concat.
                    List<String> sqlAll = Files.readAllLines(Paths.get(workSpace, sqlFilePath));
                    environment.setSqlAll(sqlAll);
                }
            }

            // Plugin registration for the configured engine.
            RegisteredPlugin registeredPlugin = new RegisteredPlugin();
            registeredPlugin.configEngine(environmentEngine);
            List<FlinkBaseSource> sources =
                    registeredPlugin.createPlugins(PluginType.SOURCE, sourcePlugin);
            List<FlinkBaseTransform> transforms =
                    registeredPlugin.createPlugins(PluginType.TRANSFORM, transformPlugin);
            List<FlinkBaseSink> sinks = registeredPlugin.createPlugins(PluginType.SINK, sinkPlugin);

            // Hand the parsed config to the environment and initialize the
            // execution context (stream vs. batch).
            environment.setConfig(config);
            environment.prepare(isStreaming);

            // Pick the execution strategy matching the run mode.
            Execution execution;
            if (isStreaming){
                execution = new FlinkStreamExecution(environment);
            }else {
                execution = new FlinkBatchExecution(environment);
            }

            // Prepare all plugins against the environment, then start the job.
            registeredPlugin.prepare(environment, sources, transforms, sinks);
            execution.start(sources, transforms, sinks);

        }catch (Exception e){
            // Fix: route the stack trace through SLF4J (printStackTrace bypasses the
            // logging configuration and is lost in log aggregation) and exit non-zero
            // so the failure is visible to the launching process.
            logger.error("run engine error", e);
            System.exit(1);
        }

    }
}
