package com.fsql.utils;

import com.dtstack.flink.sql.constrant.ConfigConstrant;
import com.dtstack.flink.sql.util.MathUtil;
import com.fsql.entity.JarJobParameterInfo;
import org.apache.commons.lang.BooleanUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.flink.client.program.PackagedProgram;
import org.apache.flink.client.program.PackagedProgramUtils;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.GlobalConfiguration;
import org.apache.flink.runtime.jobgraph.JobGraph;
import org.apache.flink.runtime.jobgraph.SavepointRestoreSettings;

import java.io.File;
import java.io.FileNotFoundException;
import java.util.Map;
import java.util.Properties;

/**
 * JobGraph工具类
 */
public class JobGraphUtils {

    /** Utility class — static methods only, no instances. */
    private JobGraphUtils() {
    }

    /**
     * Builds a Flink {@link JobGraph} for the jar job described by {@code jarJobParam}.
     *
     * <p>Reads the job's configuration properties, validates the entry class and jar
     * path, wraps the jar into a {@link PackagedProgram} (with optional savepoint
     * restore settings) and compiles it into a {@link JobGraph}.
     *
     * @param jarJobParam jar path, entry class, program arguments, flink conf dir
     *                    and configuration properties of the job
     * @return the generated JobGraph
     * @throws IllegalArgumentException if the entry class is blank
     * @throws FileNotFoundException    if the jar path is blank or the jar file does not exist
     * @throws Exception                if building the packaged program or the job graph fails
     */
    public static JobGraph buildJobGraph(JarJobParameterInfo jarJobParam) throws Exception {
        Properties confProperties = JobUtils.mapToProperties(jarJobParam.getConfProp());
        jarJobParam.setConfProperties(confProperties);

        // Job parallelism, defaulting to 1 when not configured.
        int parallelism = MathUtil.getIntegerVal(
                confProperties.getProperty(ConfigConstrant.SQL_ENV_PARALLELISM, "1"));

        if (StringUtils.isBlank(jarJobParam.getEntryClass())) {
            throw new IllegalArgumentException("入口函数" + jarJobParam.getEntryClass() + "不存在");
        }

        // Check the path for blank BEFORE constructing the File: `new File(null)`
        // would throw an unhelpful NullPointerException instead of this message.
        if (StringUtils.isBlank(jarJobParam.getJarPath())) {
            throw new FileNotFoundException(jarJobParam.getJarPath() + "不存在");
        }
        File jarFile = new File(jarJobParam.getJarPath());
        if (!jarFile.exists()) {
            throw new FileNotFoundException(jarJobParam.getJarPath() + "不存在");
        }

        SavepointRestoreSettings savepointRestoreSettings = dealSavepointRestoreSettings(confProperties);

        PackagedProgram packagedProgram = PackagedProgram.newBuilder()
                .setArguments(jarJobParam.getArgs())
                .setJarFile(jarFile)
                .setEntryPointClassName(jarJobParam.getEntryClass())
                .setSavepointRestoreSettings(savepointRestoreSettings)
                .build();

        Configuration configuration = getFlinkConfiguration(jarJobParam.getFlinkConf(), confProperties);
        return PackagedProgramUtils.createJobGraph(packagedProgram, configuration, parallelism, false);
    }

    /**
     * Builds the {@link SavepointRestoreSettings} from the job configuration.
     *
     * <p>If a savepoint path is configured, restores from it; whether state that
     * cannot be mapped back to the job is tolerated is controlled by the
     * "allow non-restored state" property (defaults to {@code false}).
     * Otherwise returns {@link SavepointRestoreSettings#none()}.
     *
     * @param confProperties the job's configuration properties
     * @return the savepoint restore settings for the job
     */
    protected static SavepointRestoreSettings dealSavepointRestoreSettings(Properties confProperties) {
        SavepointRestoreSettings savepointRestoreSettings = SavepointRestoreSettings.none();
        String savePointPath = confProperties.getProperty(ConfigConstrant.SAVE_POINT_PATH_KEY);
        if (StringUtils.isNotBlank(savePointPath)) {
            String allowNonRestoredState =
                    confProperties.getOrDefault(ConfigConstrant.ALLOW_NON_RESTORED_STATE_KEY, "false").toString();
            savepointRestoreSettings =
                    SavepointRestoreSettings.forPath(savePointPath, BooleanUtils.toBoolean(allowNonRestoredState));
        }
        return savepointRestoreSettings;
    }

    /**
     * Builds the Flink {@link Configuration} for the job.
     *
     * <p>Loads the global flink-conf.yaml from {@code flinkConfDir} when given
     * (otherwise starts from an empty configuration), then overlays every entry
     * of {@code confProperties} on top of it.
     *
     * @param flinkConfDir   directory containing flink-conf.yaml; may be empty/null
     * @param confProperties job-level properties that override the global config
     * @return the merged Flink configuration
     */
    public static Configuration getFlinkConfiguration(String flinkConfDir, Properties confProperties) {
        Configuration flinkConfig = StringUtils.isEmpty(flinkConfDir)
                ? new Configuration()
                : GlobalConfiguration.loadConfiguration(flinkConfDir);

        confProperties.forEach((key, val) -> flinkConfig.setString(key.toString(), val.toString()));
        return flinkConfig;
    }

}
