package com.fsql.utils;

import com.dtstack.flink.sql.launcher.entity.JobParamsInfo;
import com.dtstack.flink.sql.launcher.factory.StandaloneClientFactory;
import com.dtstack.flink.sql.launcher.utils.JobGraphBuildUtil;
import com.fsql.entity.JarJobParameterInfo;
import com.fsql.executor.FSqlStandaloneExecutor;
import com.fsql.launcher.LauncherMain;
import org.apache.flink.api.common.JobExecutionResult;
import org.apache.flink.client.ClientUtils;
import org.apache.flink.client.deployment.ClusterDescriptor;
import org.apache.flink.client.deployment.StandaloneClusterId;
import org.apache.flink.client.program.ClusterClient;
import org.apache.flink.client.program.ClusterClientProvider;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.jobgraph.JobGraph;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Properties;

public class JobUtils {

    private static final Logger LOG = LoggerFactory.getLogger(JobUtils.class);

    /** Utility class — not meant to be instantiated. */
    private JobUtils() {
    }

    /**
     * Submits a Flink SQL job to a standalone cluster.
     *
     * <p>The configuration map is flattened into an alternating key/value argument list,
     * parsed into a {@link JobParamsInfo} and executed via {@link FSqlStandaloneExecutor}.
     *
     * @param conf launcher configuration; each entry contributes a key/value argument pair
     * @return the submitted job id, or {@code null} if submission failed
     */
    public static String submitStandaloneJob(Map<String, Object> conf) {
        try {
            // Flatten the map into ["key1", "value1", "key2", "value2", ...] because
            // LauncherMain.parseArgs expects an alternating argument array.
            List<String> propertiesList = new ArrayList<>(conf.size() * 2);
            for (Map.Entry<String, Object> keyValue : conf.entrySet()) {
                propertiesList.add(keyValue.getKey());
                propertiesList.add(keyValue.getValue().toString());
            }

            JobParamsInfo jobParamsInfo = LauncherMain.parseArgs(propertiesList.toArray(new String[0]));
            return new FSqlStandaloneExecutor(jobParamsInfo).exec();
        } catch (Exception e) {
            // Log the full stack trace; LOG.error(e.getMessage()) + printStackTrace()
            // loses the cause and bypasses the logging framework.
            LOG.error("Failed to submit standalone SQL job", e);
        }
        return null;
    }

    /**
     * Submits a jar job to a standalone cluster.
     *
     * <p>Builds a {@link JobGraph} from the jar job parameters, retrieves a client for the
     * standalone cluster and submits the graph. Both the cluster client and the cluster
     * descriptor are closed after use.
     *
     * @param jarJobParam jar job parameters (flink conf dir, conf properties, jar info)
     * @return the submitted job id, or {@code null} if submission failed
     */
    public static String submitStandaloneJob(JarJobParameterInfo jarJobParam) {
        ClusterDescriptor<StandaloneClusterId> clusterDescriptor = null;
        try {
            JobGraph jobGraph = JobGraphUtils.buildJobGraph(jarJobParam);

            Configuration flinkConfiguration =
                    JobGraphBuildUtil.getFlinkConfiguration(jarJobParam.getFlinkConf(), jarJobParam.getConfProperties());

            clusterDescriptor = StandaloneClientFactory.INSTANCE.createClusterDescriptor("", flinkConfiguration);

            ClusterClientProvider<StandaloneClusterId> clusterClientProvider =
                    clusterDescriptor.retrieve(StandaloneClusterId.getInstance());

            // Close the client after submission — ClusterClient is AutoCloseable and
            // leaving it open leaks its network resources.
            try (ClusterClient<StandaloneClusterId> clusterClient = clusterClientProvider.getClusterClient()) {
                JobExecutionResult jobExecutionResult = ClientUtils.submitJob(clusterClient, jobGraph);
                String jobId = jobExecutionResult.getJobID().toString();
                LOG.info("jobID:{}", jobId);
                return jobId;
            }
        } catch (Exception ex) {
            // Preserve the stack trace instead of getMessage()/printStackTrace().
            LOG.error("Failed to submit standalone jar job", ex);
        } finally {
            if (clusterDescriptor != null) {
                try {
                    clusterDescriptor.close();
                } catch (Exception e) {
                    // This is a standalone cluster descriptor; the original message
                    // ("yarn cluster descriptor") was copied from a YARN code path.
                    LOG.info("Could not properly close the standalone cluster descriptor.", e);
                }
            }
        }
        return null;
    }

    /**
     * Converts a map to a {@link Properties} instance.
     *
     * @param map source entries; keys and values are copied as-is
     * @return a new {@link Properties} containing every entry of {@code map}
     */
    public static Properties mapToProperties(Map<String, Object> map) {
        Properties properties = new Properties();
        // Properties extends Hashtable<Object,Object>, so a bulk putAll is equivalent
        // to copying entry by entry.
        properties.putAll(map);
        return properties;
    }

}
