package t20250427;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import org.apache.flink.api.common.JobID;
import org.apache.flink.api.common.JobStatus;
import org.apache.flink.api.dag.Pipeline;
import org.apache.flink.client.FlinkPipelineTranslationUtil;
import org.apache.flink.client.deployment.application.executors.EmbeddedExecutor;
import org.apache.flink.client.program.*;
import org.apache.flink.client.program.rest.retry.ExponentialWaitStrategy;
import org.apache.flink.client.program.rest.retry.WaitStrategy;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.CoreOptions;
import org.apache.flink.configuration.DeploymentOptions;
import org.apache.flink.core.execution.PipelineExecutorServiceLoader;
import org.apache.flink.optimizer.CompilerException;
import org.apache.flink.runtime.client.JobInitializationException;
import org.apache.flink.runtime.execution.librarycache.FlinkUserCodeClassLoaders;
import org.apache.flink.runtime.jobgraph.JobGraph;
import org.apache.flink.runtime.jobmaster.JobResult;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.graph.StreamEdge;
import org.apache.flink.streaming.api.graph.StreamGraph;
import org.apache.flink.streaming.api.graph.StreamNode;
import org.apache.flink.streaming.runtime.partitioner.RebalancePartitioner;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.table.functions.UserDefinedFunction;
import org.apache.flink.util.ExceptionUtils;
import org.apache.flink.util.SerializedThrowable;
import org.apache.flink.util.StringUtils;
import org.apache.flink.util.function.SupplierWithException;

import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.PrintStream;
import java.math.BigInteger;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.net.URLClassLoader;
import java.security.MessageDigest;
import java.util.List;
import java.util.Optional;
import java.util.ServiceLoader;

import static org.apache.flink.util.FlinkUserCodeClassLoader.NOOP_EXCEPTION_HANDLER;
import static org.apache.flink.util.Preconditions.checkNotNull;
import static org.apache.flink.util.Preconditions.checkState;

@Slf4j
public enum ProgramUtil {
    ;

    /**
     * Builds a Flink user-code class loader over the given job jars and extra classpath
     * entries. Resolve order (child-first/parent-first), parent-first patterns and the
     * leaked-classloader check are all taken from the supplied configuration.
     *
     * @param jars job jar URLs, placed first in the search order
     * @param classpaths additional classpath URLs, placed after the jars
     * @param parent parent class loader to delegate to
     * @param configuration source of the class-loading options
     * @return the configured user-code class loader
     */
    public static URLClassLoader buildUserCodeClassLoader(
            List<URL> jars, List<URL> classpaths, ClassLoader parent, Configuration configuration) {
        // Concatenate jars followed by classpath entries into one URL array.
        final URL[] allUrls = new URL[jars.size() + classpaths.size()];
        int pos = 0;
        for (URL jar : jars) {
            allUrls[pos++] = jar;
        }
        for (URL classpathEntry : classpaths) {
            allUrls[pos++] = classpathEntry;
        }
        final String[] parentFirstPatterns =
                CoreOptions.getParentFirstLoaderPatterns(configuration);
        final String resolveOrderName =
                configuration.getString(CoreOptions.CLASSLOADER_RESOLVE_ORDER);
        final FlinkUserCodeClassLoaders.ResolveOrder resolveOrder =
                FlinkUserCodeClassLoaders.ResolveOrder.fromString(resolveOrderName);
        final boolean checkLeakedClassLoader =
                configuration.getBoolean(CoreOptions.CHECK_LEAKED_CLASSLOADER);
        return FlinkUserCodeClassLoaders.create(
                resolveOrder,
                allUrls,
                parent,
                parentFirstPatterns,
                NOOP_EXCEPTION_HANDLER,
                checkLeakedClassLoader);
    }


    /**
     * Creates a local streaming {@link StreamTableEnvironment} with dynamic table options
     * enabled, and registers every {@link UserDefinedFunction} discoverable via
     * {@link ServiceLoader} as a temporary system function.
     *
     * @return the configured table environment
     */
    public static StreamTableEnvironment getTableEnv() {
        Configuration conf = new Configuration();
        conf.setBoolean("table.dynamic-table-options.enabled", true);

        StreamExecutionEnvironment env = StreamExecutionEnvironment.createLocalEnvironment(conf);
        EnvironmentSettings settings = EnvironmentSettings.newInstance().inStreamingMode().build();
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env, settings);

        ServiceLoader<UserDefinedFunction> functions = ServiceLoader.load(UserDefinedFunction.class);
        if (!functions.iterator().hasNext()) {
            log.warn("Failed to load user-defined functions.");
        }
        int registered = 0;
        for (UserDefinedFunction function : functions) {
            // NOTE(review): the function is registered under its toString() — confirm that
            // every UDF implementation yields a usable SQL identifier from toString().
            tableEnv.createTemporarySystemFunction(function.toString(), function.getClass());
            registered++;
        }
        log.info("Total of {} user-defined function were registered.", registered);

        return tableEnv;
    }

    /**
     * Runs the packaged program's main method with context execution environments
     * installed, so that {@code execute()}/{@code executeAsync()} calls inside the user
     * program are routed through the given executor service loader. The user-code class
     * loader is installed as the thread context class loader for the duration of the
     * call and restored afterwards. (Appears to mirror Flink's {@code
     * ClientUtils.executeProgram} — confirm against the Flink version in use.)
     *
     * @param executorServiceLoader loader used to locate the pipeline executor
     * @param configuration configuration passed to the context environments
     * @param program the user program to invoke
     * @param enforceSingleJobExecution passed through to the context environments
     * @param suppressSysout passed through to the context environments
     * @throws ProgramInvocationException if invoking the program's main method fails
     */
    public static void executeProgram(
            PipelineExecutorServiceLoader executorServiceLoader,
            Configuration configuration,
            PackagedProgram program,
            boolean enforceSingleJobExecution,
            boolean suppressSysout)
            throws ProgramInvocationException {
        checkNotNull(executorServiceLoader);
        final ClassLoader userCodeClassLoader = program.getUserCodeClassLoader();
        final ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
        try {
            // User classes must be visible to the thread running the program's main().
            Thread.currentThread().setContextClassLoader(userCodeClassLoader);

            // Install both the batch and the streaming context environments; whichever
            // API the user program uses will pick up the matching one.
            ContextEnvironment.setAsContext(
                    executorServiceLoader,
                    configuration,
                    userCodeClassLoader,
                    enforceSingleJobExecution,
                    suppressSysout);

            StreamContextEnvironment.setAsContext(
                    executorServiceLoader,
                    configuration,
                    userCodeClassLoader,
                    enforceSingleJobExecution,
                    suppressSysout);

            try {
                program.invokeInteractiveModeForExecution();
            } finally {
                // Always tear down the context environments, even if main() threw.
                ContextEnvironment.unsetAsContext();
                StreamContextEnvironment.unsetAsContext();
            }
        } finally {
            // Restore the original context class loader.
            Thread.currentThread().setContextClassLoader(contextClassLoader);
        }
    }

    /**
     * Blocks until the job leaves the {@code INITIALIZING} state, polling the status
     * supplier with exponential backoff (50ms initial, capped at 2000ms). If the job
     * transitioned to {@code FAILED} because of a {@link JobInitializationException},
     * that exception is deserialized with the user-code class loader and rethrown;
     * any other error encountered while waiting is wrapped in a RuntimeException.
     *
     * @param jobStatusSupplier supplies the current job status on each poll
     * @param jobResultSupplier supplies the terminal job result (consulted only on FAILED)
     * @param userCodeClassloader used to deserialize the failure cause
     * @throws JobInitializationException if the job failed during initialization
     */
    public static void waitUntilJobInitializationFinished(
            SupplierWithException<JobStatus, Exception> jobStatusSupplier,
            SupplierWithException<JobResult, Exception> jobResultSupplier,
            ClassLoader userCodeClassloader)
            throws JobInitializationException {
        WaitStrategy waitStrategy = new ExponentialWaitStrategy(50, 2000);
        try {
            JobStatus status = jobStatusSupplier.get();
            long attempt = 0;
            while (status == JobStatus.INITIALIZING) {
                // Sleep time grows with the attempt count per the wait strategy.
                Thread.sleep(waitStrategy.sleepTime(attempt++));
                status = jobStatusSupplier.get();
            }
            if (status == JobStatus.FAILED) {
                JobResult result = jobResultSupplier.get();
                Optional<SerializedThrowable> throwable = result.getSerializedThrowable();
                if (throwable.isPresent()) {
                    Throwable t = throwable.get().deserializeError(userCodeClassloader);
                    // Only initialization failures are surfaced as-is; other failure
                    // causes are intentionally ignored here (the job did start).
                    if (t instanceof JobInitializationException) {
                        throw t;
                    }
                }
            }
        } catch (JobInitializationException initializationException) {
            // Rethrow untouched so callers see the declared exception type.
            throw initializationException;
        } catch (Throwable throwable) {
            // Re-sets the interrupt flag if the wait was interrupted, then wraps.
            ExceptionUtils.checkInterrupted(throwable);
            throw new RuntimeException("Error while waiting for job to be initialized", throwable);
        }
    }

    /**
     * Compiles the packaged program into a {@link JobGraph}, attaching the program's
     * jars, classpaths and savepoint restore settings.
     *
     * @param packagedProgram the user program
     * @param configuration configuration used while extracting and translating the plan
     * @param defaultParallelism default parallelism for the translation
     * @param jobID explicit job id to set, or {@code null} to keep the generated one
     * @param suppressOutput whether to buffer STDOUT/STDERR while the program runs
     * @return the translated job graph
     * @throws ProgramInvocationException if the program fails or yields no pipeline
     */
    public static JobGraph createJobGraph(
            PackagedProgram packagedProgram,
            Configuration configuration,
            int defaultParallelism,
            JobID jobID,
            boolean suppressOutput)
            throws ProgramInvocationException {
        // Run the program to capture its pipeline, then translate it under the
        // program's own user-code class loader.
        Pipeline pipeline =
                getPipelineFromProgram(packagedProgram, configuration, defaultParallelism, suppressOutput);
        JobGraph jobGraph =
                FlinkPipelineTranslationUtil.getJobGraphUnderUserClassLoader(
                        packagedProgram.getUserCodeClassLoader(),
                        pipeline,
                        configuration,
                        defaultParallelism);
        if (jobID != null) {
            jobGraph.setJobID(jobID);
        }
        jobGraph.addJars(packagedProgram.getJobJarAndDependencies());
        jobGraph.setClasspaths(packagedProgram.getClasspaths());
        jobGraph.setSavepointRestoreSettings(packagedProgram.getSavepointSettings());
        return jobGraph;
    }

    /**
     * Resolves the given path to an existing, regular jar {@link File}.
     *
     * @param jarFilePath path to the jar on the local file system
     * @return the validated file
     * @throws FileNotFoundException if the path does not exist or is not a regular file
     */
    private static File getJarFile(String jarFilePath) throws FileNotFoundException {
        final File jarFile = new File(jarFilePath);
        if (!jarFile.exists()) {
            throw new FileNotFoundException("JAR file does not exist: " + jarFile);
        }
        if (!jarFile.isFile()) {
            throw new FileNotFoundException("JAR file is not a file: " + jarFile);
        }
        return jarFile;
    }

    /**
     * Convenience overload of
     * {@link #createJobGraph(PackagedProgram, Configuration, int, JobID, boolean)} that
     * keeps the automatically generated job id.
     */
    public static JobGraph createJobGraph(
            PackagedProgram packagedProgram,
            Configuration configuration,
            int defaultParallelism,
            boolean suppressOutput)
            throws ProgramInvocationException {
        // A null JobID means "do not override the generated id".
        return createJobGraph(packagedProgram, configuration, defaultParallelism, null, suppressOutput);
    }

    /**
     * Converts an already-built {@link StreamGraph} into a {@link JobGraph}, attaching
     * the program's jars, classpaths and savepoint restore settings.
     */
    public static JobGraph createJobGraph(StreamGraph streamGraph, PackagedProgram packagedProgram) {
        final JobGraph graph = streamGraph.getJobGraph();
        graph.addJars(packagedProgram.getJobJarAndDependencies());
        graph.setClasspaths(packagedProgram.getClasspaths());
        graph.setSavepointRestoreSettings(packagedProgram.getSavepointSettings());
        return graph;
    }

    /**
     * Extracts the {@link StreamGraph} built by the packaged program.
     *
     * @throws ProgramInvocationException if the program fails or yields no pipeline
     */
    public static StreamGraph createStreamGraph(PackagedProgram packagedProgram,
                                                Configuration configuration,
                                                int defaultParallelism,
                                                boolean suppressOutput) throws ProgramInvocationException {
        Pipeline pipeline =
                getPipelineFromProgram(packagedProgram, configuration, defaultParallelism, suppressOutput);
        // NOTE(review): assumes the program builds a streaming pipeline; a batch (DataSet)
        // program would produce a different Pipeline type and this cast would throw
        // ClassCastException — confirm callers only pass streaming programs.
        return (StreamGraph) pipeline;
    }

    /**
     * Extracts the {@link StreamGraph} using defaults: fresh configuration,
     * parallelism 1, output not suppressed.
     */
    public static StreamGraph createStreamGraph(PackagedProgram packagedProgram) throws ProgramInvocationException {
        return createStreamGraph(packagedProgram, new Configuration(), 1, false);
    }

    /**
     * Runs the program's main method under plan-preview environments and returns the
     * {@link Pipeline} it constructed, without submitting anything.
     *
     * <p>The preview environments intercept {@code execute()} calls and abort the program
     * by throwing once the pipeline has been captured — hence the pipeline is retrieved
     * inside the catch block below.
     *
     * @param program the packaged user program
     * @param configuration configuration for the preview environments
     * @param parallelism default parallelism for the preview environments
     * @param suppressOutput whether to buffer STDOUT/STDERR while the program runs
     * @return the captured pipeline
     * @throws ProgramInvocationException if the program fails or produces no pipeline
     */
    public static Pipeline getPipelineFromProgram(
            PackagedProgram program,
            Configuration configuration,
            int parallelism,
            boolean suppressOutput)
            throws CompilerException, ProgramInvocationException {
        final ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();

        Thread.currentThread().setContextClassLoader(program.getUserCodeClassLoader());

        final PrintStream originalOut = System.out;
        final PrintStream originalErr = System.err;
        final ByteArrayOutputStream stdOutBuffer;
        final ByteArrayOutputStream stdErrBuffer;

        if (suppressOutput) {
            // Temporarily redirect STDOUT/STDERR into buffers so a failing program's
            // output can be attached to the thrown exception instead of the console.
            stdOutBuffer = new ByteArrayOutputStream();
            System.setOut(new PrintStream(stdOutBuffer));
            stdErrBuffer = new ByteArrayOutputStream();
            System.setErr(new PrintStream(stdErrBuffer));
        } else {
            stdOutBuffer = null;
            stdErrBuffer = null;
        }

        // Temporary hack to support the optimizer plan preview: install context
        // environments that capture the pipeline instead of executing it.
        OptimizerPlanEnvironment benv =
                new OptimizerPlanEnvironment(
                        configuration, program.getUserCodeClassLoader(), parallelism);
        benv.setAsContext();
        StreamPlanEnvironment senv =
                new StreamPlanEnvironment(
                        configuration, program.getUserCodeClassLoader(), parallelism);
        senv.setAsContext();

        try {
            program.invokeInteractiveModeForExecution();
        } catch (Throwable t) {
            // A captured pipeline means the preview "succeeded" — the environments
            // abort execution by design once the plan is available.
            if (benv.getPipeline() != null) {
                return benv.getPipeline();
            }

            if (senv.getPipeline() != null) {
                return senv.getPipeline();
            }

            if (t instanceof ProgramInvocationException) {
                throw t;
            }
            // BUGFIX: pass the throwable to the logger so the stack trace is preserved;
            // previously only t.getMessage() was logged (and could itself be null).
            log.error(t.getMessage(), t);
            throw generateException(
                    program, "The program caused an error: ", t, stdOutBuffer, stdErrBuffer);
        } finally {
            benv.unsetAsContext();
            senv.unsetAsContext();
            if (suppressOutput) {
                System.setOut(originalOut);
                System.setErr(originalErr);
            }
            Thread.currentThread().setContextClassLoader(contextClassLoader);
        }

        throw generateException(
                program,
                "The program plan could not be fetched - the program aborted pre-maturely.",
                null,
                stdOutBuffer,
                stdErrBuffer);
    }

    /**
     * Parses the given path into a {@link URI}. A path that already carries a scheme is
     * returned as-is; a scheme-less path is treated as a local file and made absolute.
     *
     * @param path a URI string or a local file path
     * @return the resolved URI
     * @throws URISyntaxException if the path cannot be parsed as a URI
     */
    public static URI resolveURI(String path) throws URISyntaxException {
        final URI parsed = new URI(path);
        return parsed.getScheme() == null
                ? new File(path).getAbsoluteFile().toURI()
                : parsed;
    }

    /**
     * Builds a {@link ProgramInvocationException} embedding the program's classpath and
     * any captured stdout/stderr, so plan-extraction failures remain diagnosable.
     *
     * @param program source of the classpath information for the message
     * @param msg leading message text
     * @param cause underlying failure, or {@code null} for a premature abort
     * @param stdoutBuffer captured stdout, or {@code null} when output was not suppressed
     * @param stderrBuffer captured stderr; must be set iff {@code stdoutBuffer} is set
     */
    private static ProgramInvocationException generateException(
            PackagedProgram program,
            String msg,
            Throwable cause,
            ByteArrayOutputStream stdoutBuffer,
            ByteArrayOutputStream stderrBuffer) {
        checkState(
                (stdoutBuffer != null) == (stderrBuffer != null),
                "Stderr/Stdout should either both be set or both be null.");

        String stdout = "";
        String stderr = "";
        if (stdoutBuffer != null) {
            stdout = stdoutBuffer.toString();
            stderr = stderrBuffer.toString();
        }

        String details =
                String.format(
                        "%s\n\nClasspath: %s\n\nSystem.out: %s\n\nSystem.err: %s",
                        msg,
                        program.getJobJarAndDependencies(),
                        stdout.isEmpty() ? "(none)" : stdout,
                        stderr.isEmpty() ? "(none)" : stderr);
        return new ProgramInvocationException(details, cause);
    }


    /**
     * Rewrites the given {@link StreamGraph} in place according to a previously resolved
     * JSON plan:
     * <ol>
     *   <li>Parallelism — each stream node is matched to a plan node by the MD5 hash of
     *       its operator name, and its parallelism is overridden when the plan specifies
     *       a positive value.</li>
     *   <li>Partitioners — after the parallelism pass, every FORWARD edge whose upstream
     *       and downstream parallelism now differ is switched to REBALANCE (FORWARD
     *       requires equal parallelism on both sides).</li>
     * </ol>
     *
     * @param streamGraph the graph to mutate in place
     * @param resolvedPlan JSON plan with a "nodes" array; ignored when null or blank
     */
    @SneakyThrows
    public static void updateStreamGraph(StreamGraph streamGraph, String resolvedPlan) {
        if (!StringUtils.isNullOrWhitespaceOnly(resolvedPlan)) {
            // Pass 1: adjust parallelism. Node ids in the resolved plan are MD5 hashes of
            // operator names, hex-rendered via BigInteger (leading zeros dropped) — the
            // same scheme used by encryptStreamGraph, so the ids line up.
            JSONArray resolvedPlanArray = JSON.parseObject(resolvedPlan).getJSONArray("nodes");
            MessageDigest md = MessageDigest.getInstance("MD5");
            for (StreamNode streamNode : streamGraph.getStreamNodes()) {
                // digest() resets the MessageDigest, so reuse across iterations is safe.
                // NOTE(review): getBytes() uses the platform charset — confirm operator
                // names are ASCII or that both hashing sites run on the same JVM.
                md.update(streamNode.getOperatorName().getBytes());
                String md5 = new BigInteger(1, md.digest()).toString(16);
                int parallelism = getResolvedNodeParallelism(md5, resolvedPlanArray);
                if (parallelism > 0) {
                    streamNode.setParallelism(parallelism);
                }
            }
            // Pass 2: adjust partitioners, based on a re-serialized plan of the (now
            // updated) graph with ids rewritten to the MD5 scheme.
            String unresolvedParallelismPlan = streamGraph.getStreamingPlanAsJSON();
            String resolvedParallelismPlan = encryptStreamGraph(unresolvedParallelismPlan);
            JSONArray nodes = JSON.parseObject(resolvedParallelismPlan).getJSONArray("nodes");
            for (int i = 0; i < nodes.size(); i++) {
                JSONObject node = nodes.getJSONObject(i);
                // Only nodes with upstream inputs need a parallelism comparison.
                if (node.containsKey("predecessors")) {
                    // Parallelism of the current (downstream) node.
                    Integer currParallelism = node.getInteger("parallelism");
                    // There may be several upstream nodes.
                    JSONArray predecessors = node.getJSONArray("predecessors");
                    for (int j = 0; j < predecessors.size(); j++) {
                        JSONObject predecessor = predecessors.getJSONObject(j);
                        // Upstream node id (MD5 of its operator name).
                        String preId = predecessor.getString("id");
                        // Partitioning strategy on the edge from this predecessor.
                        String shipStrategy = predecessor.getString("ship_strategy");
                        // Parallelism of the upstream node, or null if not found.
                        Integer preParallelism = findPreNodeParallelism(nodes, preId);
                        // Differing parallelism over a FORWARD edge is invalid — switch
                        // the upstream node's outgoing edges to REBALANCE.
                        if (!currParallelism.equals(preParallelism) && shipStrategy.equalsIgnoreCase("FORWARD")) {
                            for (StreamNode streamNode : streamGraph.getStreamNodes()) {
                                // Recompute the hash to locate the matching stream node.
                                md.update(streamNode.getOperatorName().getBytes());
                                String md5 = new BigInteger(1, md.digest()).toString(16);
                                if (md5.equals(preId)) {
                                    for (StreamEdge outEdge : streamNode.getOutEdges()) {
                                        outEdge.setPartitioner(new RebalancePartitioner<>());
                                    }
                                }
                            }
                        }


                    }
                }
            }
        }

    }

    /**
     * Returns a copy of the streaming plan JSON in which every node's numeric id — and
     * every predecessor reference to it — is replaced by the MD5 hash of the node's
     * "contents" field, hex-rendered via BigInteger (leading zeros dropped). This yields
     * stable, content-derived node ids that can be matched against a resolved plan.
     *
     * @param unResolvedPlan plan JSON as produced by StreamGraph#getStreamingPlanAsJSON
     * @return JSON with a single "nodes" array carrying the rewritten nodes
     */
    @SneakyThrows
    public static String encryptStreamGraph(String unResolvedPlan) {
        JSONObject streamingPlanAsJSON = JSON.parseObject(unResolvedPlan);
        JSONArray nodes = streamingPlanAsJSON.getJSONArray("nodes");
        JSONObject result = new JSONObject();
        MessageDigest md = MessageDigest.getInstance("MD5");
        JSONArray tmp = new JSONArray();
        for (int i = 0; i < nodes.size(); i++) {
            JSONObject jsonObject = nodes.getJSONObject(i);
            // digest() below resets the MessageDigest, so reuse is safe.
            md.update(jsonObject.getString("contents").getBytes());
            int id = jsonObject.getInteger("id");
            String md5 = new BigInteger(1, md.digest()).toString(16);
            jsonObject.put("id", md5);
            // Propagate the new id into every node that references this one as a
            // predecessor.
            for (int j = 0; j < nodes.size(); j++) {
                JSONObject jsonObject2 = nodes.getJSONObject(j);
                if (jsonObject2.containsKey("predecessors")) {
                    JSONArray predecessors = jsonObject2.getJSONArray("predecessors");
                    for (int i1 = 0; i1 < predecessors.size(); i1++) {
                        JSONObject jsonObject1 = predecessors.getJSONObject(i1);
                        // Only rewrite predecessor ids that are still numeric (i.e. not
                        // already replaced by a hash in an earlier iteration) and that
                        // match this node's original id.
                        // NOTE(review): x.getClass().isAssignableFrom(String.class) is an
                        // unusual spelling of "not a String"; `instanceof String` would be
                        // clearer — confirm before changing.
                        if (!jsonObject1.get("id").getClass().isAssignableFrom(String.class) && id == jsonObject1.getInteger("id")) {
                            jsonObject1.put("id", md5);
                            predecessors.set(i1, jsonObject1);
                            jsonObject2.put("predecessors", predecessors);
                            nodes.set(j, jsonObject2);
                        }
                    }

                }
            }
            tmp.add(jsonObject);
        }
        result.put("nodes", tmp);
        return result.toJSONString();
    }

    /**
     * Looks up the parallelism configured in the resolved plan for the node whose id
     * equals the given MD5 hash.
     *
     * @param md5 content-derived node id to search for
     * @param resolvedPlan the plan's "nodes" array
     * @return the configured parallelism, or 0 when the node is absent or has no
     *     parallelism entry (callers treat non-positive values as "leave unchanged")
     */
    private static int getResolvedNodeParallelism(String md5, JSONArray resolvedPlan) {
        for (int i = 0; i < resolvedPlan.size(); i++) {
            JSONObject node = resolvedPlan.getJSONObject(i);
            // BUGFIX: compare via md5.equals(...) so a node without an "id" field cannot
            // trigger a NullPointerException.
            if (md5.equals(node.getString("id"))) {
                // BUGFIX: guard against a missing "parallelism" field, which previously
                // caused a NullPointerException on auto-unboxing.
                Integer parallelism = node.getInteger("parallelism");
                return parallelism != null ? parallelism : 0;
            }
        }
        return 0;
    }

    /**
     * Finds the parallelism of the plan node with the given id.
     *
     * @param nodes the plan's "nodes" array
     * @param preId id of the predecessor node to look up
     * @return the node's parallelism, or {@code null} when no node matches
     */
    private static Integer findPreNodeParallelism(JSONArray nodes, String preId) {
        final int count = nodes.size();
        for (int idx = 0; idx < count; idx++) {
            JSONObject candidate = nodes.getJSONObject(idx);
            if (preId.equals(candidate.getString("id"))) {
                return candidate.getInteger("parallelism");
            }
        }
        return null;
    }
}
