/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.qlangtech.plugins.incr.flink.launch;

import com.qlangtech.plugins.incr.flink.launch.clustertype.ClusterType;
import com.qlangtech.tis.config.k8s.ReplicasSpec;
import com.qlangtech.tis.coredefine.module.action.IDeploymentDetail;
import com.qlangtech.tis.coredefine.module.action.IFlinkIncrJobStatus;
import com.qlangtech.tis.coredefine.module.action.IFlinkIncrJobStatus.State;
import com.qlangtech.tis.coredefine.module.action.IRCController.SupportTriggerSavePointResult;
import com.qlangtech.tis.coredefine.module.action.TargetResName;
import com.qlangtech.tis.coredefine.module.action.impl.FlinkJobDeploymentDetails;
import com.qlangtech.tis.datax.job.ServerLaunchToken;
import com.qlangtech.tis.lang.TisException;
import com.qlangtech.tis.manage.common.incr.UberJarUtil;
import com.qlangtech.tis.plugin.incr.IncrStreamFactory;
import com.qlangtech.tis.plugins.flink.client.JarSubmitFlinkRequest;
import com.qlangtech.tis.util.HeteroEnum;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang.exception.ExceptionUtils;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.flink.api.common.JobID;
import org.apache.flink.api.common.JobStatus;
import org.apache.flink.client.program.ClusterClient;
import org.apache.flink.client.program.rest.RestClusterClient;
import org.apache.flink.core.execution.SavepointFormatType;
import org.apache.flink.runtime.messages.Acknowledge;
import org.apache.flink.runtime.rest.messages.job.JobDetailsInfo;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.File;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.stream.Collectors;

/**
 * 增量任务提交到Flink集群网关
 * <p>
 * Submits incremental (CDC) stream jobs to the Flink cluster gateway and manages
 * their lifecycle: deploy, relaunch from a savepoint/checkpoint, trigger/discard
 * savepoints, stop with savepoint, and remove.
 *
 * @author: 百岁（baisui@qlangtech.com）
 * @create: 2021-10-20 13:39
 **/
public class FlinkTaskNodeController {
    private static final Logger logger = LoggerFactory.getLogger(FlinkTaskNodeController.class);
    private final TISFlinkCDCStreamFactory factory;
    /** Flink names checkpoint directories {@code chk-<checkpointId>} under the job's state root. */
    public static final String CHECKPOINT_DIR_PREFIX = "chk-";

    public FlinkTaskNodeController(TISFlinkCDCStreamFactory factory) {
        this.factory = factory;
    }

    public static void main(String[] args) {
        // intentionally empty — kept for ad-hoc manual testing
    }

    /**
     * 重新启动 — relaunch the job, restoring state from one of the savepoint paths
     * already recorded in the job status.
     *
     * @param collection target application
     * @param targetPod  candidate savepoint paths; the first path that is both recorded
     *                   in the job status and usable triggers the relaunch
     */
    public void relaunch(TargetResName collection, String... targetPod) {
        this.relaunch(collection, (p) -> {
            String savepointPath = p.getLeft();
            IFlinkIncrJobStatus<JobID> status = p.getRight();
            // only accept paths the job status actually knows about
            return (status.containSavepoint(savepointPath)).isPresent();
        }, targetPod);
    }

    /**
     * Relaunch implementation shared by savepoint- and checkpoint-based restores.
     *
     * @param collection         target application
     * @param savepointValidator 校验savepoint Path 是否存在 — decides whether a candidate
     *                           restore path is acceptable for the current job status
     * @param targetPod          candidate restore paths, tried in order
     * @throws IllegalStateException when no candidate path leads to a relaunch
     */
    private void relaunch(TargetResName collection
            , Function<Pair<String, IFlinkIncrJobStatus<JobID>>, Boolean> savepointValidator
            , String... targetPod) {
        IFlinkIncrJobStatus<JobID> status = getIncrJobStatus(collection);
        try {
            for (String savepointPath : targetPod) {
                // relaunch only when the job is stopped (or no longer running on the
                // cluster) AND the candidate path passes validation
                if ((status.getState() == IFlinkIncrJobStatus.State.STOPED
                        || !((FlinkJobDeploymentDetails) getRCDeployment(collection)).isRunning())
                        && savepointValidator.apply(Pair.of(savepointPath, status))
                ) {
                    File streamUberJar = UberJarUtil.getStreamUberJarFile(collection).getFile();
                    if (!streamUberJar.exists()) {
                        throw new IllegalStateException("streamUberJar is not exist:" + streamUberJar.getAbsolutePath());
                    }
                    this.deploy(collection, streamUberJar
                            , (request) -> {
                                request.setSavepointPath(savepointPath);
                                // tolerate state that no longer maps to an operator in the new job graph
                                request.setAllowNonRestoredState(true);
                            }, (jobId) -> {
                                status.relaunch(jobId);
                            });
                    return;
                }
            }
        } catch (Exception e) {
            throw new RuntimeException(collection.getName(), e);
        }

        // no candidate was usable — report the known savepoints to aid diagnosis
        List<IFlinkIncrJobStatus.FlinkSavepoint> savepoints = status.getSavepointPaths();
        throw new IllegalStateException("targetPod length:" + targetPod.length
                + "，jobid:" + status.getLaunchJobID() + ",status:" + status.getState()
                + ",stored path:" + savepoints.stream().map((p) -> p.getPath()).collect(Collectors.joining(",")));
    }

    /**
     * Relaunch the job restoring from a retained checkpoint (rather than a savepoint).
     *
     * @param collection   target application
     * @param checkpointId numeric id of the checkpoint directory ({@code chk-<id>})
     * @throws TisException when the configured stateBackend does not support restore
     */
    public void restoreFromCheckpoint(TargetResName collection, Integer checkpointId) {

        IncrStreamFactory streamFactory = getStreamFactory(collection);
        Optional<IncrStreamFactory.ISavePointSupport> savePointSupport = streamFactory.restorable();
        if (!savePointSupport.isPresent()) {
            throw TisException.create("app:" + collection.getName() + " is not support savePoint");
        }
        IncrStreamFactory.ISavePointSupport spSupport = savePointSupport.get();

        final String checkpointPath = getRestoreCheckpointPath(collection, spSupport, String.valueOf(checkpointId));

        // the checkpoint path is derived, not recorded in job status, so skip validation
        this.relaunch(collection, (p) -> {
            return true;
        }, checkpointPath);
    }

    /**
     * Resolve the checkpoint directory path for the application's currently launched job.
     */
    public static String getRestoreCheckpointPath(
            TargetResName collection, IncrStreamFactory.ISavePointSupport spSupport, String checkpointId) {
        IFlinkIncrJobStatus<JobID> status = getIncrJobStatus(collection);
        return getRestoreCheckpointPath(status.getLaunchJobID(), spSupport, checkpointId);
    }

    /**
     * Build the checkpoint directory path:
     * {@code <savePointRoot>/<jobIdHex>/chk-<checkpointId>}.
     *
     * @param jobID        launched Flink job id (hex form is used in the path)
     * @param spSupport    supplies the savepoint/checkpoint root path
     * @param checkpointId numeric checkpoint id as a string
     */
    public static String getRestoreCheckpointPath(
            JobID jobID, IncrStreamFactory.ISavePointSupport spSupport, String checkpointId) {
        Objects.requireNonNull(jobID, "param jobID can not be null");
        Objects.requireNonNull(spSupport, "param spSupport can not be null");
        Objects.requireNonNull(checkpointId, "param checkpointId can not be null");
        final String checkpointPath = spSupport.getSavePointRootPath() + "/" + jobID.toHexString() + "/" + CHECKPOINT_DIR_PREFIX + checkpointId;
        logger.info("restore flink job with checkpoint path:" + checkpointPath);
        return checkpointPath;
    }

    /**
     * Legacy deploy entry point — currently a no-op (the uber-jar based deploy path in
     * {@link #relaunch} is used instead). TODO: implement or remove.
     */
    public void deploy(TargetResName collection, ReplicasSpec incrSpec, long timestamp) throws Exception {
    }

    /**
     * Delegate the actual submission to the configured cluster plugin.
     *
     * @param requestSetter customizes the jar-submit request (e.g. savepoint path)
     * @param afterSuccess  invoked with the new JobID once submission succeeds
     */
    private void deploy(TargetResName collection, File streamUberJar
            , Consumer<JarSubmitFlinkRequest> requestSetter, Consumer<JobID> afterSuccess) throws Exception {

        factory.cluster.deploy(this.factory, collection, streamUberJar, requestSetter, afterSuccess);
    }

    private static IFlinkIncrJobStatus<JobID> getIncrJobStatus(TargetResName collection) {
        IncrStreamFactory incrFactory = getStreamFactory(collection);
        return incrFactory.getIncrJobStatus(collection);
    }

    private static IncrStreamFactory getStreamFactory(TargetResName collection) {
        return HeteroEnum.getIncrStreamFactory(collection.getName());
    }

    /**
     * Query the cluster for the current deployment details of the application's job.
     * <p>
     * Returns a "none state" detail when the job was never launched, has reached a
     * terminal state, or has disappeared from the cluster.
     *
     * @throws TisException on connection timeout to the JobManager
     */
    public IDeploymentDetail getRCDeployment(TargetResName collection) {
        ExtendFlinkJobDeploymentDetails rcDeployment = null;
        IFlinkIncrJobStatus<JobID> incrJobStatus = getIncrJobStatus(collection);
        if (incrJobStatus.getState() == IFlinkIncrJobStatus.State.NONE) {
            // stop 或者压根没有创建 — stopped, or never created at all
            return FlinkJobDeploymentDetails.noneState(factory.getClusterCfg(), incrJobStatus);
        }
        JobID launchJobID = incrJobStatus.getLaunchJobID();
        try {
            try (RestClusterClient restClient = (RestClusterClient) this.factory.getFlinkCluster()) {
                CompletableFuture<JobStatus> jobStatus = restClient.getJobStatus(launchJobID);
                JobStatus status = jobStatus.get(5, TimeUnit.SECONDS);
                if (status == null || status.isTerminalState()) {
                    // convertTerminalFlinkJobStatus is null-safe: a null status maps to DISAPPEAR
                    incrJobStatus.setState(convertTerminalFlinkJobStatus(status));
                    return FlinkJobDeploymentDetails.noneState(factory.getClusterCfg(), incrJobStatus);
                }

                CompletableFuture<JobDetailsInfo> jobDetails = restClient.getJobDetails(launchJobID);
                JobDetailsInfo jobDetailsInfo = jobDetails.get(5, TimeUnit.SECONDS);
                rcDeployment = new ExtendFlinkJobDeploymentDetails(factory.getClusterCfg(), incrJobStatus, jobDetailsInfo);
                return rcDeployment;
            }
        } catch (TimeoutException e) {
            ClusterType clusterCfg = this.factory.getClusterCfg();
            throw TisException.create(
                    "Address:" + clusterCfg.getJobManagerAddress().getUrl() + "连接超时，请检查相应配置是否正确", e);
        } catch (ExecutionException e) {
            Throwable cause = e.getCause();
            if (isNotFoundException(cause)) {
                // the job is unknown to the cluster: record FAILED instead of propagating
                logger.warn(e.getMessage(), e);
                incrJobStatus.setState(State.FAILED);
                return FlinkJobDeploymentDetails.noneState(factory.getClusterCfg(), incrJobStatus);
            }
            throw new RuntimeException(e);
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Map a terminal (or missing) Flink {@link JobStatus} to the TIS job state.
     *
     * @param status may be null when the cluster returned no status for the job
     */
    private State convertTerminalFlinkJobStatus(JobStatus status) {
        if (status == null) {
            // no status reported by the cluster — treat the job as disappeared
            return State.DISAPPEAR;
        }
        switch (status) {
            case FAILED:
            case FAILING:
                return State.FAILED;
            default:
                return State.DISAPPEAR;
        }
    }

    private static boolean isNotFoundException(Throwable cause) {
        // commons-lang StringUtils.indexOf is null-safe (returns -1 for a null message)
        return StringUtils.indexOf(cause.getMessage(), "NotFoundException") > -1;
    }

    /**
     * Check whether a savepoint can be triggered for the application right now,
     * collecting a human-readable reason when it cannot.
     */
    public SupportTriggerSavePointResult supportTriggerSavePoint(TargetResName collection) {
        SupportTriggerSavePointResult result = new SupportTriggerSavePointResult(false);
        ValidateFlinkJob validateFlinkJob = new ValidateFlinkJob(collection) {
            @Override
            protected void processCollectionNotSupportSavePoint(StateBackendFactory stateBackend) {
                // capture the reason instead of throwing (as the base class would)
                result.setUnSupportReason("当前实例不支持Flink SavePoint，请修改stateBackend配置以支持持久化StateBackend");
            }

            @Override
            protected void processJobNotRunning() {
                result.setUnSupportReason("当前实例状态不在运行中，不能执行该操作");
            }
        };

        return validateFlinkJob.validate().validateSuccess
                ? new SupportTriggerSavePointResult(true) : result;
    }

    /**
     * 创建一个Savepoint — trigger a savepoint for the running job and record it
     * in the job status.
     *
     * @param collection target application
     */
    public void triggerSavePoint(TargetResName collection) {
        processFlinkJob(collection, (restClient, savePoint, status) -> {
            String savepointDirectory = savePoint.createSavePointPath();
            CompletableFuture<String> result
                    = restClient.triggerSavepoint(status.getLaunchJobID(), savepointDirectory, SavepointFormatType.DEFAULT);
            status.addSavePoint(result.get(25, TimeUnit.SECONDS), IFlinkIncrJobStatus.State.RUNNING);
        });
    }

    /**
     * Dispose a savepoint on the cluster and drop it from the recorded job status.
     */
    public void discardSavepoint(TargetResName collection, String savepointPath) {
        processFlinkJob(collection, (restClient, savePoint, status) -> {

            CompletableFuture<Acknowledge> result = restClient.disposeSavepoint(savepointPath);
            result.get(25, TimeUnit.SECONDS);
            status.discardSavepoint(savepointPath);
        });
    }

    /**
     * Validate the job (running, launched, savepoint-capable), then run {@code jobFunc}
     * against the cluster — but only while the job is in a non-terminal state.
     */
    private void processFlinkJob(TargetResName collection, FlinkJobFunc jobFunc) {
        ValidateFlinkJob validateFlinkJob = new ValidateFlinkJob(collection).validate();
        IFlinkIncrJobStatus<JobID> status = validateFlinkJob.getStatus();
        IncrStreamFactory.ISavePointSupport savePoint = validateFlinkJob.getSavePoint();
        if (savePoint == null) {
            throw new NullPointerException("savePoint can not be null");
        }

        try (RestClusterClient restClient = (RestClusterClient) this.factory.getFlinkCluster()) {
            CompletableFuture<JobStatus> jobStatus = restClient.getJobStatus(status.getLaunchJobID());
            JobStatus s = jobStatus.get(5, TimeUnit.SECONDS);
            if (s != null && !s.isTerminalState()) {
                jobFunc.apply(restClient, savePoint, status);
            }

        } catch (Exception e) {
            throw new RuntimeException("appname:" + collection.getName(), e);
        }
    }

    /** Callback executed with a live cluster client against a validated, running job. */
    private interface FlinkJobFunc {
        public void apply(RestClusterClient restClient
                , IncrStreamFactory.ISavePointSupport savePoint, IFlinkIncrJobStatus<JobID> status) throws Exception;
    }

    /**
     * Stop the running job gracefully, taking a final savepoint and recording it.
     */
    public void stopInstance(TargetResName collection) {
        processFlinkJob(collection, (restClient, savePoint, status) -> {
            String savepointDirectory = savePoint.createSavePointPath();
            // advanceToEndOfTime=true: the source injects a MAX_WATERMARK into the pipeline
            CompletableFuture<String> result
                    = restClient.stopWithSavepoint(status.getLaunchJobID(), true, savepointDirectory, SavepointFormatType.DEFAULT);
            status.stop(result.get(3, TimeUnit.MINUTES));
        });
    }

    /**
     * Cancel the job on the cluster (if still alive) and delete its launch token.
     * <p>
     * A cluster-side "NotFoundException" (job already gone) is tolerated: the local
     * status is still marked cancelled.
     */
    public void removeInstance(TargetResName collection) throws Exception {
        IFlinkIncrJobStatus<JobID> status = getIncrJobStatus(collection);
        try {

            if (status.getLaunchJobID() == null) {
                throw new IllegalStateException("have not found any launched job,app:" + collection.getName());
            }

            // 将启动日志文件删除 — delete the launch log/token file
            ServerLaunchToken launchToken = this.factory.getLaunchToken(collection);
            launchToken.deleteLaunchToken();

            JobID jobID = status.getLaunchJobID();
            try (ClusterClient restClient = this.factory.getFlinkCluster()) {
                CompletableFuture<JobStatus> jobStatus = restClient.getJobStatus(jobID);
                JobStatus s = jobStatus.get(5, TimeUnit.SECONDS);
                if (s != null && !s.isTerminalState()) {
                    // job 任务没有终止，立即停止 — the job has not terminated, cancel it now
                    CompletableFuture<Acknowledge> result = restClient.cancel(jobID);
                    result.get(5, TimeUnit.SECONDS);
                }
                status.cancel();
            }
        } catch (Exception e) {
            Throwable[] throwables = ExceptionUtils.getThrowables(e);
            for (Throwable cause : throwables) {
                if (isNotFoundException(cause)) {
                    // job already gone on the cluster side — treat as successfully removed
                    logger.warn(cause.getMessage() + ":" + collection.getName());
                    status.cancel();
                    return;
                }
            }
            throw new RuntimeException(e);
        }
    }

    /**
     * Validates that the application's job is running, was actually launched, and that
     * its stateBackend supports savepoints. Subclasses override the process* hooks to
     * customize failure handling (the defaults throw).
     */
    private class ValidateFlinkJob {
        protected TargetResName collection;
        private final IFlinkIncrJobStatus<JobID> status;
        private IncrStreamFactory.ISavePointSupport savePoint;
        private boolean validateSuccess = true;

        public ValidateFlinkJob fail() {
            this.validateSuccess = false;
            return this;
        }

        public ValidateFlinkJob(TargetResName collection) {
            this.collection = collection;
            status = getIncrJobStatus(collection);
        }

        public IFlinkIncrJobStatus<JobID> getStatus() {
            return status;
        }

        public IncrStreamFactory.ISavePointSupport getSavePoint() {
            return savePoint;
        }

        public ValidateFlinkJob validate() {

            if (status.getState() != IFlinkIncrJobStatus.State.RUNNING) {
                processJobNotRunning();
                return fail();
            }
            if (status.getLaunchJobID() == null) {
                throw new IllegalStateException("have not found any launched job,app:" + collection.getName());
            }

            StateBackendFactory stateBackend = factory.stateBackend;
            savePoint = null;

            Optional<IncrStreamFactory.ISavePointSupport> savePointSupport
                    = StateBackendFactory.getSavePointSupport(stateBackend);
            if (!savePointSupport.isPresent()) {
                processCollectionNotSupportSavePoint(stateBackend);
                return fail();
            }
            savePoint = savePointSupport.get();
            return this;
        }

        protected void processCollectionNotSupportSavePoint(StateBackendFactory stateBackend) {
            throw TisException.create("app:" + collection.getName()
                    + " is not support savePoint,stateFactoryClass:" + stateBackend.getClass().getName());
        }

        protected void processJobNotRunning() {
            throw new IllegalStateException("collection:" + collection.getName()
                    + " intend to stop incr processing,state must be running ,but now is " + status.getState());
        }
    }
}
