/*
 *  Copyright 2020-2025 the original author or authors.
 *  You cannot use this file unless authorized by the author.
 */

package org.ipig.computing.spark;

import lombok.Data;
import lombok.extern.slf4j.Slf4j;
import org.ipig.commons.service.cmd.CommandService;
import org.ipig.computing.LauncherService;
import org.ipig.computing.hadoop.yarn.YarnHelper;
import org.ipig.computing.spark.conf.ApplicationConf;
import org.ipig.constants.ResultStatus;

import java.util.ArrayList;
import java.util.List;

/**
 * Generic Spark launcher that runs an ordered list of command executor
 * services against a YARN HA cluster.
 *
 * @author <a href="mailto:comchnts@163.com">chinats</a>
 * @since 1.0
 */
@Slf4j
@Data
public class GenericSparkLauncher implements LauncherService<ApplicationConf, ResultStatus> {
    /**
     * Ordered list of command executor services; executed front to back by {@link #launch}.
     */
    private List<CommandService<ApplicationConf, ResultStatus>> executorList = new ArrayList<>(4);
    /**
     * Comma-separated ResourceManager {@code host:port} pairs used to resolve
     * the HA-active host via {@link YarnHelper#getHaActiveHost}.
     */
    private String hostAndPorts = "s1:8088,s2:8088";

    /**
     * Executes each configured command service in order, stopping at the
     * first failure.
     *
     * @param conf the Spark application configuration passed to every executor
     * @return {@link ResultStatus#FAILURE} as soon as any executor fails;
     *         otherwise the status of the last executed service (or
     *         {@code SUCCESS} when the executor list is empty or every
     *         execution was skipped by the running-app check)
     */
    @Override
    public ResultStatus launch(final ApplicationConf conf) {
        ResultStatus resultStatus = ResultStatus.SUCCESS;
        if (executorList != null && !executorList.isEmpty()) {
            String haActiveHost = YarnHelper.getHaActiveHost(hostAndPorts);
            // Use the SLF4J logger provided by @Slf4j instead of System.out.
            log.info("Resolved YARN HA active host: {}", haActiveHost);
            // Execute services in list order.
            for (CommandService<ApplicationConf, ResultStatus> service : executorList) {
                // NOTE(review): execution is gated on checkAppRunning == SUCCESS;
                // confirm with YarnHelper whether SUCCESS means "app not already running".
                if (YarnHelper.checkAppRunning(conf.getAppName(), haActiveHost) == ResultStatus.SUCCESS) {
                    resultStatus = service.execute(conf);
                    if (resultStatus == ResultStatus.FAILURE) {
                        // Fail fast: abort the remaining executors on the first failure.
                        break;
                    }
                }
            }
        }
        return resultStatus;
    }


}
