package com.calabar.phm.etl.driver.spark;

import org.apache.commons.lang.ArrayUtils;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * @Author zmc <mingcheng.zhang@cdcalabar.com>
 * @Date 17-5-27 下午4:39
 * @Version v1.0
 * @Des spark 驱动需要的参数
 */
/**
 * Parameter bag for the Spark driver: identifies the job, carries the Spark
 * runtime configuration, and describes the operator DAG (operator list plus
 * the edges between operators).
 */
public class Params {
    /** Job id. */
    private String jobId;
    /** Sub-job id. */
    private String subJobId;
    /** Spark runtime configuration (key/value pairs passed to the Spark env). */
    private Map<String, String> sparkEnv = new HashMap<>();
    /** Operators participating in this job. */
    private List<Operator> operators = new ArrayList<>();
    /** Operator wiring: target operator unique id -> list of upstream operator ids. */
    private Map<String, List<String>> connection = new HashMap<>();

    /**
     * Indexes the operator list by each operator's unique id.
     * <p>
     * NOTE(review): if two operators share the same unique id the later one
     * silently wins — presumably ids are unique upstream; verify against the
     * producer of this list.
     *
     * @return a new map from operator unique id to operator; never {@code null}
     */
    public Map<String, Operator> getOperatorMap() {
        // Presize to avoid rehashing; default load factor 0.75 -> capacity n/0.75.
        Map<String, Operator> operatorMap = new HashMap<>(Math.max(16, (int) (operators.size() / 0.75f) + 1));
        for (Operator operator : operators) {
            operatorMap.put(operator.getOperatorUnique(), operator);
        }
        return operatorMap;
    }

    public void setConnection(Map<String, List<String>> connection) {
        this.connection = connection;
    }

    public void setJobId(String jobId) {
        this.jobId = jobId;
    }

    public Map<String, String> getSparkEnv() {
        return sparkEnv;
    }

    public void setSparkEnv(Map<String, String> sparkEnv) {
        this.sparkEnv = sparkEnv;
    }

    public String getJobId() {
        return jobId;
    }

    public List<Operator> getOperators() {
        return operators;
    }

    public void setOperators(List<Operator> operators) {
        this.operators = operators;
    }

    public Map<String, List<String>> getConnection() {
        return connection;
    }

    public String getSubJobId() {
        return subJobId;
    }

    public void setSubJobId(String subJobId) {
        this.subJobId = subJobId;
    }

    /**
     * Human-readable dump of the full parameter set, used for logging.
     * Fixes the old "sprk env" typo and now also includes {@code subJobId}.
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("-- jobId : ").append(this.getJobId()).append("\n")
                .append("-- subJobId : ").append(this.getSubJobId()).append("\n")
                .append("-- spark env --").append("\n");
        for (Map.Entry<String, String> conf : sparkEnv.entrySet()) {
            sb.append(conf.getKey()).append(" : ")
                    .append(conf.getValue()).append("\n");
        }

        sb.append("-- operator --").append("\n");
        for (Operator operator : operators) {
            sb.append(operator).append("\n");
        }

        sb.append("-- connection --").append("\n");
        for (Map.Entry<String, List<String>> conn : connection.entrySet()) {
            // StringBuilder.append(Object) renders a List as "[a, b]" — the same
            // output ArrayUtils.toString produced here, without the commons-lang call.
            sb.append(conn.getKey()).append(" <-- ")
                    .append(conn.getValue())
                    .append("\n");
        }
        return sb.toString();
    }
}

