package edu.zju.gis.dldsj.service.impl;

import edu.zju.gis.dldsj.config.CommonSetting;
import edu.zju.gis.dldsj.config.QueryConfig;
import edu.zju.gis.dldsj.dao.ParallelModelMapper;
import edu.zju.gis.dldsj.entity.ParallelModel;
import edu.zju.gis.dldsj.entity.ParallelModelWithBLOBs;
import edu.zju.gis.dldsj.service.ParallelModelService;
import org.json.JSONArray;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import java.util.Collection;
import java.util.List;

/**
 * Service implementation for CRUD operations on parallel-computation model
 * records and for assembling the shell command line that submits a model as
 * a job to Spark (jar or Python) or Hadoop/MapReduce.
 *
 * @author yanlo yanlong_lee@qq.com
 * @version 1.0 2018/08/01
 */
@Service
public class ParallelModelServiceImpl implements ParallelModelService {
    @Autowired
    private CommonSetting setting;
    @Autowired
    private QueryConfig queryConfig;
    @Autowired
    private ParallelModelMapper parallelModelMapper;

    /**
     * Fetches a model by primary key.
     *
     * @param pk primary key of the model
     * @return the model, or {@code null} if no row matches
     */
    @Override
    public ParallelModelWithBLOBs select(String pk) {
        return parallelModelMapper.selectByPrimaryKey(pk);
    }

    /**
     * Inserts a model; only non-null fields are written (selective insert).
     *
     * @param parallelModelWithBLOBs model to insert
     * @return number of rows affected
     */
    @Override
    public int insert(ParallelModelWithBLOBs parallelModelWithBLOBs) {
        return parallelModelMapper.insertSelective(parallelModelWithBLOBs);
    }

    /**
     * Updates a model by primary key; only non-null fields are written.
     *
     * @param parallelModelWithBLOBs model carrying the key and changed fields
     */
    @Override
    public void update(ParallelModelWithBLOBs parallelModelWithBLOBs) {
        parallelModelMapper.updateByPrimaryKeySelective(parallelModelWithBLOBs);
    }

    /**
     * Deletes the model with the given primary key (no-op if absent).
     *
     * @param s primary key of the model to delete
     */
    @Override
    public void delete(String s) {
        parallelModelMapper.deleteByPrimaryKey(s);
    }

    /**
     * Checks whether a model with the given primary key exists.
     *
     * @param s primary key to probe
     * @return {@code true} if a matching row exists
     */
    @Override
    public boolean isExist(String s) {
        return parallelModelMapper.selectByPrimaryKey(s) != null;
    }

    /**
     * Returns one page of models.
     *
     * @param offset row offset of the first result
     * @param size   maximum number of rows to return
     * @return the page of models (possibly empty)
     */
    @Override
    public List<ParallelModelWithBLOBs> getByPage(int offset, int size) {
        return parallelModelMapper.selectByPage(offset, size);
    }

    /**
     * Fetches the models whose ids appear in the given collection.
     *
     * @param idList ids to look up
     * @return matching models (without BLOB columns)
     */
    @Override
    public List<ParallelModel> getByIdList(Collection<String> idList) {
        return parallelModelMapper.selectByIdList(idList);
    }

    /**
     * Builds the shell command that submits the given model as a job.
     * <p>
     * Supported framework types (case-insensitive): {@code spark} (jar or
     * Python entry point, chosen by the extension of the first comma-separated
     * segment of the jar path) and {@code hadoop}/{@code mapreduce}.
     *
     * @param model   model describing the framework, entry point and resources
     * @param jobName name assigned to the submitted job
     * @param params  positional job arguments, appended space-separated
     *                (NOTE(review): {@code JSONArray.join} JSON-quotes string
     *                elements — presumably the callers rely on that; verify)
     * @return the command line, or {@code null} when the framework type is
     *         unrecognized or null (preserves the original contract)
     */
    @Override
    public String getCmd(ParallelModelWithBLOBs model, String jobName, JSONArray params) {
        // Constant-first comparison: safe when getFrameworkType() is null,
        // where the original chained call would throw NullPointerException.
        String frameworkType = model.getFrameworkType();
        if ("spark".equalsIgnoreCase(frameworkType)) {
            if (model.getJarPath().split(",")[0].endsWith(".jar")) {
                // Jar submission: --class precedes the shared options.
                return setting.getSparkHome() +
                        "/bin/spark-submit" +
                        " --class " + model.getMainClass() +
                        sparkCommonOptions(model) +
                        " --name " + jobName + " " +
                        model.getJarPath() + " " + params.join(" ");
            }
            // Python submission: jar path may be ".py" or ".py,.zip".
            String cmd = setting.getSparkHome() +
                    "/bin/spark-submit" +
                    sparkCommonOptions(model) +
                    " --name " + jobName +
                    " --jars " + setting.getJarPath() + "/" + queryConfig.getJar();
            String[] arr = model.getJarPath().split(",");
            // Length guard: a trailing comma (e.g. "job.py,") yields a
            // 1-element array and previously threw ArrayIndexOutOfBounds.
            if (arr.length > 1) {
                cmd += " --py-files " + arr[1] + " " + arr[0];
            } else {
                cmd += " " + model.getJarPath();
            }
            return cmd + " " + params.join(" ");
        }
        if ("hadoop".equalsIgnoreCase(frameworkType)
                || "mapreduce".equalsIgnoreCase(frameworkType)) {
            // NOTE(review): "-D mapreduce.job.name" is only honored when the
            // main class parses generic options (ToolRunner) — confirm.
            return setting.getHadoopHome() + "/bin/hadoop jar " + model.getJarPath() + " " + model.getMainClass()
                    + " -D mapreduce.job.name=" + jobName
                    + " " + params.join(" ");
        }
        return null;
    }

    /**
     * Renders the Spark submit options shared by the jar and Python branches
     * (master/deploy-mode, memory, executors, cores, parallelism). Output is
     * byte-identical to the previously duplicated fragment.
     */
    private String sparkCommonOptions(ParallelModelWithBLOBs model) {
        return " --master yarn --deploy-mode cluster --driver-memory " + model.getDriverMemory() +
                " --num-executors " + model.getNumExecutors() +
                " --executor-memory " + model.getExecutorMemory() +
                " --executor-cores " + model.getExecutorCores() +
                " --conf spark.default.parallelism=" + model.getParallelism();
    }

}
