package edu.zju.gis.dbfg.server.service.impl;


import com.github.pagehelper.PageHelper;
import edu.zju.gis.dbfg.common.Page;
import edu.zju.gis.dbfg.common.base.BaseServiceImpl;
import edu.zju.gis.dbfg.server.config.CommonSetting;
import edu.zju.gis.dbfg.server.mapper.ParallelModelMapper;
import edu.zju.gis.dbfg.server.model.ParallelModel;
import edu.zju.gis.dbfg.server.model.ParallelModelWithBLOBs;
import edu.zju.gis.dbfg.server.model.vo.PublicModelsInfo;
import edu.zju.gis.dbfg.server.service.ParallelModelService;
import org.json.JSONArray;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import java.util.Collection;
import java.util.Collections;
import java.util.List;

@Service
public class ParallelModelServiceImpl extends BaseServiceImpl<ParallelModelMapper, ParallelModelWithBLOBs, String> implements ParallelModelService {

    @Autowired
    private CommonSetting setting;
    @Autowired
    private ParallelModelMapper parallelModelMapper;

    /**
     * Loads a parallel model (including its BLOB columns) by primary key.
     *
     * @param pk primary key of the model
     * @return the model, or {@code null} if no row matches
     */
    @Override
    public ParallelModelWithBLOBs select(String pk) {
        return parallelModelMapper.selectByPrimaryKey(pk);
    }

    /**
     * Inserts the given model, writing only its non-null columns.
     * <p>
     * NOTE(review): unlike the sibling CRUD methods this carries no
     * {@code @Override} — presumably it is not declared on the interface;
     * confirm against {@code ParallelModelService}/{@code BaseServiceImpl}.
     *
     * @param parallelModelWithBLOBs model to insert
     * @return number of rows inserted
     */
    public int insert(ParallelModelWithBLOBs parallelModelWithBLOBs) {
        return parallelModelMapper.insertSelective(parallelModelWithBLOBs);
    }

    /**
     * Updates the model identified by its primary key, writing only the
     * non-null columns of the argument.
     *
     * @param parallelModelWithBLOBs model carrying the key and the fields to change
     * @return number of rows updated
     */
    @Override
    public int update(ParallelModelWithBLOBs parallelModelWithBLOBs) {
        return parallelModelMapper.updateByPrimaryKeySelective(parallelModelWithBLOBs);
    }

    /**
     * Deletes the model with the given primary key.
     *
     * @param s primary key of the model to delete
     * @return number of rows deleted
     */
    @Override
    public int delete(String s) {
        return parallelModelMapper.deleteByPrimaryKey(s);
    }

    /**
     * Checks whether a model with the given primary key exists.
     * Implemented as a full row fetch, so BLOB columns are loaded too;
     * acceptable for now, but a dedicated COUNT query would be cheaper.
     *
     * @param s primary key to probe
     * @return {@code true} if a matching row exists
     */
    @Override
    public boolean isExist(String s) {
        return parallelModelMapper.selectByPrimaryKey(s) != null;
    }

    /**
     * Returns one page of models.
     *
     * @param offset index of the first row to return
     * @param size   maximum number of rows to return
     * @return the page of models
     */
    public List<ParallelModelWithBLOBs> getByPage(int offset, int size) {
        return parallelModelMapper.selectByPage(offset, size);
    }

    /**
     * Fetches the models whose ids are in {@code idList}.
     * <p>
     * Fix: a null or empty id list now yields an empty list instead of
     * {@code null}. The old null return forced a null-check on every caller,
     * and passing an empty collection onward would have generated an invalid
     * SQL {@code IN ()} clause. Callers that tested {@code != null} continue
     * to work unchanged.
     *
     * @param idList ids to look up; may be null or empty
     * @return matching models, never {@code null}
     */
    @Override
    public List<ParallelModel> getByIdList(Collection<String> idList) {
        if (idList == null || idList.isEmpty()) {
            return Collections.emptyList();
        }
        return parallelModelMapper.selectByIdList(idList);
    }

    /**
     * Returns one page of public-model summaries visible to the given user.
     *
     * @param userId id of the requesting user
     * @param page   carries the requested page number and size
     * @return the requested page
     */
    @Override
    public Page<PublicModelsInfo> getAll(String userId, Page page) {
        // PageHelper intercepts the NEXT mapper call on this thread and
        // rewrites it into a paged query.
        PageHelper.startPage(page.getPageNo(), page.getPageSize());
        return new Page<>(parallelModelMapper.selectAll(userId));
    }

    /**
     * Returns one page of models whose ids fall in {@code idList}.
     *
     * @param idList candidate model ids; must be non-empty (an empty list
     *               would produce an invalid SQL {@code IN ()} clause)
     * @param page   carries the requested page number and size
     * @return the requested page
     */
    @Override
    public Page<ParallelModel> searchByClass(Collection<String> idList, Page page) {
        PageHelper.startPage(page.getPageNo(), page.getPageSize());
        return new Page<>(parallelModelMapper.selectByIdList(idList));
    }

    /**
     * Builds the shell command that submits the model as a YARN job.
     * <p>
     * NOTE(review): {@code jobName}, {@code params} and the model's paths are
     * interpolated into a command line without escaping — if any of them can
     * come from untrusted users this is a shell-injection risk; verify at the
     * call sites.
     *
     * @param model      model to run; its framework type selects the launcher
     *                   ({@code spark} vs {@code hadoop}/{@code mapreduce})
     * @param jobName    name the job is submitted under
     * @param params     program arguments, appended space-separated; may be null
     * @param envSetting exactly four entries, in order: driver memory,
     *                   executor count, executor memory, executor cores
     * @return the full command line, or {@code null} if the framework type is
     *         not recognized
     * @throws IllegalArgumentException if {@code envSetting} has fewer than four entries
     */
    @Override
    public String getCmd(ParallelModelWithBLOBs model, String jobName, List<String> params, List<String> envSetting) {
        // Fail fast with a clear message instead of a bare IndexOutOfBoundsException.
        if (envSetting == null || envSetting.size() < 4) {
            throw new IllegalArgumentException(
                    "envSetting must hold [driverMemory, numExecutors, executorMemory, executorCores], got: " + envSetting);
        }
        String driverMemory = envSetting.get(0);
        String numExecutors = envSetting.get(1);
        String executorMemory = envSetting.get(2);
        String executorCores = envSetting.get(3);
        // Trailing space deliberately kept to match the historical format.
        String paramStr = (params == null || params.isEmpty()) ? "" : String.join(" ", params) + " ";

        String cmd = null;
        if (model.getFrameworkType().equalsIgnoreCase("spark")) {
            String sparkSubmit = setting.getSparkHome() + "/bin/spark-submit";
            // Options shared by the jar and python launch forms.
            // (spark.default.parallelism was once configured here; it is now
            // left to the cluster defaults.)
            String resources = " --master yarn --deploy-mode cluster --driver-memory " + driverMemory +
                    " --num-executors " + numExecutors +
                    " --executor-memory " + executorMemory +
                    " --executor-cores " + executorCores +
                    " --name " + jobName;
            if (model.getJarPath().split(",")[0].endsWith(".jar")) {
                // JVM job: needs the entry class plus the application jar.
                cmd = sparkSubmit + " --class " + model.getMainClass() + resources +
                        " " + model.getJarPath() + " " + paramStr;
            } else {
                // Python job: jarPath is either "app.py" or "app.py,deps.zip".
                cmd = sparkSubmit + resources;
                if (model.getJarPath().contains(",")) {
                    String[] arr = model.getJarPath().split(",");
                    cmd += " --py-files " + arr[1] + " " + arr[0];
                } else {
                    cmd += " " + model.getJarPath();
                }
                cmd += " " + paramStr;
            }
        } else if (model.getFrameworkType().equalsIgnoreCase("hadoop")
                || model.getFrameworkType().equalsIgnoreCase("mapreduce")) {
            cmd = setting.getHadoopHome() + "/bin/hadoop jar " + model.getJarPath() + " " + model.getMainClass()
                    + " -D mapreduce.job.name=" + jobName
                    + " " + paramStr;
        }
        return cmd;
    }

    /**
     * Returns one page of the given user's own model summaries.
     *
     * @param userId id of the owning user
     * @param page   carries the requested page number and size
     * @return the requested page
     */
    @Override
    public Page<PublicModelsInfo> selectByUserId(String userId, Page page) {
        PageHelper.startPage(page.getPageNo(), page.getPageSize());
        return new Page<>(parallelModelMapper.selectByUserId(userId));
    }
}
