package edu.zju.gis.dldsj.controller;

import edu.zju.gis.dldsj.common.Result;
import edu.zju.gis.dldsj.config.CommonSetting;
import edu.zju.gis.dldsj.constant.CodeConstants;
import edu.zju.gis.dldsj.entity.*;
import edu.zju.gis.dldsj.service.ParallelModelService;
import edu.zju.gis.dldsj.service.WfService;
import edu.zju.gis.dldsj.tasks.MonitorTasks;
import edu.zju.gis.dldsj.tasks.WorkflowMonitor;
import edu.zju.gis.dldsj.utils.*;
import lombok.extern.slf4j.Slf4j;
import org.json.JSONArray;
import org.json.JSONObject;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.*;
import scala.Tuple2;

import javax.servlet.http.HttpServletResponse;
import java.io.*;
import java.nio.charset.StandardCharsets;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;

/**
 * @author yanlong_lee@qq.com
 * @version 1.0 2018/11/07
 */
@Slf4j
@CrossOrigin
@Controller
@RequestMapping("/wf")
public class WorkflowController {

    /**
     * Matches placeholders of the form {@code $taskId_N} that reference the N-th
     * output of an upstream task: group 1 is the upstream task id, group 2 the
     * {@code _N} suffix. Hoisted to a constant — the original recompiled the
     * pattern on every request. The quantifier is widened from {@code _\d} to
     * {@code _\d+} so tasks with ten or more outputs resolve correctly.
     * NOTE(review): the suffix is mandatory, as in the original pattern; a bare
     * {@code $taskId} reference never matches.
     */
    private static final Pattern UPSTREAM_REF = Pattern.compile("\\$([A-Za-z0-9_\\-]+)(_\\d+)");

    @Autowired
    private CommonSetting setting;
    @Autowired
    private WfService wfService;
    @Autowired
    private MonitorTasks monitorTasks;
    @Autowired
    private ParallelModelService parallelModelService;

    /**
     * Submits a workflow: generates an airflow python DAG file from the request,
     * registers it in the airflow metadata tables, triggers a run over SSH and
     * starts a monitor task for it.
     *
     * @param userId      id of the logged-in user (taken from the session)
     * @param requestBody workflow description:<br>
     *                    {
     *                    connections: 'id1,id2,id3#id4,id3#id3,id5,id6',
     *                    nodes:{
     *                    id1: {taskId: '',artifactId:'',params:[]},
     *                    ...,
     *                    id6: {taskId: '',artifactId:'',params:[]}
     *                    }}
     * @return serialized {@link Result}: success with an empty body, or an error message
     */
    @RequestMapping(value = "/post", method = RequestMethod.POST)
    @ResponseBody
    public String postDag(@SessionAttribute("userId") String userId, @RequestBody String requestBody) {
        try {
            JSONObject params = new JSONObject(requestBody);
            String dagId = params.getString("dagId");
            WfDag wfDag = new WfDag();
            wfDag.setDagId(dagId);
            wfDag.setDagName(params.optString("dagName", dagId));
            wfDag.setUserId(userId);
            wfDag.setTasks(params.getString("connections"));
            int retries = params.optInt("retries");

            // Task chains: '#' separates chains, ',' separates tasks inside a chain.
            List<List<String>> chains = Arrays.stream(wfDag.getTasks().split("#"))
                    .map(s -> Arrays.asList(s.split(",")))
                    .collect(Collectors.toList());
            String endpoint = findEndpointTaskId(chains);

            // Per-run workspace for intermediate and final results.
            String workspace = Paths.get(setting.getJobResultPath(), dagId, System.currentTimeMillis() + "").toString();
            List<String> resultAddress = new ArrayList<>();
            Map<String, Tuple2<WfNode, JSONObject>> nodeConfigs =
                    buildNodeConfigs(params.getJSONArray("nodes"), workspace, endpoint, resultAddress);
            resolveUpstreamRefs(nodeConfigs);

            // Write the generated python DAG into airflow's dags folder.
            Path filePath = Paths.get(setting.getDagsFolder(), dagId + ".py");
            wfDag.setFileLocation(filePath.toString());
            boolean dependsOnPast = params.optBoolean("dependsOnPast", true);
            buildDag(wfDag, dependsOnPast, retries,
                    nodeConfigs.values().stream().map(Tuple2::_1).collect(Collectors.toSet()));

            AirflowDag airflowDag = new AirflowDag();
            airflowDag.setDagId(wfDag.getDagId());
            airflowDag.setIsPaused(false);//default true
            airflowDag.setIsSubdag(false);
            airflowDag.setIsActive(true);
            airflowDag.setLastSchedulerRun(DateUtil.now());
            airflowDag.setFileloc(wfDag.getFileLocation());
            airflowDag.setOwners(wfDag.getUserId());
            // Insert directly into the airflow metadata tables so the DAG shows up
            // without waiting for airflow's own dag-folder scan.
            wfService.insert(airflowDag, wfDag);
            String startCmd = String.format("export AIRFLOW_HOME=%s;airflow unpause %s;airflow trigger_dag %s",
                    setting.getAirflowHome(), dagId, dagId);
            SSHHelper.runSSH(setting.getNameNode(), setting.getUsername(), setting.getPassword(),
                    startCmd, setting.getParallelFilePath());
            WorkflowMonitor monitor = new WorkflowMonitor(setting, wfService, wfDag,
                    nodeConfigs.values().stream().map(Tuple2::_1).collect(Collectors.toList()),
                    workspace, String.join(",", resultAddress));
            monitorTasks.execute(monitor);
            return Result.success().setBody("").toString();
        } catch (Exception e) {
            log.error("工作流提交失败", e);
            return Result.error("工作流提交失败：" + e.getMessage()).toString();
        }
    }

    /**
     * Finds the id of the workflow's terminal task: the tail of a chain that does
     * not appear as a non-terminal element of any other chain.
     * <p>BUG FIX: the original inner loop condition was
     * {@code j < chains.size() && j != i}, which <em>stops</em> the scan at
     * {@code j == i} instead of skipping that index, so chains after {@code i}
     * were never examined and a non-terminal tail could be reported as endpoint.
     *
     * @param chains task chains, each a list of task ids in execution order
     * @return the terminal task id, or "" when no chain qualifies
     */
    private static String findEndpointTaskId(List<List<String>> chains) {
        String endPointId = "";
        for (int i = 0; i < chains.size(); i++) {
            List<String> chain = chains.get(i);
            String tail = chain.get(chain.size() - 1);
            boolean isEnd = true;
            for (int j = 0; j < chains.size(); j++) {
                if (j == i)
                    continue; // compare against every OTHER chain
                int index = chains.get(j).indexOf(tail);
                if (index >= 0 && index < chains.get(j).size() - 1) {
                    isEnd = false; // tail feeds another chain -> not terminal
                    break;
                }
            }
            if (isEnd)
                endPointId = tail;
        }
        return endPointId;
    }

    /**
     * Builds the per-task node configuration:
     * <ul>
     * <li>copies the optional airflow settings (retries, delays, timeout, env, ...)
     *     from the raw node JSON onto a {@link WfNode};</li>
     * <li>replaces each {@code $OUTPUT} parameter with a concrete path inside
     *     {@code workspace};</li>
     * <li>attaches the parallel model of the task under the JSON key {@code "model"};</li>
     * <li>for the endpoint task, records the parameters its model marks as
     *     {@code "out"} into {@code resultAddress}.</li>
     * </ul>
     *
     * @param nodes         the {@code nodes} array of the request
     * @param workspace     directory for this run's results
     * @param endpoint      id of the terminal task
     * @param resultAddress out-parameter receiving the final result paths
     * @return map of taskId to (node config, raw node JSON)
     */
    private Map<String, Tuple2<WfNode, JSONObject>> buildNodeConfigs(JSONArray nodes, String workspace,
                                                                     String endpoint, List<String> resultAddress) {
        Map<String, Tuple2<WfNode, JSONObject>> nodeConfigs = new HashMap<>();
        nodes.forEach(o -> {
            JSONObject node = (JSONObject) o;
            WfNode config = new WfNode();
            config.setTaskId(node.getString("taskId"));
            if (node.has("retries"))
                config.setRetries(node.getInt("retries"));
            if (node.has("retryDelay"))
                config.setRetryDelay(node.getInt("retryDelay"));
            if (node.has("maxRetryDelay"))
                config.setMaxRetryDelay(node.getInt("maxRetryDelay"));
            if (node.has("executionTimeout"))
                config.setExecutionTimeout(node.getInt("executionTimeout"));
            if (node.has("dependsOnPast"))
                config.setDependsOnPast(node.getBoolean("dependsOnPast"));
            if (node.has("env"))
                config.setEnv(node.getJSONObject("env").toString());
            config.setArtifactId(node.getString("artifactId"));
            JSONArray appParams = node.getJSONArray("params");
            int outputOrder = 0;
            for (int i = 0; i < appParams.length(); i++) {
                // Replace the $OUTPUT placeholder with an intermediate-result path.
                if (appParams.getString(i).equals("$OUTPUT")) {
                    appParams.put(i, Paths.get(workspace, config.getTaskId() + "_" + outputOrder++).toString());
                }
            }

            ParallelModelWithBLOBs model = parallelModelService.select(config.getArtifactId());
            node.put("model", model);
            if (endpoint.equals(config.getTaskId())) {
                // Record the final result paths: parameters the model flags as "out".
                JSONArray paramsDesc = new JSONArray(model.getParameters());
                for (int i = 0; i < paramsDesc.length(); i++)
                    if (paramsDesc.getJSONObject(i).has("out"))
                        resultAddress.add(appParams.getString(i));
            }
            nodeConfigs.put(config.getTaskId(), new Tuple2<>(config, node));
        });
        return nodeConfigs;
    }

    /**
     * Rewrites {@code $upstreamId_N} parameters to the concrete output path of the
     * referenced upstream task, then freezes the parameter list onto each config.
     * <p>BUG FIX: the original tested {@code matcher.groupCount()}, which is a
     * property of the <em>pattern</em> (always 2 here), not of the match, so both
     * checks were constant and the {@code outTag = "0"} default branch was dead
     * code. A reference to an unknown upstream task is now skipped with a warning
     * instead of raising a NullPointerException.
     */
    private static void resolveUpstreamRefs(Map<String, Tuple2<WfNode, JSONObject>> nodeConfigs) {
        nodeConfigs.forEach((taskId, tp) -> {
            WfNode config = tp._1();
            JSONArray appParams = tp._2().getJSONArray("params");
            for (int i = 0; i < appParams.length(); i++) {
                Matcher matcher = UPSTREAM_REF.matcher(appParams.getString(i));
                if (!matcher.matches())
                    continue;
                String upstreamId = matcher.group(1);
                Tuple2<WfNode, JSONObject> upstream = nodeConfigs.get(upstreamId);
                if (upstream == null) {
                    log.warn("参数`{}`引用了不存在的上游任务`{}`", appParams.getString(i), upstreamId);
                    continue;
                }
                JSONObject upstreamNode = upstream._2();
                ParallelModelWithBLOBs upstreamModel = (ParallelModelWithBLOBs) upstreamNode.get("model");
                JSONArray modelParams = new JSONArray(upstreamModel.getParameters());
                String outTag = matcher.group(2).substring(1);// strip the leading '_'
                for (int k = 0; k < modelParams.length(); k++) {
                    JSONObject modelParam = modelParams.getJSONObject(k);
                    // The k-th model parameter marked out=N is the upstream's N-th output.
                    if (modelParam.has("out") && modelParam.getString("out").equals(outTag))
                        appParams.put(i, upstreamNode.getJSONArray("params").getString(k));
                }
            }
            config.setParams(appParams.toString());
        });
    }

    /**
     * Returns the latest run of the given DAG together with its task instances.
     *
     * @param dagId workflow id
     * @return Result JSON with keys {@code dagRun} (null when the DAG never ran)
     *         and {@code dagInstances}
     */
    @RequestMapping(value = "/monitor/{dagId}", method = RequestMethod.GET)
    @ResponseBody
    public String monitor(@PathVariable String dagId) {
        Result<Map<String, Object>> result = new Result<>();
        try {
            Map<String, Object> body = new HashMap<>();
            List<WfRun> wfRuns = wfService.getWfRun(dagId);
            if (wfRuns.isEmpty()) {
                body.put("dagRun", null);
            } else {
                WfRun run = wfRuns.get(0);
                body.put("dagRun", run);
                body.put("dagInstances", wfService.getWfInstance(dagId, run.getExecutionDate()));
            }
            result.setCode(CodeConstants.SUCCESS).setBody(body);
        } catch (Exception e) {
            log.error("获取任务状态失败", e);
            result.setCode(CodeConstants.SERVICE_ERROR).setMessage("获取任务状态失败：" + e.getMessage());
        }
        return result.toString();
    }

    /**
     * Streams the result of a workflow run to the client as a zip archive, pulling
     * the files from HDFS to the local result directory on first access.
     * <p>BUG FIXES relative to the original:
     * <ul>
     * <li>an unknown dagId now produces a message instead of a silent empty response;</li>
     * <li>a failed marker-file creation aborts the request instead of zipping anyway;</li>
     * <li>the attachment filename is transcoded from explicit UTF-8 bytes rather
     *     than the platform default charset.</li>
     * </ul>
     *
     * @param dagId    workflow id
     * @param response target servlet response
     */
    @RequestMapping(value = "/result/{dagId}/download", method = RequestMethod.GET)
    public void download(@PathVariable String dagId, HttpServletResponse response) {
        List<WfRun> wfRuns = wfService.getWfRun(dagId);
        if (wfRuns.isEmpty()) {
            try {
                response.getOutputStream().println("工作流任务`" + dagId + "`不存在运行记录");
            } catch (IOException e) {
                log.error("系统错误", e);
            }
            return;
        }
        WfRun wfRun = wfRuns.get(0);
        if (!checkLocalResult(wfRun)) {
            try {
                response.getOutputStream().println("工作流任务`" + wfRun.getDagId() + "`的结果不存在或已被删除");
            } catch (IOException e) {
                log.error("系统错误", e);
            }
            return;
        }
        final String fullPath = setting.getJobResultPath() + "/" + wfRun.getDagId();
        final String tarPath = fullPath + ".zip";
        File[] children = new File(fullPath).listFiles();
        if (children == null || children.length == 0) {
            // Empty folders are skipped by the packer — drop in a marker file.
            try {
                if (!new File(fullPath, "空文件夹").createNewFile())
                    log.warn("占位文件创建失败：{}", fullPath);
            } catch (IOException e) {
                log.error("文件创建失败", e);
                writeError(response, "文件下载失败", null);
                return;
            }
        }

        ZipCompressorByAnt zip = new ZipCompressorByAnt(tarPath);
        zip.compressExe(fullPath, true);
        File file = new File(tarPath);
        try (InputStream fis = new BufferedInputStream(new FileInputStream(file));
             OutputStream fos = response.getOutputStream()) {
            response.setContentType("application/octet-stream");
            // Transcode so a non-ASCII dag id survives the Latin-1 header encoding.
            response.addHeader("Content-Disposition", "attachment;filename="
                    + new String(wfRun.getDagId().getBytes(StandardCharsets.UTF_8), StandardCharsets.ISO_8859_1) + ".zip");
            response.addHeader("Content-Length", String.valueOf(file.length()));
            byte[] buffer = new byte[1024 * 1024 * 4];
            int length;
            while ((length = fis.read(buffer)) != -1)
                fos.write(buffer, 0, length);
            FileUtil.deletePath(tarPath);// the zip is a temporary artifact
        } catch (IOException e) {
            log.error("文件下载失败", e);
            writeError(response, "文件下载失败：", e.getMessage());
        }
    }

    /** Writes an error message (and optional detail line) with HTTP 500 to the response. */
    private static void writeError(HttpServletResponse response, String message, String detail) {
        response.setStatus(500);
        try (PrintWriter writer = response.getWriter()) {
            writer.println(message);
            if (detail != null)
                writer.println(detail);
        } catch (IOException ex) {
            log.error("系统错误", ex);
        }
    }

    /**
     * Ensures the run's results exist in the local result directory, copying them
     * from HDFS on first access. Each comma-separated path in the run's
     * {@code result} field is copied when it exists on HDFS.
     *
     * @param wfRun the run whose results are requested
     * @return true when the local directory exists and is readable
     */
    private boolean checkLocalResult(WfRun wfRun) {
        File localFile = new File(setting.getJobResultPath(), wfRun.getDagId());
        if (!localFile.exists()) {
            if (!localFile.mkdirs())
                log.warn("目录`{}`创建失败", localFile.getAbsolutePath());
            String[] fullPaths = wfRun.getResult().split(",");
            HDFSHelper hdfsHelper = HDFSHelper.getInstance();//copy from hdfs
            org.apache.hadoop.fs.Path target = new org.apache.hadoop.fs.Path(localFile.getAbsolutePath());
            for (String fullPath : fullPaths) {
                org.apache.hadoop.fs.Path src = new org.apache.hadoop.fs.Path(fullPath);
                try {
                    if (hdfsHelper.exists(src))
                        hdfsHelper.copyToLocalFile(src, target);
                } catch (Exception e) {
                    // Best effort: a single missing file must not abort the whole copy.
                    log.error("文件`" + localFile.getAbsolutePath() + "`读取异常", e);
                }
            }
        }
        return localFile.listFiles() != null;
    }

    /**
     * Writes the airflow python DAG file: the header with default args, one task
     * definition per node, and the {@code a >> b} dependency lines derived from
     * the connection chains ('-' is invalid in python identifiers, replaced by '_').
     *
     * @throws IOException when the DAG file cannot be written
     */
    private void buildDag(WfDag wfDag, boolean dependsOnPast, int retries, Iterable<WfNode> nodeConfigs) throws IOException {
        List<String> content = new ArrayList<>();
        content.add(buildHead(wfDag.getUserId(), dependsOnPast, retries, wfDag.getDagId()));
        nodeConfigs.forEach(node -> content.add(node.buildTask(parallelModelService)));
        Arrays.stream(wfDag.getTasks().split("#")).map(s -> s.split(","))
                .forEach(conn -> content.add(String.join(" >> ", conn).replace("-", "_")));
        FileUtil.write(Paths.get(wfDag.getFileLocation()), content);
    }

    /**
     * Renders the python header of the DAG file: imports, the {@code default_args}
     * dict and the {@code DAG(...)} declaration (no schedule — triggered manually).
     */
    private static String buildHead(String user, boolean dependsOnPast, int retries, String dagId) {
        return String.format("# -*- encoding: UTF-8 -*-\n" +
                "from airflow import DAG\n" +
                "from airflow.operators.bash_operator import BashOperator\n" +
                "from datetime import datetime\n" +
                "from datetime import timedelta\n\n" +
                "default_args = {\n" +
                "    'owner': '%s',\n" +
                "    'depends_on_past': %s,\n" +
                "    'start_date': datetime(2018,8,23),\n" +
                "    'retries': %d,\n" +
                "    'retry_delay': timedelta(minutes=1)\n" +
                "}\n\n" +
                "dag = DAG(\n" +
                "    dag_id='%s',\n" +
                "    schedule_interval=None,\n" +
                "    default_args=default_args\n" +
                ")\n\n", user, dependsOnPast ? "True" : "False", retries, dagId);
    }
}
