package com.panyy.controller;

import com.panyy.config.SparkProperties;
import com.panyy.constants.SparkConstant;

import org.apache.spark.launcher.SparkAppHandle;
import org.apache.spark.launcher.SparkLauncher;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.logging.Logger;

@RestController
@RequestMapping("/launcher")
public class SparkDemoController {

    // Logger instead of System.out: output carries a level and timestamp and
    // can be routed/filtered by the host application's logging configuration.
    private static final Logger LOG = Logger.getLogger(SparkDemoController.class.getName());

    @Autowired
    private SparkProperties sparkProperties;

    /**
     * Submits the configured Spark application via {@link SparkLauncher} and
     * blocks the HTTP request thread until the job reaches a final state
     * (FINISHED, FAILED or KILLED).
     *
     * @return a small JSON success payload ({@code {"success":1}})
     * @throws IOException          if the Spark child process cannot be started
     * @throws InterruptedException if the request thread is interrupted while
     *                              waiting for the job to finish
     */
    @RequestMapping(value = "/kafkaInte", method = {RequestMethod.GET, RequestMethod.POST})
    public String kafkaInte() throws IOException, InterruptedException {
        Map<String, String> env = new HashMap<>();
        env.put("HADOOP_CONF_DIR", sparkProperties.getHadoopConfDir());
        env.put("JAVA_HOME", sparkProperties.getJavaHome());

        // Released by the state listener once the application reaches a final
        // state, letting this request thread return a response.
        CountDownLatch done = new CountDownLatch(1);

        SparkAppHandle handle = new SparkLauncher(env)
                .setSparkHome(sparkProperties.getSparkHome())
                .setAppResource(sparkProperties.getAppResource())
                .setMainClass(sparkProperties.getMainClass())
                .setMaster(sparkProperties.getMaster())
                .setDeployMode(sparkProperties.getDeployMode())
                .setConf(SparkConstant.SPARK_DRIVER_MEMORY, sparkProperties.getDriverMemory())
                .setConf(SparkConstant.SPARK_EXECUTOR_MEMORY, sparkProperties.getExecutorMemory())
                .setConf(SparkConstant.SPARK_EXECUTOR_CORES, sparkProperties.getExecutorCores())
                .setConf(SparkConstant.SPARK_YARN_QUEUE, sparkProperties.getYarnQueue())
                .setVerbose(true)
                .startApplication(new SparkAppHandle.Listener() {
                    @Override
                    public void stateChanged(SparkAppHandle handle) {
                        LOG.info("状态:" + handle.getState());
                        if (handle.getState().isFinal()) {
                            done.countDown();
                        }
                    }

                    @Override
                    public void infoChanged(SparkAppHandle handle) {
                        // infoChanged fires when application info (e.g. the
                        // assigned app id) changes; log the app id instead of
                        // re-printing the state as the original did.
                        LOG.info("信息：" + handle.getAppId());
                    }
                });

        LOG.info("任务正在执行中,请稍后。。。。。。。。。。。。");
        // NOTE(review): no timeout here — a hung Spark submission blocks this
        // HTTP request thread indefinitely. Consider await(timeout, unit) or
        // submitting asynchronously and exposing a status endpoint.
        done.await();
        LOG.info("任务运行结束了。。。。。。。。。。");
        return "{\"success\":1}";
    }
}
