package com.example.sparksubmitter.controller;

import com.example.sparksubmitter.dto.SparkSqlRequest;
import com.example.sparksubmitter.dto.SparkSqlResponse;
import com.example.sparksubmitter.service.HadoopService;
import com.example.sparksubmitter.service.SparkService;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.client.api.YarnClient;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;

import javax.validation.Valid;
import java.io.IOException;
import java.util.concurrent.CompletableFuture;

@RestController
@RequestMapping("/api/spark")
public class BigdataController {

    @Autowired
    private SparkService sparkService;

    @Autowired
    private HadoopService hadoopService;


    /**
     * Submits a Spark SQL task asynchronously.
     *
     * @param request the validated Spark SQL submission request
     * @return 202 Accepted with the in-flight {@link CompletableFuture} produced by the service
     *         (NOTE(review): serializing a CompletableFuture as a response body is unusual —
     *         confirm clients expect this shape rather than a task-id payload)
     */
    @PostMapping("/submit")
    public ResponseEntity<CompletableFuture<SparkSqlResponse>> submitSql(
            @Valid @RequestBody SparkSqlRequest request) {
        CompletableFuture<SparkSqlResponse> future = sparkService.submitSql(request);
        return new ResponseEntity<>(future, HttpStatus.ACCEPTED);
    }

    /**
     * Returns the current status of a previously submitted task.
     *
     * @param taskId the task identifier returned at submission time
     * @return 200 OK with the task status
     */
    @GetMapping("/status/{taskId}")
    public ResponseEntity<SparkSqlResponse> getTaskStatus(@PathVariable String taskId) {
        SparkSqlResponse response = sparkService.getTaskStatus(taskId);
        return new ResponseEntity<>(response, HttpStatus.OK);
    }

    /**
     * Cancels a running task, both in the service layer and at the YARN level.
     *
     * <p>The taskId is parsed as a YARN application id (see
     * {@code ApplicationId.fromString}), so it must be of the form
     * {@code application_<timestamp>_<sequence>}.
     *
     * @param taskId the task / YARN application identifier
     * @return 200 OK with the service-layer cancellation response
     * @throws RuntimeException wrapping any {@link YarnException} or {@link IOException}
     *         raised while killing the YARN application
     */
    @PostMapping("/cancel/{taskId}")
    public ResponseEntity<SparkSqlResponse> cancelTask(@PathVariable String taskId) {
        SparkSqlResponse response = sparkService.cancelTask(taskId);

        YarnConfiguration yarnConfig = new YarnConfiguration();
        yarnConfig.set("hadoop.security.authentication", "simple");

        // try-with-resources: YarnClient is a Service (Closeable), so it is closed
        // even when killApplication throws — the original leaked the client on failure.
        try (YarnClient yarnClient = YarnClient.createYarnClient()) {
            yarnClient.init(yarnConfig);
            yarnClient.start();
            yarnClient.killApplication(ApplicationId.fromString(taskId));
        } catch (YarnException | IOException e) {
            throw new RuntimeException("Failed to kill YARN application " + taskId, e);
        }
        return new ResponseEntity<>(response, HttpStatus.OK);
    }


    /**
     * Smoke-tests HDFS connectivity via {@link HadoopService#validateHdfsConnection()}.
     *
     * <p>NOTE(review): on success this returns 200 OK with a {@code null} body,
     * matching the original behavior — consider returning a non-null status payload.
     *
     * @return 200 OK (empty body) when the HDFS connection check does not throw
     * @throws RuntimeException wrapping any {@link IOException} from the connectivity check
     */
    @GetMapping("/test/hadoop")
    public ResponseEntity<SparkSqlResponse> testHadoop() {
        try {
            hadoopService.validateHdfsConnection();
        } catch (IOException e) {
            throw new RuntimeException("HDFS connection validation failed", e);
        }
        return new ResponseEntity<>(null, HttpStatus.OK);
    }

}