package com.exam.demo.controller;

import com.exam.demo.commom.Result;
import com.exam.demo.job.HadoopJobRunner;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.springframework.scheduling.annotation.Async;
import org.springframework.scheduling.annotation.EnableAsync;
import org.springframework.web.bind.annotation.CrossOrigin;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.multipart.MultipartFile;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.URI;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.StandardCopyOption;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicLong;

/**
 * @author SongYueSheng
 * @date 2024/6/8 11:47
 */
@RestController
@CrossOrigin(origins = "*")  // 允许跨域请求
@EnableAsync
public class FileUploadController {

    private static final String HDFS_URI = "hdfs://192.168.127.131:8020";
    private static final String HDFS_INPUT_PATH = "/input/";
    private static final String HDFS_OUTPUT_PATH = "/output/";

    private AtomicLong taskIdGenerator = new AtomicLong(1);
    private ConcurrentHashMap<Long, List<ResultData>> taskResults = new ConcurrentHashMap<>();
    private ConcurrentHashMap<Long, String> taskStatus = new ConcurrentHashMap<>();

    @PostMapping("/upload")
    public Result<?> handleFileUpload(@RequestParam("file") MultipartFile file) {
        Long taskId = taskIdGenerator.getAndIncrement();
        taskStatus.put(taskId, "IN_PROGRESS");

        // 保存文件到本地临时目录
        File tempFile;
        try {
            tempFile = File.createTempFile("upload_", "_" + file.getOriginalFilename());
            try (InputStream in = file.getInputStream();
                 FileOutputStream out = new FileOutputStream(tempFile)) {
                byte[] buffer = new byte[1024];
                int bytesRead;
                while ((bytesRead = in.read(buffer)) > 0) {
                    out.write(buffer, 0, bytesRead);
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
            return Result.error("File upload failed: " + e.getMessage());
        }

        CompletableFuture.runAsync(() -> {
            try {
                processFile(tempFile, taskId);
            } catch (Exception e) {
                taskStatus.put(taskId, "FAILED");
                e.printStackTrace();
            } finally {
                // 删除临时文件
                tempFile.delete();
            }
        });

        return Result.success(taskId);
    }

    @Async
    public void processFile(File localFile, Long taskId) throws Exception {
        // 配置HDFS
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", HDFS_URI);
        System.setProperty("HADOOP_USER_NAME", "root");
        FileSystem fs = FileSystem.get(new URI(HDFS_URI), conf);

        // 上传文件到HDFS
        String inputFilePath = HDFS_INPUT_PATH + localFile.getName();
        try (InputStream in = Files.newInputStream(localFile.toPath())) {
            Path outputPath = new Path(inputFilePath);
            if (fs.exists(outputPath)) {
                fs.delete(outputPath, true);
            }
            IOUtils.copyBytes(in, fs.create(outputPath), conf);
        }

        // 调用Hadoop MapReduce程序
        String outputPath = HDFS_OUTPUT_PATH + "result_" + taskId;
        HadoopJobRunner jobRunner = new HadoopJobRunner();
        jobRunner.runJob(inputFilePath, outputPath, conf);

        // 读取处理结果
        Path resultFile = new Path(outputPath + "/part-r-00000");
        List<ResultData> resultDataList = new ArrayList<>();
        try (InputStream resultStream = fs.open(resultFile);
             BufferedReader reader = new BufferedReader(new InputStreamReader(resultStream))) {
            String line;
            while ((line = reader.readLine()) != null) {
                String[] parts = line.split("\t");
                if (parts.length == 2) {
                    resultDataList.add(new ResultData(parts[0], parts[1]));
                }
            }
        }

        taskResults.put(taskId, resultDataList);
        taskStatus.put(taskId, "COMPLETED");
    }

    @PostMapping("/result")
    public Result<?> getResult(@RequestParam("taskId") Long taskId) {
        String status = taskStatus.get(taskId);
        if ("COMPLETED".equals(status)) {
            return Result.success(taskResults.get(taskId));
        } else if ("IN_PROGRESS".equals(status)) {
            return Result.success("Task is still in progress.");
        } else {
            return Result.error("Task failed.");
        }
    }

    private static class ResultData {
        private String date;
        private String temperature;

        public ResultData(String date, String temperature) {
            this.date = date;
            this.temperature = temperature;
        }

        public String getDate() {
            return date;
        }

        public void setDate(String date) {
            this.date = date;
        }

        public String getTemperature() {
            return temperature;
        }

        public void setTemperature(String temperature) {
            this.temperature = temperature;
        }
    }
}
