package com.exam.demo;

import com.exam.demo.job.HadoopJobRunner;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.springframework.web.bind.annotation.CrossOrigin;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.multipart.MultipartFile;

import java.io.BufferedReader;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.URI;
import java.nio.charset.StandardCharsets;

/**
 * REST endpoint that accepts a file upload, stores it in HDFS, runs a Hadoop
 * MapReduce job over it, and returns the job's textual output.
 *
 * @author SongYueSheng
 * @date 2024/6/7 10:59
 */
@RestController
@CrossOrigin(origins = "*")  // allow cross-origin requests (dev convenience — restrict in production)
public class DemoController {

    private static final String HDFS_URI = "hdfs://192.168.127.131:8020";
    private static final String HDFS_INPUT_PATH = "/input/";
    private static final String HDFS_OUTPUT_PATH = "/output/";

    /**
     * Handles a multipart file upload: copies the file to HDFS, launches the
     * MapReduce job, then reads back the reducer output.
     *
     * @param file the uploaded file; its (sanitized) original filename names the HDFS input file
     * @return the contents of the job's {@code part-r-00000} output file, decoded as UTF-8
     * @throws Exception on HDFS access failure or job failure
     */
    @PostMapping("/upload")
    public String handleFileUpload(@RequestParam("file") MultipartFile file) throws Exception {
        // Configure HDFS access.
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", HDFS_URI);
        System.setProperty("HADOOP_USER_NAME", "root");

        // FIX: the FileSystem was previously never closed — leaked one handle per request.
        try (FileSystem fs = FileSystem.get(new URI(HDFS_URI), conf)) {
            // FIX: guard against a missing filename and strip any client-supplied
            // path components so the upload cannot escape HDFS_INPUT_PATH.
            String original = file.getOriginalFilename();
            if (original == null || original.isEmpty()) {
                throw new IllegalArgumentException("Uploaded file has no filename");
            }
            String safeName =
                    original.substring(Math.max(original.lastIndexOf('/'), original.lastIndexOf('\\')) + 1);
            if (safeName.isEmpty()) {
                throw new IllegalArgumentException("Uploaded filename is invalid: " + original);
            }

            // Upload the file to HDFS, replacing any previous file of the same name.
            String inputFilePath = HDFS_INPUT_PATH + safeName;
            Path hdfsInput = new Path(inputFilePath);
            try (InputStream in = file.getInputStream()) {
                if (fs.exists(hdfsInput)) {
                    fs.delete(hdfsInput, true);
                }
                // copyBytes closes the HDFS output stream when done.
                IOUtils.copyBytes(in, fs.create(hdfsInput), conf);
            }

            // FIX: MapReduce refuses to run if the output directory already exists,
            // so every request after the first used to fail — clear leftovers first.
            String outputPath = HDFS_OUTPUT_PATH + "result";
            Path hdfsOutput = new Path(outputPath);
            if (fs.exists(hdfsOutput)) {
                fs.delete(hdfsOutput, true);
            }

            // Run the MapReduce job (blocks until completion).
            HadoopJobRunner jobRunner = new HadoopJobRunner();
            jobRunner.runJob(inputFilePath, outputPath, conf);

            // Read the reducer output. FIX: the original decoded each raw byte chunk
            // with the platform-default charset, corrupting multi-byte characters
            // split across buffer boundaries — decode through a UTF-8 Reader instead.
            Path resultFile = new Path(outputPath + "/part-r-00000");
            StringBuilder resultContent = new StringBuilder();
            try (BufferedReader reader = new BufferedReader(
                    new InputStreamReader(fs.open(resultFile), StandardCharsets.UTF_8))) {
                char[] buffer = new char[1024];
                int charsRead;
                while ((charsRead = reader.read(buffer)) != -1) {
                    resultContent.append(buffer, 0, charsRead);
                }
            }

            return resultContent.toString();
        }
    }
}