package my.io.hadoop.controller;

import cn.dev33.satoken.util.SaResult;
import cn.hutool.core.io.IoUtil;
import cn.hutool.core.lang.Dict;
import cn.hutool.core.text.csv.*;
import cn.hutool.core.util.IdUtil;
import cn.hutool.core.util.StrUtil;
import lombok.RequiredArgsConstructor;
import my.io.hadoop.jobs.JobComponent;
import my.io.hadoop.jobs.hdfs.HdfsJobByMonth;
import my.io.hadoop.vo.HdfsUploadJsonVO;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.multipart.MultipartFile;

import java.io.*;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

/**
 * @author Xin
 * @date 2024/03/27
 */
@RestController
@RequestMapping("/hdfs")
@RequiredArgsConstructor
public class HdfsController {

    /** Hadoop file-system client, injected by Spring via the Lombok-generated constructor. */
    private final FileSystem fs;

    /** Reusable zero-length array for {@code List#toArray} calls. */
    private static final String[] EMPTY_STRING_ARRAY = new String[0];

    /**
     * Uploads a multipart file into {@link HdfsJobByMonth#HDFS_INPUT_BASE_PATH} on HDFS.
     *
     * @param file the multipart payload to store
     * @param name target file name; falls back to {@link JobComponent#JOB_NAME} when blank
     * @return {@link SaResult#ok()} on success
     * @throws IOException if staging locally or copying to HDFS fails
     */
    @PostMapping("/upload")
    public SaResult upload(MultipartFile file, String name) throws IOException {
        name = StrUtil.blankToDefault(name, JobComponent.JOB_NAME);
        // Stage the upload in a uniquely named local file to avoid concurrent-request collisions.
        File localFile = new File(name + "-" + IdUtil.fastSimpleUUID());
        try {
            try (InputStream in = file.getInputStream();
                 OutputStream out = Files.newOutputStream(localFile.toPath())) {
                IoUtil.copy(in, out);
            }
            // delSrc=true deletes the local staging file once the HDFS copy succeeds.
            fs.copyFromLocalFile(true, true, new Path(localFile.getPath()),
                    new Path(HdfsJobByMonth.HDFS_INPUT_BASE_PATH, name));
        } finally {
            // If the HDFS copy threw before delSrc kicked in, don't leak the staging file.
            Files.deleteIfExists(localFile.toPath());
        }
        return SaResult.ok();
    }

    /**
     * Converts a JSON payload (header + rows of dicts) into a UTF-8 CSV file and uploads it
     * to {@link HdfsJobByMonth#HDFS_INPUT_BASE_PATH} on HDFS.
     *
     * @param jsonVO target name, CSV header list, and row data
     * @return {@link SaResult#ok()} on success
     * @throws IOException if writing the CSV or copying to HDFS fails
     */
    @PostMapping("/upload-by-json")
    public SaResult uploadByJson(@RequestBody HdfsUploadJsonVO jsonVO) throws IOException {
        File file = new File(jsonVO.getName() + "-" + IdUtil.fastSimpleUUID());
        try {
            try (CsvWriter writer = CsvUtil.getWriter(file, StandardCharsets.UTF_8, false)) {
                writer.writeLine(jsonVO.getHeader().toArray(EMPTY_STRING_ARRAY));
                for (Dict dict : jsonVO.getData()) {
                    // Emit each row's values in header order; missing keys become null cells.
                    List<String> row = jsonVO.getHeader().stream()
                            .map(dict::getStr)
                            .collect(Collectors.toList());
                    writer.writeLine(row.toArray(EMPTY_STRING_ARRAY));
                }
            }
            // delSrc=true deletes the local CSV once the HDFS copy succeeds.
            fs.copyFromLocalFile(true, true, new Path(file.getPath()),
                    new Path(HdfsJobByMonth.HDFS_INPUT_BASE_PATH, jsonVO.getName()));
        } finally {
            // If the HDFS copy threw before delSrc kicked in, don't leak the staging file.
            Files.deleteIfExists(file.toPath());
        }
        return SaResult.ok();
    }

    /**
     * Reads an HDFS file fully into a UTF-8 string.
     *
     * @param path HDFS path to read
     * @return the file content decoded as UTF-8
     * @throws IOException if the file cannot be opened or read
     */
    @GetMapping("/read-csv")
    public String readCsv(String path) throws IOException {
        // NOTE(review): 'path' is taken verbatim from the request — consider restricting it
        // to a known base directory to prevent reading arbitrary HDFS files.
        try (FSDataInputStream in = fs.open(new Path(path))) {
            return IoUtil.readUtf8(in);
        }
    }

    /**
     * Reads an HDFS CSV file and returns its rows as header-keyed maps
     * (first line is treated as the header by the default Hutool reader).
     *
     * @param path HDFS path of the CSV file
     * @return one map per data row, keyed by header column
     * @throws IOException if the file cannot be opened or parsed
     */
    @GetMapping("/read-json")
    public List<Map<String, String>> readJson(String path) throws IOException {
        // NOTE(review): 'path' is taken verbatim from the request — same traversal concern as readCsv.
        try (FSDataInputStream in = fs.open(new Path(path));
             // Explicit UTF-8: a bare InputStreamReader uses the platform charset (pre-JDK 18),
             // which would mis-decode files written by uploadByJson on non-UTF-8 hosts.
             InputStreamReader reader = new InputStreamReader(in, StandardCharsets.UTF_8)) {
            // The previous CsvConfig built here was never passed to the reader (dead code);
            // the default comma separator is what CsvUtil.getReader() already uses.
            return CsvUtil.getReader().readMapList(reader);
        }
    }
}
