package com.zhengyisky.service;

import com.zhengyisky.entity.Data;
import com.zhengyisky.mapper.DataMapper;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
import org.springframework.stereotype.Service;

import java.io.*;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;

@Service
public class DataExportService {

    /** Number of concurrent worker tasks the id range is split across. */
    private static final int THREAD_COUNT = 20;

    @Autowired
    private DataMapper dataMapper;
    @Autowired
    private ThreadPoolTaskExecutor taskExecutor;

    /**
     * Exports every row of the data table as CSV and packages the result into a
     * single ZIP archive written to {@code outputPath + "data.zip"}.
     *
     * <p>The [minId, maxId] id range is split into {@link #THREAD_COUNT} contiguous
     * segments; each segment is streamed to its own temporary CSV file on
     * {@code taskExecutor}, then the temp files are merged into the ZIP in
     * segment (i.e. ascending id) order and deleted.
     *
     * @param outputPath directory/prefix the ZIP is written under; "data.zip" is appended
     * @param batchSize  number of rows fetched per mapper call inside each segment
     * @throws Exception if any worker fails (the worker's exception is propagated
     *                   wrapped in an ExecutionException) or the merge fails
     */
    public void exportLargeData(String outputPath, int batchSize) throws Exception {
        // 1. Determine the id range to export. Both are null for an empty table:
        //    still produce the (empty) archive so callers always get a file.
        Long minId = dataMapper.selectMinId();
        Long maxId = dataMapper.selectMaxId();
        if (minId == null || maxId == null) {
            mergeFiles(new ArrayList<>(), outputPath);
            return;
        }

        // 2. One slot per segment, indexed by segment number, so the merged ZIP
        //    preserves id order regardless of task completion order.
        File[] segmentFiles = new File[THREAD_COUNT];

        // 3. Segment width via truncating division; at least 1 so start/end
        //    arithmetic stays sane when the range is smaller than THREAD_COUNT.
        long segment = Math.max(1L, (maxId - minId + 1) / THREAD_COUNT);

        CompletableFuture<?>[] futures = new CompletableFuture<?>[THREAD_COUNT];
        for (int i = 0; i < THREAD_COUNT; i++) {
            long start = minId + (long) i * segment;
            // The last segment absorbs the remainder of the division.
            long end = (i == THREAD_COUNT - 1) ? maxId : start + segment - 1;

            int index = i;
            futures[i] = CompletableFuture.runAsync(() -> {
                try {
                    segmentFiles[index] = processSegment(start, end, batchSize);
                } catch (IOException e) {
                    // Surfaces to the caller through allOf(...).get() below.
                    throw new UncheckedIOException(
                            "export segment [" + start + ", " + end + "] failed", e);
                }
            }, taskExecutor);
        }

        // 4. Block until every segment is written; a failed worker rethrows here.
        CompletableFuture.allOf(futures).get();

        // 5. Merge the per-segment CSVs into one ZIP, in segment order. Segments
        //    past the end of a short id range produced no file and are skipped.
        List<File> tempFiles = new ArrayList<>();
        for (File file : segmentFiles) {
            if (file != null) {
                tempFiles.add(file);
            }
        }
        mergeFiles(tempFiles, outputPath);
    }

    /**
     * Streams all rows with id in [startId, endId] into a fresh temporary CSV
     * file (UTF-8), fetching {@code batchSize} rows per mapper call.
     *
     * @return the temp file holding this segment's rows (possibly empty)
     * @throws IOException if the temp file cannot be created or written
     */
    private File processSegment(Long startId, Long endId, int batchSize) throws IOException {
        File tempFile = File.createTempFile("export-", ".csv");
        try (BufferedWriter writer =
                     Files.newBufferedWriter(tempFile.toPath(), StandardCharsets.UTF_8)) {
            long currentId = startId;
            while (currentId <= endId) {
                List<Data> batch = dataMapper.selectBatch(currentId, batchSize);
                if (batch.isEmpty()) {
                    break;
                }

                boolean pastEnd = false;
                for (Data data : batch) {
                    // selectBatch is keyed only on currentId, so a batch can run
                    // past this segment's boundary; drop those rows here or they
                    // would be duplicated by the next segment's worker.
                    if (data.getId() > endId) {
                        pastEnd = true;
                        break;
                    }
                    writer.write(data.toCsvRow());
                    writer.newLine();
                }
                if (pastEnd) {
                    break;
                }
                // Keyset pagination: resume just after the last id seen.
                currentId = batch.get(batch.size() - 1).getId() + 1;
            }
        }
        return tempFile;
    }

    /**
     * Packs the given temp files, in list order, into {@code outputPath + "data.zip"}.
     * Streams are closed even if an entry fails to copy.
     */
    private void mergeFiles(List<File> tempFiles, String outputPath) throws IOException {
        try (ZipOutputStream zos =
                     new ZipOutputStream(new FileOutputStream(outputPath + "data.zip"))) {
            for (File tempFile : tempFiles) {
                addTempFileToZip(tempFile, zos);
            }
        }
    }

    /**
     * Copies one temp file into the ZIP as an entry named after the file,
     * closes the entry, and deletes the temp file.
     */
    private static void addTempFileToZip(File tempFile, ZipOutputStream zos) throws IOException {
        zos.putNextEntry(new ZipEntry(tempFile.getName()));
        try {
            Files.copy(tempFile.toPath(), zos);
        } finally {
            zos.closeEntry();
            // Best effort: fall back to JVM-exit cleanup if the delete fails.
            if (!tempFile.delete()) {
                tempFile.deleteOnExit();
            }
        }
    }
}

