package com.example.demo.controller;

import lombok.extern.slf4j.Slf4j;
import org.springframework.web.bind.annotation.*;

import java.io.*;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.atomic.AtomicLong;

@Slf4j
@RestController
@RequestMapping("/api/files2")
public class FileController2 {

    /** Directory scanned for input {@code *.csv} files. */
    private static final Path CSV_SOURCE_DIR = Paths.get("/data/seaweedfs");

    /** File that merged output rows are written to (and later analyzed from). */
    private static final Path MERGED_FILE = Paths.get("/data/processed/merged_data.txt");

    /**
     * Merges all CSV files under {@link #CSV_SOURCE_DIR} into {@link #MERGED_FILE},
     * processing one file per task on a fixed-size thread pool.
     *
     * <p>For every line with at least three comma-separated columns whose second column
     * (quotes stripped) contains a {@code '.'}, the row {@code thirdColumn + "/" + secondColumn}
     * is written to the output. Writes are serialized by synchronizing on the shared writer.
     *
     * @return always {@code "success"}; failures are logged, not propagated to the caller
     */
    @GetMapping("/csv/process")
    public String processCsvFiles() {
        AtomicLong totalLines = new AtomicLong(0);
        AtomicLong validLines = new AtomicLong(0);

        try {
            // Ensure the output directory exists (no-op if already present).
            Files.createDirectories(MERGED_FILE.getParent());

            File directory = CSV_SOURCE_DIR.toFile();
            if (!directory.exists() || !directory.isDirectory()) {
                return "success";
            }
            File[] files = directory.listFiles((dir, name) -> name.toLowerCase().endsWith(".csv"));
            if (files == null) {
                return "success";
            }
            log.info("开始处理，共发现 {} 个CSV文件", files.length);

            // try-with-resources guarantees the writer is closed on every path; the
            // previous version leaked it when the directory was missing or a throw
            // occurred before the explicit close(). UTF-8 is pinned explicitly rather
            // than relying on the platform default charset.
            try (BufferedWriter writer = Files.newBufferedWriter(MERGED_FILE, StandardCharsets.UTF_8)) {
                // Pool sized to the CPU count; one task per input file.
                int processors = Runtime.getRuntime().availableProcessors();
                ExecutorService executorService = Executors.newFixedThreadPool(processors);
                try {
                    CountDownLatch latch = new CountDownLatch(files.length);
                    for (int i = 0; i < files.length; i++) {
                        final File file = files[i];
                        final int fileIndex = i;
                        executorService.submit(() -> {
                            try {
                                mergeCsvFile(file, fileIndex, files.length, writer, totalLines, validLines);
                            } finally {
                                latch.countDown();
                            }
                        });
                    }
                    // Wait for every file to finish before closing the shared writer.
                    latch.await();
                } finally {
                    // Always release the pool threads, even if await() is interrupted.
                    executorService.shutdown();
                }
            }

            log.info("统计--------------------------------------------------------------");
            log.info("所有文件处理完成，总行数: {}", totalLines.get());
            log.info("符合条件的数据行数: {}", validLines.get());
            log.info("数据已保存到文件: {}", MERGED_FILE);
        } catch (InterruptedException e) {
            // Restore the interrupt flag so callers/containers can observe it.
            Thread.currentThread().interrupt();
            log.error("处理文件失败", e);
        } catch (Exception e) {
            log.error("处理文件失败", e);
        }

        return "success";
    }

    /**
     * Reads a single CSV file and appends its qualifying rows to the shared writer.
     * Any per-file failure is logged and swallowed so one bad file does not abort the batch.
     *
     * @param file       CSV file to read
     * @param fileIndex  zero-based index of the file (for progress logging)
     * @param fileCount  total number of files in the batch
     * @param writer     shared output writer; all writes synchronize on it
     * @param totalLines global counter of lines read across all files
     * @param validLines global counter of lines that matched the filter
     */
    private void mergeCsvFile(File file, int fileIndex, int fileCount, BufferedWriter writer,
                              AtomicLong totalLines, AtomicLong validLines) {
        try {
            long fileLines = 0;
            long fileValidLines = 0;
            long startTime = System.currentTimeMillis();

            try (BufferedReader reader = Files.newBufferedReader(file.toPath(), StandardCharsets.UTF_8)) {
                String line;
                while ((line = reader.readLine()) != null) {
                    totalLines.incrementAndGet();
                    fileLines++;
                    // NOTE(review): naive split — does not honor quoted commas inside
                    // CSV fields; kept as-is to preserve existing output semantics.
                    String[] columns = line.split(",");
                    if (columns.length >= 3) {
                        // Literal replace (no regex) is sufficient to strip quotes.
                        String secondColumn = columns[1].replace("\"", "");
                        String thirdColumn = columns[2].replace("\"", "");

                        // Filter: second column must contain a '.'.
                        if (secondColumn.contains(".")) {
                            validLines.incrementAndGet();
                            fileValidLines++;

                            // Merge third and second columns, joined by '/'.
                            String mergedData = thirdColumn + "/" + secondColumn;

                            // Serialize writes to the shared output file.
                            synchronized (writer) {
                                writer.write(mergedData);
                                writer.newLine();
                            }
                        }
                    }
                }
            }

            long endTime = System.currentTimeMillis();
            log.info("文件 {}/{}: {} - 处理完成，总行数: {}，有效行数: {}，耗时: {}ms",
                fileIndex + 1, fileCount, file.getName(), fileLines, fileValidLines, endTime - startTime);

        } catch (Exception e) {
            log.error("处理文件失败: {}", file.getName(), e);
        }
    }

    /**
     * Streams the merged output file and logs per-category statistics, where a category
     * is the first 20 characters of a line (or the whole line if shorter).
     *
     * @return {@code "success"} on completion; a Chinese "file missing" message when the
     *         merged file does not exist; {@code "error: ..."} on read failure
     */
    @GetMapping("/csv/analyze")
    public String analyzeMergedData() {
        AtomicLong totalLines = new AtomicLong(0);
        // Read is single-threaded here; ConcurrentHashMap is kept for parity with the
        // rest of the class, a plain HashMap would also be correct.
        ConcurrentHashMap<String, List<Long>> categoryStats = new ConcurrentHashMap<>();

        try {
            if (!Files.exists(MERGED_FILE)) {
                return "合并文件不存在: /data/processed/merged_data.txt";
            }

            log.info("开始分析合并文件...");

            try (BufferedReader reader = Files.newBufferedReader(MERGED_FILE, StandardCharsets.UTF_8)) {
                String line;
                long lineNumber = 0;
                while ((line = reader.readLine()) != null) {
                    lineNumber++;
                    totalLines.incrementAndGet();

                    // The first 20 characters classify the row.
                    String category = line.length() > 20 ? line.substring(0, 20) : line;

                    // Record this line number under its category.
                    categoryStats.computeIfAbsent(category, k -> new ArrayList<>()).add(lineNumber);

                    // Progress heartbeat every million lines.
                    if (lineNumber % 1000000 == 0) {
                        log.info("已处理 {} 行数据", lineNumber);
                    }
                }
            }

            log.info("统计结果--------------------------------------------------------------");
            log.info("总行数: {}", totalLines.get());
            log.info("不同类别数: {}", categoryStats.size());

            // Largest categories first.
            categoryStats.entrySet().stream()
                .sorted((e1, e2) -> Integer.compare(e2.getValue().size(), e1.getValue().size()))
                .forEach(entry -> {
                    String category = entry.getKey();
                    List<Long> lineNumbers = entry.getValue();
                    log.info("类别: {}", category);
                    log.info("  数据量: {}", lineNumbers.size());
                    log.info("  行号范围: {} - {}", lineNumbers.get(0), lineNumbers.get(lineNumbers.size() - 1));
                    log.info("  示例行号: {}", lineNumbers.subList(0, Math.min(5, lineNumbers.size())));
                    log.info("--------------------------------------------------------------");
                });

        } catch (Exception e) {
            log.error("分析文件失败", e);
            return "error: " + e.getMessage();
        }

        return "success";
    }
}