// NOTE(review): everything below down to the `package` declaration is a superseded
// multi-threaded draft of ChaosDataProcessor kept as commented-out code. Prefer
// deleting it — version control preserves the history.
//package com.iscas.dataaggregation.utils;
//
//import com.fasterxml.jackson.databind.JsonNode;
//import com.fasterxml.jackson.databind.ObjectMapper;
//import com.iscas.dataaggregation.domain.experiment.OtelLogs;
//import com.iscas.dataaggregation.domain.experiment.OtelTraces;
//import com.iscas.dataaggregation.domain.experiment.Record;
//import com.iscas.dataaggregation.mapper.mysql.FaultcorrelationMapper;
//import com.iscas.dataaggregation.mapper.mysql.RecordMapper;
//import com.iscas.dataaggregation.service.OtelTracesService;
//
//import org.apache.log4j.Logger;
//import org.springframework.beans.factory.annotation.Autowired;
//import org.springframework.beans.factory.annotation.Value;
//import org.springframework.stereotype.Component;
//import org.zeroturnaround.zip.ZipUtil;
//
//import java.io.File;
//import java.io.IOException;
//import java.util.*;
//import java.util.concurrent.ExecutorService;
//import java.util.concurrent.Executors;
//import java.util.concurrent.Future;
//
//@Component
//public class ChaosDataProcessor {
//
//    private static final Logger logger = Logger.getLogger(ChaosDataProcessor.class);
//
//    @Autowired
//    private FaultcorrelationMapper faultCorrelationMapper;
//
//    @Autowired
//    private ChaosFaultUtils chaosFaultUtils;
//
//    @Autowired
//    private ServicesUtils servicesUtils;
//
//    @Autowired
//    private MetricsUtils metricsUtils;
//
//    @Autowired
//    private LogsUtils logsUtils;
//
//    @Autowired
//    private OtelTracesService otelTracesService;
//
//    @Autowired
//    private RecordMapper recordMapper;
//
//    @Value("${file.upload-dir}")
//    private String uploadDir;
//
//    @Value("${file.download-dir}")
//    private String downloadDir;
//    private final ExecutorService executorService = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors());
//
//    public String processPlan(int planId, String namespace) {
//        String basePath = uploadDir;
//        String timestamp = String.valueOf(System.currentTimeMillis());
//        String zipFileName = planId + "_" + timestamp + ".zip";
//        String zipFilePath = basePath + zipFileName;
//
//        try {
//            logger.info("Starting processing for planId: " + planId);
//
//            // Step 1: Process Basic Information
//            logger.info("Fetching fault configuration IDs for planId: " + planId);
//            List<Integer> faultConfigIds = faultCorrelationMapper.findFaultConfigIdsByPlanId(planId);
//
//            List<Map<String, String>> basicInfo = new ArrayList<>();
//            for (Integer faultConfigId : faultConfigIds) {
//                logger.info("Processing faultConfigId: " + faultConfigId);
//                String faultName = chaosFaultUtils.getChaosNameByConfigId(faultConfigId, "Inject");
//                String nodeOrService = chaosFaultUtils.getNodeOrServiceNameByFaultConfigId(faultConfigId);
//                Map<String, String> faultInfo = new HashMap<>();
//                faultInfo.put("fault_name", faultName);
//                faultInfo.put("node_or_service", nodeOrService);
//                basicInfo.add(faultInfo);
//            }
//
//            // Step 2: Get Records and Process Timestamps
//            logger.info("Fetching records for planId: " + planId);
//            List<Record> records = recordMapper.findRecordsByPlanId(planId);
//
//            // Step 3: Get All Service Names
//            logger.info("Fetching all service names in namespace: " + namespace);
//            List<String> serviceNames = servicesUtils.getServiceNamesByNamespace(namespace);
//
//            // Step 4: Process Records
//            File planDir = new File(basePath + planId);
//            if (!planDir.exists() && !planDir.mkdirs()) {
//                throw new IOException("Failed to create directory: " + planDir.getPath());
//            }
//
//            File basicInfoFile = new File(planDir, "basic_info.json");
//            ObjectMapper objectMapper = new ObjectMapper();
//            objectMapper.writeValue(basicInfoFile, basicInfo);
//            logger.info("Basic information written to: " + basicInfoFile.getPath());
//
//            // Use multi-threading to process records
//            List<Future<Void>> futures = new ArrayList<>();
//            for (Record record : records) {
//                futures.add(executorService.submit(() -> {
//                    processRecord(record, planDir, namespace, serviceNames, objectMapper);
//                    return null;
//                }));
//            }
//
//            // Wait for all tasks to complete
//            for (Future<Void> future : futures) {
//                future.get();
//            }
//
//            // Compress the directory
//            logger.info("Compressing directory: " + planDir.getPath());
//            compressDirectory(planDir, zipFilePath);
//            logger.info("Data processing and compression completed successfully.");
//
//            return downloadDir + zipFileName;
//
//        } catch (Exception e) {
//            logger.error("Error processing plan: " + planId, e);
//            throw new RuntimeException("Error processing plan: " + planId, e);
//        } finally {
//            executorService.shutdown();
//        }
//    }
//
//    private void processRecord(Record record, File planDir, String namespace, List<String> serviceNames, ObjectMapper objectMapper) throws IOException {
//        long startTime = record.getStart_time().getTime();
//        long endTime = record.getEnd_time().getTime();
//        File recordDir = new File(planDir, String.valueOf(record.getId()));
//
//        if (!recordDir.exists() && !recordDir.mkdirs()) {
//            throw new IOException("Failed to create directory: " + recordDir.getPath());
//        }
//
//        // Trace data
//        logger.info("Fetching trace data for recordId: " + record.getId());
//        List<OtelTraces> traces = otelTracesService.findTracesByTimeAndNamespace(startTime, endTime, namespace);
//        File traceFile = new File(recordDir, "trace.json");
//        objectMapper.writeValue(traceFile, traces);
//        logger.info("Trace data written to: " + traceFile.getPath());
//
//        // Metrics and Logs data
//        List<Map<String, Object>> metricsData = Collections.synchronizedList(new ArrayList<>());
//        List<Map<String, Object>> logsData = Collections.synchronizedList(new ArrayList<>());
//
//        // Process serviceNames in parallel
//        List<Future<Void>> serviceFutures = new ArrayList<>();
//        for (String serviceName : serviceNames) {
//            serviceFutures.add(executorService.submit(() -> {
//                processServiceData(serviceName, namespace, startTime, endTime, metricsData, logsData);
//                return null;
//            }));
//        }
//
//        // Wait for service processing to complete
//        for (Future<Void> future : serviceFutures) {
//            try {
//                future.get();
//            } catch (Exception e) {
//                logger.error("Error processing service data", e);
//            }
//        }
//
//        File metricsFile = new File(recordDir, "metrics.json");
//        objectMapper.writeValue(metricsFile, metricsData);
//        logger.info("Metrics data written to: " + metricsFile.getPath());
//
//        File logsFile = new File(recordDir, "logs.json");
//        objectMapper.writeValue(logsFile, logsData);
//        logger.info("Logs data written to: " + logsFile.getPath());
//    }
//
//    private void processServiceData(String serviceName, String namespace, long startTime, long endTime,
//                                    List<Map<String, Object>> metricsData, List<Map<String, Object>> logsData) {
//        try {
//            logger.info("Processing service: " + serviceName);
//
//            JsonNode metrics = metricsUtils.getMetricsForService(namespace, serviceName, startTime, endTime);
//            Map<String, Object> metricEntry = new HashMap<>();
//            metricEntry.put("service_name", serviceName);
//            metricEntry.put("data", metrics);
//            metricsData.add(metricEntry);
//            logger.debug("Metrics data added for service: " + serviceName);
//
//            List<OtelLogs> logs = logsUtils.getLogs(startTime, endTime, namespace, serviceName);
//            Map<String, Object> logEntry = new HashMap<>();
//            logEntry.put("service_name", serviceName);
//            logEntry.put("data", logs);
//            logsData.add(logEntry);
//            logger.debug("Logs data added for service: " + serviceName);
//
//        } catch (Exception e) {
//            logger.error("Error processing service: " + serviceName, e);
//        }
//    }
//    public static void compressDirectory(File sourceDir, String outputZip) throws IOException {
//        File zipFile = new File(outputZip);
//        File parentDir = zipFile.getParentFile();
//        if (!parentDir.exists()) {
//            parentDir.mkdirs();
//        }
//        ZipUtil.pack(sourceDir, zipFile);
//    }
//}

package com.iscas.dataaggregation.utils;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.iscas.dataaggregation.domain.experiment.OtelLogs;
import com.iscas.dataaggregation.domain.experiment.OtelTraces;
import com.iscas.dataaggregation.domain.experiment.Record;
import com.iscas.dataaggregation.mapper.mysql.FaultcorrelationMapper;
import com.iscas.dataaggregation.mapper.mysql.RecordMapper;
import com.iscas.dataaggregation.service.OtelTracesService;

import org.apache.log4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
import org.zeroturnaround.zip.ZipUtil;

import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.util.*;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

@Component
public class ChaosDataProcessor {

    private static final Logger logger = Logger.getLogger(ChaosDataProcessor.class);

    // Jackson's ObjectMapper is thread-safe and expensive to construct;
    // share one instance instead of creating one per processPlan call.
    private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

    @Autowired
    private FaultcorrelationMapper faultCorrelationMapper;

    @Autowired
    private ChaosFaultUtils chaosFaultUtils;

    @Autowired
    private ServicesUtils servicesUtils;

    @Autowired
    private MetricsUtils metricsUtils;

    @Autowired
    private LogsUtils logsUtils;

    @Autowired
    private OtelTracesService otelTracesService;

    @Autowired
    private RecordMapper recordMapper;

    // Base directory where per-plan data folders and the final zip are written.
    // The default preserves the previously hard-coded developer path; deployments
    // should override it with `file.upload-dir` in application config.
    @Value("${file.upload-dir:/Users/mj/study/ISCS/apache-tomcat-9.0.93/webapps/chaosdata/}")
    private String uploadDir;

    // Public URL prefix under which the generated zip is reachable by clients.
    // The default preserves the previously hard-coded localhost URL.
    @Value("${file.download-dir:http://localhost:8080/chaosdata/}")
    private String downloadDir;

    /**
     * Aggregates chaos-experiment data for a plan — fault basic info, per-record
     * traces, metrics and logs — into JSON files under {@code uploadDir/planId/},
     * compresses that directory into a timestamped zip, and returns its download URL.
     *
     * @param planId    id of the chaos plan whose data should be exported
     * @param namespace Kubernetes namespace the plan's services run in
     * @return download URL of the generated zip archive
     * @throws RuntimeException wrapping any failure during aggregation or compression
     */
    public String processPlan(int planId, String namespace) {
        String zipFileName = planId + "_" + System.currentTimeMillis() + ".zip";
        String zipFilePath = uploadDir + zipFileName;

        try {
            logger.info("Starting processing for planId: " + planId);

            // Step 1: basic fault information (name + injected node/service) per fault config.
            List<Map<String, String>> basicInfo = collectBasicInfo(planId);

            // Step 2: execution records — one per fault-injection run, each with a time window.
            logger.info("Fetching records for planId: " + planId);
            List<Record> records = recordMapper.findRecordsByPlanId(planId);

            // Step 3: all service names in the target namespace.
            logger.info("Fetching all service names in namespace: " + namespace);
            List<String> serviceNames = servicesUtils.getServiceNamesByNamespace(namespace);

            // Step 4: write everything under <uploadDir>/<planId>/.
            File planDir = new File(uploadDir + planId);
            if (!planDir.exists() && !planDir.mkdirs()) {
                throw new IOException("Failed to create directory: " + planDir.getPath());
            }

            File basicInfoFile = new File(planDir, "basic_info.json");
            OBJECT_MAPPER.writeValue(basicInfoFile, basicInfo);
            logger.info("Basic information written to: " + basicInfoFile.getPath());

            for (Record record : records) {
                processRecord(record, planDir, namespace, serviceNames);
            }

            // Compress the whole plan directory into a single downloadable archive.
            logger.info("Compressing directory: " + planDir.getPath());
            compressDirectory(planDir, zipFilePath);
            logger.info("Data processing and compression completed successfully.");

            return downloadDir + zipFileName;

        } catch (Exception e) {
            logger.error("Error processing plan: " + planId, e);
            throw new RuntimeException("Error processing plan: " + planId, e);
        }
    }

    /**
     * Builds one {fault_name, node_or_service} entry for every fault configuration
     * attached to the plan.
     *
     * @param planId id of the chaos plan
     * @return list of fault-info maps, in the order the config ids were returned
     */
    private List<Map<String, String>> collectBasicInfo(int planId) {
        logger.info("Fetching fault configuration IDs for planId: " + planId);
        List<Integer> faultConfigIds = faultCorrelationMapper.findFaultConfigIdsByPlanId(planId);

        List<Map<String, String>> basicInfo = new ArrayList<>();
        for (Integer faultConfigId : faultConfigIds) {
            logger.info("Processing faultConfigId: " + faultConfigId);
            String faultName = chaosFaultUtils.getChaosNameByConfigId(faultConfigId, "Inject");
            String nodeOrService = chaosFaultUtils.getNodeOrServiceNameByFaultConfigId(faultConfigId);
            logger.debug("Fault name: " + faultName + ", Node/Service: " + nodeOrService);

            Map<String, String> faultInfo = new HashMap<>();
            faultInfo.put("fault_name", faultName);
            faultInfo.put("node_or_service", nodeOrService);
            basicInfo.add(faultInfo);
        }
        return basicInfo;
    }

    /**
     * Writes trace.json, metrics.json and logs.json for one record into
     * {@code <planDir>/<recordId>/}, querying each backend over the record's
     * start/end time window.
     *
     * @param record       execution record supplying the id and time window
     * @param planDir      parent directory for this plan's output
     * @param namespace    Kubernetes namespace to query
     * @param serviceNames services for which metrics and logs are collected
     * @throws IOException if the record directory cannot be created or a file write fails
     */
    private void processRecord(Record record, File planDir, String namespace,
                               List<String> serviceNames) throws IOException {
        long startTime = record.getStart_time().getTime();
        long endTime = record.getEnd_time().getTime();
        File recordDir = new File(planDir, String.valueOf(record.getId()));

        if (!recordDir.exists() && !recordDir.mkdirs()) {
            throw new IOException("Failed to create directory: " + recordDir.getPath());
        }

        // Trace data is namespace-wide for the record's window (not per-service).
        logger.info("Fetching trace data for recordId: " + record.getId());
        List<OtelTraces> traces = otelTracesService.findTracesByTimeAndNamespace(startTime, endTime, namespace);
        File traceFile = new File(recordDir, "trace.json");
        OBJECT_MAPPER.writeValue(traceFile, traces);
        logger.info("Trace data written to: " + traceFile.getPath());

        // Metrics and logs are collected per service, then written as one file each.
        List<Map<String, Object>> metricsData = new ArrayList<>();
        List<Map<String, Object>> logsData = new ArrayList<>();

        for (String serviceName : serviceNames) {
            logger.info("Processing service: " + serviceName);

            JsonNode metrics = metricsUtils.getMetricsForService(namespace, serviceName, startTime, endTime);
            Map<String, Object> metricEntry = new HashMap<>();
            metricEntry.put("service_name", serviceName);
            metricEntry.put("data", metrics);
            metricsData.add(metricEntry);
            logger.debug("Metrics data added for service: " + serviceName);

            List<OtelLogs> logs = logsUtils.getLogs(startTime, endTime, namespace, serviceName);
            Map<String, Object> logEntry = new HashMap<>();
            logEntry.put("service_name", serviceName);
            logEntry.put("data", logs);
            logsData.add(logEntry);
            logger.debug("Logs data added for service: " + serviceName);
        }

        File metricsFile = new File(recordDir, "metrics.json");
        OBJECT_MAPPER.writeValue(metricsFile, metricsData);
        logger.info("Metrics data written to: " + metricsFile.getPath());

        File logsFile = new File(recordDir, "logs.json");
        OBJECT_MAPPER.writeValue(logsFile, logsData);
        logger.info("Logs data written to: " + logsFile.getPath());
    }

    /**
     * Packs {@code sourceDir} into a zip archive at {@code outputZip}, creating
     * the output file's parent directories if necessary.
     *
     * @param sourceDir directory whose contents are zipped
     * @param outputZip absolute path of the zip file to create
     * @throws IOException if the parent directory cannot be created
     */
    public static void compressDirectory(File sourceDir, String outputZip) throws IOException {
        File zipFile = new File(outputZip);
        File parentDir = zipFile.getParentFile();
        // Guard against a parent-less path (getParentFile() may be null) and a
        // failed mkdirs(), both previously unchecked.
        if (parentDir != null && !parentDir.exists() && !parentDir.mkdirs()) {
            throw new IOException("Failed to create directory: " + parentDir.getPath());
        }
        ZipUtil.pack(sourceDir, zipFile);
    }
}