package com.nanohadoop.mapreduce;

import com.nanohadoop.hdfs.HDFSClient;
import com.nanohadoop.mapreduce.core.*;
import com.nanohadoop.yarn.ApplicationMaster;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.*;
import java.util.*;

/**
 * In-process MapReduce execution engine.
 *
 * <p>Runs a complete job — map, shuffle, reduce — sequentially on the local
 * JVM, reading its input from HDFS and writing a single {@code part-r-00000}
 * output file. Job lifecycle status is reported through the
 * {@link ApplicationMaster}.
 */
public class MapReduceExecutor {
    private static final Logger log = LoggerFactory.getLogger(MapReduceExecutor.class);
    private final JobConfiguration conf;
    private final HDFSClient hdfsClient;
    private final ApplicationMaster appMaster;

    public MapReduceExecutor(JobConfiguration conf, HDFSClient hdfsClient, ApplicationMaster appMaster) {
        this.conf = conf;
        this.hdfsClient = hdfsClient;
        this.appMaster = appMaster;
    }

    /**
     * Executes the MapReduce job end-to-end: read input, map, shuffle,
     * reduce, write output.
     *
     * <p>Sets the application status to {@code RUNNING} at the start,
     * {@code COMPLETED} on success, and {@code FAILED} on any error
     * (the original exception is rethrown to the caller).
     *
     * @throws Exception if any phase fails — reflection errors loading the
     *                   mapper/reducer classes, I/O errors talking to HDFS,
     *                   or exceptions thrown by user map/reduce code
     */
    public void execute() throws Exception {
        try {
            appMaster.setStatus(ApplicationMaster.ApplicationStatus.RUNNING);

            // 1. Read the input file from HDFS as individual lines.
            List<String> inputLines = hdfsClient.readFileToList(conf.getInputPath());
            log.info("Read {} input lines from {}", inputLines.size(), conf.getInputPath());

            // 2. Map phase: one (key, value) record per mapper emission.
            List<Map.Entry<String, Integer>> mapOutputs = executeMapPhase(inputLines);
            log.info("Map phase produced {} records", mapOutputs.size());

            // 3. Shuffle phase: group map output values by key.
            Map<String, List<Integer>> groupedData = executeShufflePhase(mapOutputs);

            // 4. Reduce phase: one output line per reduced key.
            List<String> reduceOutputs = executeReducePhase(groupedData);
            log.info("Reduce phase produced {} output lines", reduceOutputs.size());

            // 5. Persist the result to HDFS.
            writeOutput(reduceOutputs);

            appMaster.setStatus(ApplicationMaster.ApplicationStatus.COMPLETED);

        } catch (Exception e) {
            // Mark the job failed but preserve the original exception for the caller.
            appMaster.setStatus(ApplicationMaster.ApplicationStatus.FAILED);
            throw e;
        }
    }

    /**
     * Runs the map phase: instantiates the configured mapper via reflection
     * and feeds it every input line.
     *
     * <p>Each input line is passed with the constant key {@code "line"};
     * mapper emissions are collected in order into the returned list.
     *
     * @param inputLines raw input lines read from HDFS
     * @return the mapper's emitted (key, value) pairs, in emission order
     * @throws Exception if the mapper class cannot be loaded/instantiated,
     *                   or if user map code throws
     */
    @SuppressWarnings("unchecked")
    private List<Map.Entry<String, Integer>> executeMapPhase(List<String> inputLines) throws Exception {
        List<Map.Entry<String, Integer>> mapOutputs = new ArrayList<>();

        // Load and instantiate the user-supplied mapper class by name.
        Class<?> mapperClass = Class.forName(conf.getMapperClass());
        Mapper<String, String, String, Integer> mapper =
                (Mapper<String, String, String, Integer>) mapperClass.getDeclaredConstructor().newInstance();

        // Context whose write() captures emissions into the local buffer.
        // SimpleEntry (rather than Map.entry) tolerates null keys/values
        // should user code emit them.
        MapperContext<String, String, String, Integer> context = new MapperContext<String, String, String, Integer>() {
            @Override
            public void write(String key, Integer value) {
                mapOutputs.add(new AbstractMap.SimpleEntry<>(key, value));
            }
        };

        for (String line : inputLines) {
            mapper.map("line", line, context);
        }

        return mapOutputs;
    }

    /**
     * Runs the shuffle phase: groups map-output values by key.
     *
     * @param mapOutputs (key, value) pairs emitted by the map phase
     * @return key -&gt; list of values, preserving per-key emission order
     */
    private Map<String, List<Integer>> executeShufflePhase(List<Map.Entry<String, Integer>> mapOutputs) {
        Map<String, List<Integer>> groupedData = new HashMap<>();

        for (Map.Entry<String, Integer> entry : mapOutputs) {
            groupedData.computeIfAbsent(entry.getKey(), k -> new ArrayList<>())
                    .add(entry.getValue());
        }

        return groupedData;
    }

    /**
     * Runs the reduce phase: instantiates the configured reducer via
     * reflection and invokes it once per grouped key.
     *
     * <p>Each reducer emission is formatted as {@code key + "\t" + value},
     * one output line per emission.
     *
     * @param groupedData shuffled data, key -&gt; values
     * @return formatted output lines, in reducer emission order
     * @throws Exception if the reducer class cannot be loaded/instantiated,
     *                   or if user reduce code throws
     */
    @SuppressWarnings("unchecked")
    private List<String> executeReducePhase(Map<String, List<Integer>> groupedData) throws Exception {
        List<String> reduceOutputs = new ArrayList<>();

        // Load and instantiate the user-supplied reducer class by name.
        Class<?> reducerClass = Class.forName(conf.getReducerClass());
        Reducer<String, Integer, String, Integer> reducer =
                (Reducer<String, Integer, String, Integer>) reducerClass.getDeclaredConstructor().newInstance();

        // Context whose write() formats each emission as a tab-separated line.
        ReducerContext<String, Integer, String, Integer> context = new ReducerContext<String, Integer, String, Integer>() {
            @Override
            public void write(String key, Integer value) {
                reduceOutputs.add(key + "\t" + value);
            }
        };

        for (Map.Entry<String, List<Integer>> entry : groupedData.entrySet()) {
            reducer.reduce(entry.getKey(), entry.getValue().iterator(), context);
        }

        return reduceOutputs;
    }

    /**
     * Sorts the reduce output lexicographically and writes it to HDFS as a
     * single {@code part-r-00000} file under the configured output path.
     *
     * <p>Note: the list passed in is sorted in place as a side effect.
     *
     * @param reduceOutputs formatted output lines from the reduce phase
     * @throws IOException if writing to HDFS fails
     */
    private void writeOutput(List<String> reduceOutputs) throws IOException {
        String outputPath = conf.getOutputPath() + "/part-r-00000";
        StringBuilder content = new StringBuilder();

        // Sort output lines for deterministic, Hadoop-like ordering.
        // (A duplicated, redundant second sort call was removed here.)
        Collections.sort(reduceOutputs);

        for (String line : reduceOutputs) {
            content.append(line).append("\n");
        }

        hdfsClient.createFile(outputPath, content.toString());
    }
}