package com.ksc.wordcount.driver;

import akka.actor.ActorRef;
import akka.actor.ActorSystem;
import akka.actor.Identify;
import akka.actor.Props;
import com.ksc.wordcount.datasourceapi.*;
import com.ksc.wordcount.rpc.Driver.DriverActor;
import com.ksc.wordcount.rpc.Driver.DriverSystem;
import com.ksc.wordcount.service.server.ThriftServer;
import com.ksc.wordcount.shuffle.ShuffleBlockId;
import com.ksc.wordcount.task.*;
import com.ksc.wordcount.task.map.MapFunction;
import com.ksc.wordcount.task.map.MapTaskContext;
import com.ksc.wordcount.task.reduce.MiddleReduceContext;
import com.ksc.wordcount.task.reduce.ReduceFunction;
import com.ksc.wordcount.task.reduce.ReduceTaskContext;
import com.ksc.wordcount.util.PropertyUtil;
import com.ksc.wordcount.util.UrlTopNConfListener;

import java.io.Serializable;
import java.util.*;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.function.Consumer;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import java.util.stream.Stream;

public class WordCountDriver implements Serializable {

    // URL extraction regex; compiled once for the whole driver instead of
    // once per map-task instance inside the split loop.
    private static final Pattern URL_PATTERN = Pattern.compile("(http://[^\\s\"]+)");

    /**
     * Driver entry point.
     *
     * <p>Reads the driver's network settings from the command line, loads the
     * url-topn job configuration, starts the akka driver actor, runs the job
     * once, then registers a configuration listener (each configuration update
     * re-runs the job) and finally starts the thrift server.
     *
     * @param args [0] driver ip, [1] akka port, [2] thrift port, [3] memory
     */
    public static void main(String[] args) {
        String ip = args[0];
        String akkaPort = args[1];
        String thriftPort = args[2];
        // NOTE(review): parsed but never used below — confirm whether it
        // should be forwarded to the executor system.
        String memory = args[3];

        DriverEnv.host = ip;
        DriverEnv.port = Integer.parseInt(akkaPort);

        Map<String, String> urltopnProperty = PropertyUtil.getUrltopnProperties();
        System.out.println(urltopnProperty);

        String inputPath = urltopnProperty.get("inputPath");
        String outputPath = urltopnProperty.get("outputPath");
        String applicationId = urltopnProperty.get("applicationId");
        int reduceTaskNum = Integer.parseInt(urltopnProperty.get("reduceTask"));
        int topN = Integer.parseInt(urltopnProperty.get("topN"));
        int splitSize = Integer.parseInt(urltopnProperty.get("splitSize"));

        // When the listener delivers an updated configuration, re-run the
        // whole map/reduce/sort pipeline with the new settings.
        Consumer<Map<String, String>> callback = (property) -> {
            System.out.println("Received latest configuration:");
            System.out.println(property);

            String latestInputPath = property.get("inputPath");
            String latestOutputPath = property.get("outputPath");
            String latestApplicationId = property.get("applicationId");
            int latestReduceTaskNum = Integer.parseInt(property.get("reduceTask"));
            int latestTopN = Integer.parseInt(property.get("topN"));
            int latestSplitSize = Integer.parseInt(property.get("splitSize"));

            mapReduce(latestInputPath, latestOutputPath, latestApplicationId,
                    latestReduceTaskNum, latestTopN, latestSplitSize);
        };

        // Start the akka actor system and the driver actor that executors
        // register with and report task status to.
        ActorSystem executorSystem = DriverSystem.getExecutorSystem();
        ActorRef driverActorRef = executorSystem.actorOf(Props.create(DriverActor.class), "driverActor");
        System.out.println("ServerActor started at: " + driverActorRef.path().toString());

        // Run the job once with the initial configuration.
        mapReduce(inputPath, outputPath, applicationId, reduceTaskNum, topN, splitSize);

        UrlTopNConfListener.start(callback);
        // Start the thrift server listener.
        ThriftServer.start(Integer.parseInt(thriftPort));
    }

    /**
     * Runs one complete url-topn job: a map stage that extracts URLs from each
     * input line and emits (url, 1) pairs, a reduce stage that sums the counts
     * per url, and a final single-task sort stage that keeps the top-N urls
     * (entries tied with the N-th value are kept as well).
     *
     * @param inputPath     path of the raw input to split
     * @param outputPath    directory the reduce/sort writers emit results into
     * @param applicationId id used to namespace shuffle and output files
     * @param reduceTaskNum number of reduce partitions/tasks
     * @param TopN          how many top urls to keep (ties with the N-th value
     *                      are also kept); non-positive values yield no output
     * @param splitSize     input split size used to partition the input
     */
    private static void mapReduce(String inputPath,
                                  String outputPath,
                                  String applicationId,
                                  int reduceTaskNum,
                                  int TopN,
                                  int splitSize) {

        // File format used for split discovery, reading and result writing.
        FileFormat fileFormat = new SplitFileFormat();

        // 1. Split the input into partition files.
        PartionFile[] partionFiles = fileFormat.getSplits(inputPath, splitSize);

        TaskManager taskScheduler = DriverEnv.taskManager;

        // ---- Map stage: extract urls, emit (url, 1) ----
        // NOTE(review): the stage ids (0, 3, 2) look arbitrary but are only
        // used as consistent keys within this method; kept as-is — confirm
        // nothing outside depends on their values.
        int mapStageId = 0;
        // Register the stage's task queue before enqueuing its tasks.
        taskScheduler.registerBlockingQueue(mapStageId, new LinkedBlockingQueue<>());

        // The map function is stateless, so a single instance is shared by
        // every map task instead of being rebuilt once per split.
        MapFunction<String, KeyValue> wordCountMapFunction = new MapFunction<String, KeyValue>() {
            @Override
            public Stream<KeyValue> map(Stream<String> stream) {
                // Extract every url in each line and map it to (url, 1).
                return stream
                        .flatMap(line -> {
                            Matcher matcher = URL_PATTERN.matcher(line);
                            List<String> urls = new ArrayList<>();
                            while (matcher.find()) {
                                urls.add(matcher.group(1));
                            }
                            return urls.stream();
                        })
                        .map(url -> new KeyValue(url, 1));
            }
        };

        // 2. One map task per split, queued under the map stage id so the
        // scheduler can locate it.
        for (PartionFile partionFile : partionFiles) {
            MapTaskContext mapTaskContext = new MapTaskContext(applicationId,
                    "stage_" + mapStageId,
                    taskScheduler.generateTaskId(),
                    partionFile.getPartionId(),
                    partionFile,
                    fileFormat.createReader(),
                    reduceTaskNum,
                    wordCountMapFunction);

            taskScheduler.addTaskContext(mapStageId, mapTaskContext);
        }

        // Submit the map stage and block until every map task finishes.
        DriverEnv.taskScheduler.submitTask(mapStageId);
        DriverEnv.taskScheduler.waitStageFinish(mapStageId);

        // ---- Reduce stage: sum the counts per url ----
        int reduceStageId = 3;
        taskScheduler.registerBlockingQueue(reduceStageId, new LinkedBlockingQueue<>());

        for (int i = 0; i < reduceTaskNum; i++) {
            // Shuffle blocks the map stage produced for this reduce partition.
            ShuffleBlockId[] stageShuffleIds = taskScheduler.getStageShuffleIdByReduceId(mapStageId, i);

            ReduceFunction<String, Integer, String, Integer> reduceFunction =
                    new ReduceFunction<String, Integer, String, Integer>() {
                @Override
                public Stream<KeyValue<String, Integer>> reduce(Stream<KeyValue<String, Integer>> stream) {
                    // Sum the values of identical keys.
                    HashMap<String, Integer> counts = new HashMap<>();
                    stream.forEach(e -> counts.merge(e.getKey(), e.getValue(), Integer::sum));
                    return counts.entrySet().stream()
                            .map(e -> new KeyValue<>(e.getKey(), e.getValue()));
                }
            };

            PartionWriter partionWriter = fileFormat.createWriter(outputPath, i, applicationId);

            // The trailing argument (1) is the downstream partition count:
            // the sort stage runs as a single task.
            MiddleReduceContext middleReduceContext = new MiddleReduceContext(
                    applicationId,
                    "stage_" + reduceStageId,
                    taskScheduler.generateTaskId(),
                    i,
                    stageShuffleIds,
                    reduceFunction,
                    partionWriter,
                    1
            );
            taskScheduler.addTaskContext(reduceStageId, middleReduceContext);
        }

        // Submit the reduce stage and block until it finishes.
        DriverEnv.taskScheduler.submitTask(reduceStageId);
        DriverEnv.taskScheduler.waitStageFinish(reduceStageId);

        // ---- Sort stage: single task that keeps the global top-N ----
        int sortStageId = 2;
        taskScheduler.registerBlockingQueue(sortStageId, new LinkedBlockingQueue<>());

        for (int i = 0; i < 1; i++) {
            ShuffleBlockId[] stageShuffleIds = taskScheduler.getStageShuffleIdByReduceId(reduceStageId, i);

            ReduceFunction<String, Integer, String, Integer> sortFunction =
                    new ReduceFunction<String, Integer, String, Integer>() {
                @Override
                public Stream<KeyValue<String, Integer>> reduce(Stream<KeyValue<String, Integer>> stream) {
                    // Sort by count, descending.
                    List<KeyValue<String, Integer>> sortedList = stream
                            .sorted(Comparator.comparing((KeyValue<String, Integer> kv) -> kv.getValue())
                                    .reversed())
                            .collect(Collectors.toList());

                    // Fix: the original indexed the list unconditionally and
                    // threw IndexOutOfBoundsException on empty input or a
                    // non-positive TopN (index -1).
                    if (sortedList.isEmpty() || TopN <= 0) {
                        return Stream.empty();
                    }

                    // Count held by the N-th entry (or the last entry when
                    // there are fewer than N).
                    int topNValue = sortedList.get(Math.min(TopN, sortedList.size()) - 1).getValue();

                    // Keep the top N entries plus anything tied with the N-th.
                    return sortedList.stream()
                            .filter(kv -> kv.getValue() >= topNValue)
                            .map(e -> new KeyValue<>(e.getKey(), e.getValue()));
                }
            };

            // NOTE(review): same outputPath/partition index/applicationId as
            // the reduce-stage writer for partition 0 — confirm the sort
            // output does not clobber the reduce output.
            PartionWriter partionWriter = fileFormat.createWriter(outputPath, i, applicationId);

            ReduceTaskContext sortTaskContext = new ReduceTaskContext(applicationId,
                    "stage_" + sortStageId,
                    taskScheduler.generateTaskId(),
                    i,
                    stageShuffleIds,
                    sortFunction,
                    partionWriter);

            taskScheduler.addTaskContext(sortStageId, sortTaskContext);
        }

        // Submit the sort stage and block until it finishes.
        DriverEnv.taskScheduler.submitTask(sortStageId);
        DriverEnv.taskScheduler.waitStageFinish(sortStageId);

        // Clear this run's tasks so the next configuration update starts clean.
        DriverEnv.ClearTask();
        System.out.println("job finished");
    }

}