package com.github.codemperor.core.logger.test.mr;

import com.alibaba.fastjson.JSONObject;
import com.github.codemperor.core.logger.BaseLogger;
import com.github.codemperor.core.logger.compute.mapreduce.MapReduceProcess;
import com.github.codemperor.core.logger.compute.streaming.SparkStreaming;
import com.github.codemperor.core.logger.test.mr.logfile.LogFileTestMapper;
import com.github.codemperor.core.logger.test.mr.logfile.LogFileTestReducer;
import com.github.codemperor.core.logger.test.mr.wordcount.WordCountMapper;
import com.github.codemperor.core.logger.test.mr.wordcount.WordCountReducer;

import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.Map;

/**
 * Manual demo driver for the toy MapReduce / streaming framework.
 *
 * <p>Each public method is a self-contained demo; {@code main} runs one of
 * them. Results are printed to stdout rather than asserted — this class is a
 * playground, not an automated test.
 */
public class WordCountMain {

    /**
     * Absolute path of the sample access log consumed by the file-based
     * demos. NOTE(review): machine-specific path — adjust for your
     * environment before running {@link #logProcess()} or
     * {@link #streamingLogFile()}.
     */
    private static final String LOG_FILE_PATH =
            "/Users/aihuishou/workspace/uptrade-platform/uptrade-auth/logs/data.log";

    /**
     * Entry point: runs the streaming log-file demo. The other demos —
     * {@link #wordCount()}, {@link #logProcess()}, {@link #streaming()} —
     * can be swapped in here as needed.
     */
    public static void main(String[] args) throws Exception {
        streamingLogFile();
    }

    /**
     * Classic word count over an in-memory data set, using explicit
     * mapper/reducer classes. Prints the word → count map as JSON.
     */
    public static void wordCount() {
        List<String> dataList = Arrays.asList(
                BaseLogger.conver("hello", "world", "hello"),
                BaseLogger.conver("text", "world", "test"),
                BaseLogger.conver("test", "we", "hello"),
                BaseLogger.conver("we", "text", "world"),
                BaseLogger.conver("a", "a", "a", "a", "a", "a", "a", "a", "a", "a")
        );

        Map<Object, Object> result = MapReduceProcess.getInstance()
                .setMapperClass(new WordCountMapper())
                .setReducerClass(new WordCountReducer())
                .execute(dataList);

        System.out.println(JSONObject.toJSONString(result));
    }

    /**
     * MapReduce run over the on-disk sample log ({@link #LOG_FILE_PATH}),
     * using dedicated log-file mapper/reducer classes. Prints the
     * aggregated result as JSON.
     *
     * @throws IOException if the log file cannot be read
     */
    public static void logProcess() throws IOException {
        Map<Object, Object> result = MapReduceProcess.getInstance()
                .setMapperClass(new LogFileTestMapper())
                .setReducerClass(new LogFileTestReducer())
                .execute(LOG_FILE_PATH);

        System.out.println(JSONObject.toJSONString(result));
    }

    /**
     * Streaming-style word count over the same in-memory data set as
     * {@link #wordCount()}, but with inline map/reduce lambdas instead of
     * mapper/reducer classes. Prints the word → count map as JSON.
     */
    public static void streaming() {
        List<String> dataList = Arrays.asList(
                BaseLogger.conver("hello", "world", "hello"),
                BaseLogger.conver("text", "world", "test"),
                BaseLogger.conver("test", "we", "hello"),
                BaseLogger.conver("we", "text", "world"),
                BaseLogger.conver("a", "a", "a", "a", "a", "a", "a", "a", "a", "a")
        );
        Map<Object, Object> result = SparkStreaming.getInstance(dataList)
                .map((line, context) -> {
                    // Split each encoded line back into words and emit (word, 1).
                    List<String> lineList = BaseLogger.resolver((String) line);
                    lineList.forEach(l -> context.map(l, 1));
                }).reduce((k, values, context) -> {
                    // Count = number of 1s collected for this key.
                    context.reduce(k, values.size());
                }).getResult();

        System.out.println(JSONObject.toJSONString(result));
    }

    /**
     * Streaming run over the sample log file: groups client IPs by the API
     * they called and prints one {@code api: [ips...]} line per key.
     *
     * @throws Exception propagated from the streaming pipeline
     */
    public static void streamingLogFile() throws Exception {
        Map<Object, Object> result = SparkStreaming.getInstance(LOG_FILE_PATH)
                .map((line, context) -> {
                    List<String> lineList = BaseLogger.resolver((String) line);
                    // Field layout assumed from usage: index 5 = client IP,
                    // index 6 = API path (index 7, the user email, was read
                    // but never used — dropped). TODO confirm against the
                    // log writer.
                    String ip = lineList.get(5);
                    String api = lineList.get(6);
                    context.map(api, ip);
                })
                // Identity reduce: keep the full list of IPs per API.
                .reduce((k, values, context) -> context.reduce(k, values))
                .getResult();

        result.forEach((k, v) -> System.out.println(k + ": " + v));
    }
}
