package com.github.codemperor.core.logger.compute.streaming;

import com.github.codemperor.core.logger.compute.Context;
import com.github.codemperor.core.logger.compute.IMapperStreamingFunction;
import com.github.codemperor.core.logger.compute.IReducerStreamingFunction;

import java.io.BufferedReader;
import java.io.FileReader;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

/**
 * 流式（实时）计算
 */
/**
 * Streaming (real-time) computation: loads raw log lines, then runs a
 * fluent map/reduce pipeline over them, accumulating state in a shared
 * {@link Context}.
 */
public class SparkStreaming {
    /** Shared accumulator passed to every mapper/reducer callback. */
    private final Context context = new Context();
    /**
     * Raw data (log line data).
     */
    private final List<String> dataList;

    private SparkStreaming(List<String> dataList) {
        this.dataList = dataList;
    }

    /**
     * Creates an instance by reading all non-blank lines from the file at
     * {@code path}.
     *
     * <p>Fixes over the previous version: the reader is opened in
     * try-with-resources so it is closed even when reading throws (it was
     * previously leaked on exception), the loop terminates on
     * {@code readLine() == null} instead of {@code ready()} ({@code ready()}
     * only says whether a read would block, not whether the stream is
     * exhausted), and the needless {@code StringBuffer} wrapper per line is
     * removed.
     *
     * @param path path of the log file; decoded with the platform default
     *             charset (NOTE(review): consider an explicit UTF-8 reader —
     *             kept as-is to preserve existing behavior)
     * @return a new instance holding the file's non-blank lines in order
     * @throws Exception if the file cannot be opened or read
     */
    public static SparkStreaming getInstance(String path) throws Exception {
        List<String> dataList = new ArrayList<>();
        try (BufferedReader in = new BufferedReader(new FileReader(path))) {
            String line;
            while ((line = in.readLine()) != null) {
                // Skip blank/whitespace-only lines; keep others verbatim.
                if (!line.trim().isEmpty()) {
                    dataList.add(line);
                }
            }
        }
        return new SparkStreaming(dataList);
    }

    /**
     * Creates an instance directly from already-loaded log lines.
     *
     * @param dataList the raw log lines to process
     * @return a new instance wrapping {@code dataList}
     */
    public static SparkStreaming getInstance(List<String> dataList) {
        return new SparkStreaming(dataList);
    }

    // NOTE(review): unused empty stub, kept for source compatibility;
    // presumably a planned line-splitting step — confirm before removing.
    private void splitting() {
    }

    /**
     * Map phase: applies {@code function} to every raw line, letting it
     * write intermediate results into the shared context.
     *
     * @param function mapper invoked once per line
     * @return this instance, for fluent chaining into {@link #reduce}
     */
    public SparkStreaming map(IMapperStreamingFunction function) {
        dataList.forEach(d -> function.mapping(d, context));
        return this;
    }

    /**
     * Reduce phase: applies {@code function} to each (key, value) entry
     * produced by the map phase (read from the context's mapper block).
     *
     * @param function reducer invoked once per mapper-block entry
     * @return this instance, for fluent chaining
     */
    public SparkStreaming reduce(IReducerStreamingFunction function) {
        context.getMapperBlock().forEach((k, v) -> function.reducing(k, v, context));
        return this;
    }

    /**
     * @return the final result map accumulated in the context by the
     *         reduce phase
     */
    public Map<Object, Object> getResult() {
        return context.getResult();
    }
}
