package com.itcast.flink.usage.batch;

import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.operators.DataSource;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.util.Collector;

import java.util.regex.Matcher;
import java.util.regex.Pattern;

import static java.util.regex.Pattern.compile;

/**
 * @program: flink-app
 * @description: Batch job that counts occurrences of each log level in a log file.
 * @author: zhanghz001
 * @create: 2021-07-22 10:26
 **/
public class ZhzBatchProcessorApplication {

    /**
     * Matches the log level that follows the "[main] " marker on a log line,
     * capturing everything up to the next '[' (e.g. "ERROR " from
     * "[main] ERROR [com.foo.Bar]"). Compiled once as a constant instead of
     * per record inside {@code flatMap}, which recompiled it for every line.
     */
    private static final Pattern LOG_LEVEL_PATTERN = compile("\\[main\\] (.*?)\\[");

    /**
     * Entry point: reads the order log, extracts the log level of each line,
     * then groups by level, sums the counts, and prints the result.
     *
     * @param args unused command-line arguments
     * @throws Exception if the Flink job fails to execute
     */
    public static void main(String[] args) throws Exception {
        // Set up the batch execution environment.
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
        // Read the log file line by line as the data source.
        DataSource<String> logData = env.readTextFile("./data/order_info.log");
        logData.flatMap(new FlatMapFunction<String, Tuple2<String, Integer>>() {
            @Override
            public void flatMap(String value,
                                Collector<Tuple2<String, Integer>> collector)
                    throws Exception {
                // Extract the log level from the current line using the
                // precompiled pattern (hoisted out of the per-record path).
                Matcher matcher = LOG_LEVEL_PATTERN.matcher(value);
                if (matcher.find()) {
                    // On a match, emit (level, 1) for downstream counting;
                    // trim() drops the trailing space captured by the regex.
                    collector.collect(new Tuple2<>(matcher.group(1).trim(), 1));
                }
            }
        }).groupBy(0).sum(1).print(); // Group by log level, sum counts, print the totals.
    }
}
