package cn.itcast.b_etl.transformation;

import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

/**
 * Requirement: read the local log file apache.log and keep only GET requests.
 */
public class MapFilterDemo {

    /**
     * Expected number of space-separated fields per log line:
     * ip id timestamp method url
     * e.g. "10.0.0.1 10003 17/05/2015:10:26:53 POST /presentations/logstash-monitorama-2013/css/print/paper.css"
     */
    private static final int FIELD_COUNT = 5;

    public static void main(String[] args) throws Exception {
        /*
         * 1. Obtain the StreamExecutionEnvironment
         * 2. Read the local file <apache.log>
         * 3. Transform -> map: parse each raw line into a LogEvent bean
         * 4. Print the stream
         * 5. Trigger execution
         */
        // 1. Obtain the StreamExecutionEnvironment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // 2. Read the local file
        DataStreamSource<String> source = env.readTextFile("data/apache.log");
        // 3. Transform -> map each raw line into a LogEvent bean.
        //    A pre-filter drops blank/malformed lines first; without it the
        //    unconditional arr[0..4] access below would throw
        //    ArrayIndexOutOfBoundsException and fail the whole job.
        source.filter(new FilterFunction<String>() {
            @Override
            public boolean filter(String value) throws Exception {
                // Keep only lines that have all expected fields.
                return value != null && value.split(" ").length >= FIELD_COUNT;
            }
        }).map(new MapFunction<String, LogEvent>() {
            @Override
            public LogEvent map(String value) throws Exception {
                // 10.0.0.1 10003 17/05/2015:10:26:53 POST /presentations/logstash-monitorama-2013/css/print/paper.css
                String[] arr = value.split(" ");
                return new LogEvent(
                        arr[0],
                        arr[1],
                        arr[2],
                        arr[3],
                        arr[4]
                );
            }
        }).filter(new FilterFunction<LogEvent>() {
            @Override
            public boolean filter(LogEvent value) throws Exception {
                // Null-safe comparison: constant on the left avoids an NPE
                // if getMethod() ever returns null.
                return "GET".equals(value.getMethod());
            }
        }).print(); // 4. Print the stream

        // 5. Trigger execution (named job for easier identification in the UI)
        env.execute("MapFilterDemo");
    }

}
