package com.wolffy.flink.basic;

import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.connector.file.src.FileSource;
import org.apache.flink.connector.file.src.reader.TextLineInputFormat;
import org.apache.flink.core.fs.Path;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

/**
 * Computes the total page-view (pv) count from a user-behavior CSV file.
 *
 * <p>Pipeline: read lines from {@code input/UserBehavior.csv}, parse each line
 * into a {@code UserBehavior} POJO, keep only "pv" events, map each event to
 * {@code ("pv", 1L)} and maintain a keyed running sum that is printed to stdout.
 */
public class PV1 {
    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Bounded file source over the input CSV; TextLineInputFormat yields one String per line.
        FileSource<String> source = FileSource.forRecordStreamFormat(
                        new TextLineInputFormat(),
                        new Path("input/UserBehavior.csv"))
                .build();

        // Each record is a raw CSV line, e.g. 543462,1715,1464116,pv,1511658000
        // No watermarks are needed since this job uses no event-time operations.
        DataStreamSource<String> lines =
                env.fromSource(source, WatermarkStrategy.noWatermarks(), "file-source");

        // Parse every line into a UserBehavior POJO
        // (fields: userId, itemId, categoryId, behavior, timestamp).
        SingleOutputStreamOperator<UserBehavior> behaviors = lines.map(record -> {
            String[] fields = record.split(",");
            return new UserBehavior(
                    Long.valueOf(fields[0]),
                    Long.valueOf(fields[1]),
                    Integer.valueOf(fields[2]),
                    fields[3],
                    Long.valueOf(fields[4]));
        });

        // Retain only page-view events.
        SingleOutputStreamOperator<UserBehavior> pvEvents =
                behaviors.filter(event -> "pv".equals(event.getBehavior()));

        // Convert each pv event to ("pv", 1L) so the keyed sum produces a running total.
        // The explicit returns(...) hint is required: the lambda erases Tuple2's generics.
        pvEvents
                .map(event -> Tuple2.of("pv", 1L))
                .returns(Types.TUPLE(Types.STRING, Types.LONG))
                .keyBy(pair -> pair.f0)  // single constant key -> one global counter
                .sum(1)                  // accumulate the Long count in field f1
                .print();

        env.execute();
    }
}
