package com.atguigu.flink.chapter06;

import com.atguigu.flink.bean.UserBehavior;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

/**
 * PV (page-view) count: reads a UserBehavior CSV file, keeps only records whose
 * behavior is "pv", maps each to ("pv", 1), and prints a running sum keyed by "pv".
 *
 * @author cjp
 * @version 1.0
 * @date 2021/8/10 14:20
 */
public class Flink01_PV {
    /**
     * Entry point: builds and executes the PV-counting streaming job.
     *
     * @param args optional; args[0] overrides the input CSV path
     *             (defaults to the original development path for backward compatibility)
     * @throws Exception if job construction or execution fails
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Allow the input path to be supplied on the command line; fall back to
        // the hard-coded development path so existing invocations keep working.
        String inputPath = args.length > 0
                ? args[0]
                : "F:\\atguigu\\01_course\\code\\flink210323\\input\\UserBehavior.csv";

        // 1. Read and parse the CSV into UserBehavior POJOs.
        //    Expected column order: userId, itemId, categoryId, behavior, timestamp.
        SingleOutputStreamOperator<UserBehavior> userBehaviorDS = env
                .readTextFile(inputPath)
                .map(r -> {
                    String[] datas = r.split(",");
                    return new UserBehavior(
                            Long.parseLong(datas[0]),
                            Long.parseLong(datas[1]),
                            Integer.parseInt(datas[2]),
                            datas[3],
                            Long.parseLong(datas[4])
                    );
                });

        // 2. Process the data.
        // 2.1 Filter first (cheapest operation): keep only "pv" records.
        SingleOutputStreamOperator<UserBehavior> pvDS = userBehaviorDS.filter(r -> "pv".equals(r.getBehavior()));

        // 2.2 Transform to word-count style pairs: ("pv", 1).
        //     NOTE(review): an anonymous MapFunction is used instead of a lambda so that
        //     Flink can extract Tuple2's generic type info (a lambda here would require
        //     an explicit .returns(...) hint due to type erasure).
        SingleOutputStreamOperator<Tuple2<String, Integer>> pvAndOneDS = pvDS.map(new MapFunction<UserBehavior, Tuple2<String, Integer>>() {
            @Override
            public Tuple2<String, Integer> map(UserBehavior value) throws Exception {
                return Tuple2.of("pv", 1);
            }
        });

        // 2.3 Key by the constant "pv" tag and keep a running sum of the counts.
        pvAndOneDS
                .keyBy(r -> r.f0)
                .sum(1)
                .print();

        env.execute();
    }
}
