package com.atguigu.flink.chapter06;

import com.atguigu.flink.bean.UserBehavior;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.util.Collector;

/**
 * Computes a running page-view (PV) count from the UserBehavior CSV dataset:
 * parses each line into a {@code UserBehavior}, keeps only "pv" events, and
 * prints the cumulative count for every event seen.
 *
 * @author cjp
 * @version 1.0
 * @date 2021/8/10 14:20
 */
public class Flink02_PV {

    /**
     * Entry point: builds and executes the PV-counting streaming job.
     *
     * @param args optional; {@code args[0]} may supply the input CSV path,
     *             otherwise the original sample-file path is used
     * @throws Exception propagated from {@link StreamExecutionEnvironment#execute()}
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Allow the input path to be overridden on the command line; default to
        // the original hard-coded sample file so existing invocations still work.
        String inputPath = args.length > 0
                ? args[0]
                : "F:\\atguigu\\01_course\\code\\flink210323\\input\\UserBehavior.csv";

        // 1. Read the CSV file and parse each line into a UserBehavior POJO.
        //    Expected columns: userId,itemId,categoryId,behavior,timestamp
        SingleOutputStreamOperator<UserBehavior> userBehaviorDS = env
                .readTextFile(inputPath)
                .map(r -> {
                    String[] datas = r.split(",");
                    return new UserBehavior(
                            Long.parseLong(datas[0]),
                            Long.parseLong(datas[1]),
                            Integer.parseInt(datas[2]),
                            datas[3],
                            Long.parseLong(datas[4])
                    );
                });

        // 2.1 Filter first so downstream operators only see "pv" (page view)
        //     events — cheaper than filtering after the count.
        SingleOutputStreamOperator<UserBehavior> pvDS =
                userBehaviorDS.filter(r -> "pv".equals(r.getBehavior()));

        // 2.2 Count page views with a ProcessFunction, emitting the running
        //     total for each event. NOTE(review): the counter is a plain
        //     instance field, not Flink managed state — it is only correct at
        //     parallelism 1 and is lost on failure/restart. Fine for a teaching
        //     example; use keyed ValueState in production.
        pvDS
                .process(new ProcessFunction<UserBehavior, Long>() {

                    // long instead of int to avoid overflow on large streams.
                    private long pvCount = 0L;

                    @Override
                    public void processElement(UserBehavior value, Context ctx, Collector<Long> out) throws Exception {
                        pvCount++;
                        out.collect(pvCount);
                    }
                }).setParallelism(1)
                .print().setParallelism(1);

        env.execute();
    }
}
