package com.atguigu.flink.chapter03_exec1;

import org.apache.flink.api.common.RuntimeExecutionMode;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.util.Collector;

/**
 * Created by Smexy on 2022/10/24
 *      PV: 434349
 */
/**
 * Computes the total PV (page-view) count from data/UserBehavior.csv.
 *
 * <p>Each CSV line is expected as {@code userId,itemId,categoryId,behavior,timestamp};
 * the job keeps only records whose behavior column (index 3) equals {@code "pv"},
 * funnels all of them to a single task via {@code global()}, and emits a running count.
 */
public class Demo1_PV
{
    public static void main(String[] args) {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        //env.setRuntimeMode(RuntimeExecutionMode.BATCH);

        env
            .readTextFile("data/UserBehavior.csv")
            // Extract the behavior column (4th field) from each CSV record.
            .map(new MapFunction<String, String>()
            {
                @Override
                public String map(String value) throws Exception {
                    String[] words = value.split(",");
                    return words[3];
                }
            })
            // Keep only page-view events.
            .filter(new FilterFunction<String>()
            {
                @Override
                public boolean filter(String value) throws Exception {
                    return "pv".equals(value);
                }
            })
            // Global aggregation: route every record to one downstream task.
            .global()
            .process(new ProcessFunction<String, Long>()
            {
                // Running counter held in a plain field. Use primitive long with an
                // uppercase L suffix: the boxed Long (with lowercase 'l') re-boxed on
                // every element and the 'l' reads as the digit 1.
                // NOTE(review): this is NOT Flink managed state, so the count is lost
                // on failure/restart — fine for this demo, but use ValueState for
                // fault tolerance in production.
                long result = 0L;

                @Override
                public void processElement(String value, Context ctx, Collector<Long> out) throws Exception {
                    result += 1;
                    out.collect(result);
                }
            }).setParallelism(1)
            .print().setParallelism(1);

        try {
            env.execute();
        } catch (Exception e) {
            // Propagate instead of swallowing: a failed job must not look successful.
            throw new RuntimeException("Flink job execution failed", e);
        }
    }
}
